author    Florian Dold <florian.dold@gmail.com>  2019-08-07 22:45:47 +0200
committer Florian Dold <florian.dold@gmail.com>  2019-08-07 22:45:47 +0200
commit    65e39b7046a29aa299f06285441b62bcf1e4df01 (patch)
tree      2eb012aabb59533b954aa169199733292de336cf
parent    936cd90b7def6ef7c1e0b80265a9dc77a9ad23c6 (diff)
Move v8/build into this repository.
Since we need to patch some files, we no longer let depot_tools manage them. Imported from build.git commit a0b2e3b2708bcf81ec00ac1738b586bcc5e04eea.
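[Editor's note: with the directory vendored, upstream changes must be imported by hand instead of via `gclient sync`. Below is a minimal sketch of such an import step — a hypothetical helper, not part of this commit; it assumes a plain copy of the pinned upstream revision, after which local patches are re-applied.]

#!/usr/bin/env python
"""Hypothetical helper sketching the manual-update workflow that replaces
`gclient sync` once v8/build is checked in directly. Not part of this commit."""

import shutil
import subprocess
import tempfile

BUILD_GIT = 'https://chromium.googlesource.com/chromium/src/build.git'


def import_build(revision, dest='deps/v8/build'):
  """Copies upstream build.git at `revision` over the vendored directory."""
  tmp = tempfile.mkdtemp()
  try:
    subprocess.check_call(['git', 'clone', BUILD_GIT, tmp])
    subprocess.check_call(['git', '-C', tmp, 'checkout', revision])
    shutil.rmtree(dest, ignore_errors=True)
    # Drop upstream's .git metadata; the files are tracked by this repo now.
    shutil.copytree(tmp, dest, ignore=shutil.ignore_patterns('.git'))
  finally:
    shutil.rmtree(tmp, ignore_errors=True)


if __name__ == '__main__':
  # The upstream revision recorded in the commit message above; local
  # patches have to be re-applied after an import.
  import_build('a0b2e3b2708bcf81ec00ac1738b586bcc5e04eea')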
Diffstat:
-rw-r--r--  deps/v8/.gitignore  1
-rw-r--r--  deps/v8/DEPS  2
-rw-r--r--  deps/v8/build/.gitignore  25
-rw-r--r--  deps/v8/build/BUILD.gn  9
-rw-r--r--  deps/v8/build/OWNERS  28
-rw-r--r--  deps/v8/build/OWNERS.status  12
-rw-r--r--  deps/v8/build/README.md  34
-rw-r--r--  deps/v8/build/android/.style.yapf  6
-rw-r--r--  deps/v8/build/android/AndroidManifest.xml  20
-rw-r--r--  deps/v8/build/android/BUILD.gn  135
-rw-r--r--  deps/v8/build/android/CheckInstallApk-debug.apk  bin 0 -> 37106 bytes
-rw-r--r--  deps/v8/build/android/OWNERS  7
-rw-r--r--  deps/v8/build/android/PRESUBMIT.py  97
-rwxr-xr-x  deps/v8/build/android/adb_chrome_public_command_line  16
-rwxr-xr-x  deps/v8/build/android/adb_command_line.py  93
-rwxr-xr-x  deps/v8/build/android/adb_gdb  1000
-rwxr-xr-x  deps/v8/build/android/adb_install_apk.py  132
-rwxr-xr-x  deps/v8/build/android/adb_logcat_monitor.py  156
-rwxr-xr-x  deps/v8/build/android/adb_logcat_printer.py  222
-rwxr-xr-x  deps/v8/build/android/adb_profile_chrome  9
-rwxr-xr-x  deps/v8/build/android/adb_profile_chrome_startup  9
-rwxr-xr-x  deps/v8/build/android/adb_reverse_forwarder.py  90
-rwxr-xr-x  deps/v8/build/android/adb_system_webview_command_line  16
-rw-r--r--  deps/v8/build/android/android_only_explicit_jni_exports.lst  13
-rw-r--r--  deps/v8/build/android/android_only_jni_exports.lst  13
-rwxr-xr-x  deps/v8/build/android/apk_operations.py  1678
-rwxr-xr-x  deps/v8/build/android/apply_shared_preference_file.py  50
-rwxr-xr-x  deps/v8/build/android/asan_symbolize.py  138
-rw-r--r--  deps/v8/build/android/binary_size/OWNERS  4
-rw-r--r--  deps/v8/build/android/binary_size/__init__.py  3
-rwxr-xr-x  deps/v8/build/android/binary_size/apk_downloader.py  138
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1  1
-rw-r--r--  deps/v8/build/android/binary_size/apks/README.md  45
-rw-r--r--  deps/v8/build/android/buildhooks/BUILD.gn  58
-rw-r--r--  deps/v8/build/android/buildhooks/OWNERS  4
-rw-r--r--  deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java  50
-rw-r--r--  deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java  107
-rw-r--r--  deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java  54
-rw-r--r--  deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template  16
-rw-r--r--  deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags  5
-rw-r--r--  deps/v8/build/android/bytecode/BUILD.gn  27
-rw-r--r--  deps/v8/build/android/bytecode/OWNERS  4
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java  109
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java  293
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java  167
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java  51
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java  302
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java  149
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java  83
-rw-r--r--  deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java  87
-rw-r--r--  deps/v8/build/android/chromium-debug.keystore  bin 0 -> 2223 bytes
-rwxr-xr-x  deps/v8/build/android/convert_dex_profile.py  557
-rw-r--r--  deps/v8/build/android/convert_dex_profile_tests.py  276
-rw-r--r--  deps/v8/build/android/devil_chromium.json  130
-rw-r--r--  deps/v8/build/android/devil_chromium.py  170
-rw-r--r--  deps/v8/build/android/devil_chromium.pydeps  38
-rwxr-xr-x  deps/v8/build/android/diff_resource_sizes.py  198
-rw-r--r--  deps/v8/build/android/docs/README.md  11
-rw-r--r--  deps/v8/build/android/docs/android_app_bundles.md  210
-rw-r--r--  deps/v8/build/android/docs/build_config.md  170
-rw-r--r--  deps/v8/build/android/docs/coverage.md  56
-rw-r--r--  deps/v8/build/android/docs/life_of_a_resource.md  233
-rw-r--r--  deps/v8/build/android/docs/lint.md  91
-rwxr-xr-x  deps/v8/build/android/download_doclava.py  31
-rwxr-xr-x  deps/v8/build/android/dump_apk_resource_strings.py  662
-rwxr-xr-x  deps/v8/build/android/emma_coverage_stats.py  479
-rwxr-xr-x  deps/v8/build/android/emma_coverage_stats_test.py  563
-rw-r--r--  deps/v8/build/android/empty/.keep  2
-rw-r--r--  deps/v8/build/android/empty_proguard.flags  1
-rwxr-xr-x  deps/v8/build/android/envsetup.sh  29
-rwxr-xr-x  deps/v8/build/android/generate_emma_html.py  115
-rw-r--r--  deps/v8/build/android/gradle/AndroidManifest.xml  14
-rw-r--r--  deps/v8/build/android/gradle/OWNERS  4
-rw-r--r--  deps/v8/build/android/gradle/android.jinja  114
-rw-r--r--  deps/v8/build/android/gradle/cmake.jinja  26
-rw-r--r--  deps/v8/build/android/gradle/dependencies.jinja  28
-rwxr-xr-x  deps/v8/build/android/gradle/generate_gradle.py  974
-rwxr-xr-x  deps/v8/build/android/gradle/gn_to_cmake.py  687
-rw-r--r--  deps/v8/build/android/gradle/java.jinja  41
-rw-r--r--  deps/v8/build/android/gradle/manifest.jinja  7
-rw-r--r--  deps/v8/build/android/gradle/root.jinja  20
-rw-r--r--  deps/v8/build/android/gyp/OWNERS  6
-rwxr-xr-x  deps/v8/build/android/gyp/aar.py  166
-rw-r--r--  deps/v8/build/android/gyp/aar.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/aidl.py  58
-rw-r--r--  deps/v8/build/android/gyp/aidl.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/apkbuilder.py  377
-rw-r--r--  deps/v8/build/android/gyp/apkbuilder.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/assert_static_initializers.py  160
-rw-r--r--  deps/v8/build/android/gyp/assert_static_initializers.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/bundletool.py  32
-rwxr-xr-x  deps/v8/build/android/gyp/bytecode_processor.py  76
-rw-r--r--  deps/v8/build/android/gyp/bytecode_processor.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/compile_resources.py  916
-rw-r--r--  deps/v8/build/android/gyp/compile_resources.pydeps  29
-rwxr-xr-x  deps/v8/build/android/gyp/copy_ex.py  128
-rw-r--r--  deps/v8/build/android/gyp/copy_ex.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/create_apk_operations_script.py  84
-rw-r--r--  deps/v8/build/android/gyp/create_apk_operations_script.pydeps  3
-rwxr-xr-x  deps/v8/build/android/gyp/create_app_bundle.py  377
-rw-r--r--  deps/v8/build/android/gyp/create_app_bundle.pydeps  30
-rwxr-xr-x  deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py  46
-rw-r--r--  deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps  33
-rwxr-xr-x  deps/v8/build/android/gyp/create_bundle_wrapper_script.py  115
-rw-r--r--  deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps  102
-rwxr-xr-x  deps/v8/build/android/gyp/create_java_binary_script.py  112
-rw-r--r--  deps/v8/build/android/gyp/create_java_binary_script.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/create_size_info_files.py  167
-rw-r--r--  deps/v8/build/android/gyp/create_size_info_files.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/create_stack_script.py  80
-rw-r--r--  deps/v8/build/android/gyp/create_stack_script.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/create_tool_wrapper.py  46
-rw-r--r--  deps/v8/build/android/gyp/create_tool_wrapper.pydeps  3
-rwxr-xr-x  deps/v8/build/android/gyp/create_ui_locale_resources.py  91
-rwxr-xr-x  deps/v8/build/android/gyp/desugar.py  60
-rw-r--r--  deps/v8/build/android/gyp/desugar.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/dex.py  388
-rw-r--r--  deps/v8/build/android/gyp/dex.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/dexsplitter.py  133
-rw-r--r--  deps/v8/build/android/gyp/dexsplitter.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/dist_aar.py  132
-rw-r--r--  deps/v8/build/android/gyp/dist_aar.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/emma_instr.py  271
-rw-r--r--  deps/v8/build/android/gyp/emma_instr.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/extract_unwind_tables.py  288
-rwxr-xr-x  deps/v8/build/android/gyp/extract_unwind_tables_tests.py  121
-rwxr-xr-x  deps/v8/build/android/gyp/filter_zip.py  71
-rw-r--r--  deps/v8/build/android/gyp/filter_zip.pydeps  7
-rw-r--r--  deps/v8/build/android/gyp/finalize_apk.py  32
-rwxr-xr-x  deps/v8/build/android/gyp/find.py  30
-rwxr-xr-x  deps/v8/build/android/gyp/find_sun_tools_jar.py  54
-rwxr-xr-x  deps/v8/build/android/gyp/gcc_preprocess.py  54
-rw-r--r--  deps/v8/build/android/gyp/gcc_preprocess.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/generate_android_wrapper.py  42
-rwxr-xr-x  deps/v8/build/android/gyp/generate_linker_version_script.py  72
-rw-r--r--  deps/v8/build/android/gyp/generate_linker_version_script.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/generate_resource_rewriter.py  109
-rwxr-xr-x  deps/v8/build/android/gyp/generate_v14_compatible_resources.py  281
-rwxr-xr-x  deps/v8/build/android/gyp/ijar.py  24
-rw-r--r--  deps/v8/build/android/gyp/ijar.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/jar.py  93
-rwxr-xr-x  deps/v8/build/android/gyp/java_cpp_enum.py  435
-rw-r--r--  deps/v8/build/android/gyp/java_cpp_enum.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/java_cpp_enum_tests.py  747
-rwxr-xr-x  deps/v8/build/android/gyp/java_cpp_strings.py  213
-rw-r--r--  deps/v8/build/android/gyp/java_cpp_strings.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/java_cpp_strings_tests.py  105
-rwxr-xr-x  deps/v8/build/android/gyp/java_google_api_keys.py  123
-rwxr-xr-x  deps/v8/build/android/gyp/java_google_api_keys_tests.py  42
-rwxr-xr-x  deps/v8/build/android/gyp/javac.py  595
-rw-r--r--  deps/v8/build/android/gyp/javac.pydeps  15
-rwxr-xr-x  deps/v8/build/android/gyp/jinja_template.py  160
-rw-r--r--  deps/v8/build/android/gyp/jinja_template.pydeps  41
-rwxr-xr-x  deps/v8/build/android/gyp/lint.py  399
-rw-r--r--  deps/v8/build/android/gyp/lint.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/main_dex_list.py  174
-rw-r--r--  deps/v8/build/android/gyp/main_dex_list.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/merge_manifest.py  174
-rw-r--r--  deps/v8/build/android/gyp/merge_manifest.pydeps  8
-rwxr-xr-x  deps/v8/build/android/gyp/prepare_resources.py  324
-rw-r--r--  deps/v8/build/android/gyp/prepare_resources.pydeps  30
-rwxr-xr-x  deps/v8/build/android/gyp/proguard.py  290
-rw-r--r--  deps/v8/build/android/gyp/proguard.pydeps  9
-rw-r--r--  deps/v8/build/android/gyp/test/BUILD.gn  13
-rw-r--r--  deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java  15
-rw-r--r--  deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java  12
-rw-r--r--  deps/v8/build/android/gyp/util/__init__.py  3
-rw-r--r--  deps/v8/build/android/gyp/util/build_utils.py  650
-rwxr-xr-x  deps/v8/build/android/gyp/util/build_utils_test.py  48
-rwxr-xr-x  deps/v8/build/android/gyp/util/diff_utils.py  44
-rw-r--r--  deps/v8/build/android/gyp/util/jar_info_utils.py  51
-rwxr-xr-x  deps/v8/build/android/gyp/util/java_cpp_utils.py  32
-rw-r--r--  deps/v8/build/android/gyp/util/md5_check.py  420
-rwxr-xr-x  deps/v8/build/android/gyp/util/md5_check_test.py  151
-rw-r--r--  deps/v8/build/android/gyp/util/proguard_util.py  236
-rw-r--r--  deps/v8/build/android/gyp/util/resource_utils.py  834
-rwxr-xr-x  deps/v8/build/android/gyp/util/resource_utils_test.py  268
-rwxr-xr-x  deps/v8/build/android/gyp/write_build_config.py  1643
-rw-r--r--  deps/v8/build/android/gyp/write_build_config.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/write_ordered_libraries.py  117
-rw-r--r--  deps/v8/build/android/gyp/write_ordered_libraries.pydeps  7
-rwxr-xr-x  deps/v8/build/android/gyp/zip.py  71
-rw-r--r--  deps/v8/build/android/gyp/zip.pydeps  7
-rwxr-xr-x  deps/v8/build/android/host_heartbeat.py  36
-rw-r--r--  deps/v8/build/android/incremental_install/BUILD.gn  20
-rw-r--r--  deps/v8/build/android/incremental_install/README.md  81
-rw-r--r--  deps/v8/build/android/incremental_install/__init__.py  3
-rwxr-xr-x  deps/v8/build/android/incremental_install/generate_android_manifest.py  139
-rw-r--r--  deps/v8/build/android/incremental_install/generate_android_manifest.pydeps  29
-rwxr-xr-x  deps/v8/build/android/incremental_install/installer.py  303
-rw-r--r--  deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java  288
-rw-r--r--  deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java  25
-rw-r--r--  deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java  291
-rw-r--r--  deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java  129
-rw-r--r--  deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java  142
-rw-r--r--  deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java  12
-rwxr-xr-x  deps/v8/build/android/incremental_install/write_installer_json.py  80
-rw-r--r--  deps/v8/build/android/incremental_install/write_installer_json.pydeps  7
-rwxr-xr-x  deps/v8/build/android/lighttpd_server.py  258
-rw-r--r--  deps/v8/build/android/lint/OWNERS  2
-rwxr-xr-x  deps/v8/build/android/lint/suppress.py  138
-rw-r--r--  deps/v8/build/android/lint/suppressions.xml  404
-rwxr-xr-x  deps/v8/build/android/list_class_verification_failures.py  282
-rw-r--r--  deps/v8/build/android/list_class_verification_failures_test.py  233
-rw-r--r--  deps/v8/build/android/main_dex_classes.flags  61
-rwxr-xr-x  deps/v8/build/android/method_count.py  116
-rw-r--r--  deps/v8/build/android/multidex.flags  8
-rw-r--r--  deps/v8/build/android/play_services/__init__.py  3
-rwxr-xr-x  deps/v8/build/android/play_services/preprocess.py  244
-rw-r--r--  deps/v8/build/android/play_services/utils.py  144
-rwxr-xr-x  deps/v8/build/android/provision_devices.py  561
-rw-r--r--  deps/v8/build/android/pylib/OWNERS  6
-rw-r--r--  deps/v8/build/android/pylib/__init__.py  31
-rw-r--r--  deps/v8/build/android/pylib/android/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/android/logcat_symbolizer.py  98
-rw-r--r--  deps/v8/build/android/pylib/base/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/base/base_test_result.py  262
-rw-r--r--  deps/v8/build/android/pylib/base/base_test_result_unittest.py  82
-rw-r--r--  deps/v8/build/android/pylib/base/environment.py  49
-rw-r--r--  deps/v8/build/android/pylib/base/environment_factory.py  19
-rw-r--r--  deps/v8/build/android/pylib/base/mock_environment.py  12
-rw-r--r--  deps/v8/build/android/pylib/base/mock_test_instance.py  12
-rw-r--r--  deps/v8/build/android/pylib/base/output_manager.py  158
-rw-r--r--  deps/v8/build/android/pylib/base/output_manager_factory.py  16
-rw-r--r--  deps/v8/build/android/pylib/base/output_manager_test_case.py  14
-rw-r--r--  deps/v8/build/android/pylib/base/test_collection.py  80
-rw-r--r--  deps/v8/build/android/pylib/base/test_exception.py  8
-rw-r--r--  deps/v8/build/android/pylib/base/test_instance.py  40
-rw-r--r--  deps/v8/build/android/pylib/base/test_instance_factory.py  31
-rw-r--r--  deps/v8/build/android/pylib/base/test_run.py  50
-rw-r--r--  deps/v8/build/android/pylib/base/test_run_factory.py  56
-rw-r--r--  deps/v8/build/android/pylib/base/test_server.py  18
-rw-r--r--  deps/v8/build/android/pylib/constants/__init__.py  274
-rw-r--r--  deps/v8/build/android/pylib/constants/host_paths.py  95
-rwxr-xr-x  deps/v8/build/android/pylib/constants/host_paths_unittest.py  50
-rw-r--r--  deps/v8/build/android/pylib/content_settings.py  80
-rw-r--r--  deps/v8/build/android/pylib/device/__init__.py  0
-rw-r--r--  deps/v8/build/android/pylib/device/commands/BUILD.gn  20
-rw-r--r--  deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java  95
-rw-r--r--  deps/v8/build/android/pylib/device_settings.py  199
-rw-r--r--  deps/v8/build/android/pylib/gtest/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/gtest/filter/OWNERS  1
-rw-r--r--  deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled  25
-rw-r--r--  deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled  10
-rw-r--r--  deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled  9
-rw-r--r--  deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled  50
-rw-r--r--  deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled  80
-rw-r--r--  deps/v8/build/android/pylib/gtest/gtest_config.py  57
-rw-r--r--  deps/v8/build/android/pylib/gtest/gtest_test_instance.py  530
-rwxr-xr-x  deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py  217
-rw-r--r--  deps/v8/build/android/pylib/instrumentation/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py  105
-rwxr-xr-x  deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py  134
-rw-r--r--  deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py  944
-rwxr-xr-x  deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py  972
-rw-r--r--  deps/v8/build/android/pylib/instrumentation/json_perf_parser.py  161
-rw-r--r--  deps/v8/build/android/pylib/instrumentation/render_test.html.jinja  40
-rw-r--r--  deps/v8/build/android/pylib/instrumentation/test_result.py  30
-rw-r--r--  deps/v8/build/android/pylib/junit/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/junit/junit_test_instance.py  80
-rw-r--r--  deps/v8/build/android/pylib/linker/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/linker/linker_test_instance.py  51
-rw-r--r--  deps/v8/build/android/pylib/linker/test_case.py  215
-rw-r--r--  deps/v8/build/android/pylib/local/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/local/device/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_environment.py  300
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_gtest_run.py  635
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py  965
-rwxr-xr-x  deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py  69
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py  75
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py  126
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py  538
-rw-r--r--  deps/v8/build/android/pylib/local/device/local_device_test_run.py  251
-rwxr-xr-x  deps/v8/build/android/pylib/local/device/local_device_test_run_test.py  174
-rw-r--r--  deps/v8/build/android/pylib/local/local_test_server_spawner.py  100
-rw-r--r--  deps/v8/build/android/pylib/local/machine/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/local/machine/local_machine_environment.py  24
-rw-r--r--  deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py  136
-rw-r--r--  deps/v8/build/android/pylib/monkey/__init__.py  0
-rw-r--r--  deps/v8/build/android/pylib/monkey/monkey_test_instance.py  72
-rw-r--r--  deps/v8/build/android/pylib/output/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/output/local_output_manager.py  45
-rwxr-xr-x  deps/v8/build/android/pylib/output/local_output_manager_test.py  34
-rw-r--r--  deps/v8/build/android/pylib/output/noop_output_manager.py  42
-rwxr-xr-x  deps/v8/build/android/pylib/output/noop_output_manager_test.py  27
-rw-r--r--  deps/v8/build/android/pylib/output/remote_output_manager.py  89
-rwxr-xr-x  deps/v8/build/android/pylib/output/remote_output_manager_test.py  34
-rw-r--r--  deps/v8/build/android/pylib/perf/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/perf/perf_test_instance.py  239
-rw-r--r--  deps/v8/build/android/pylib/pexpect.py  21
-rwxr-xr-x  deps/v8/build/android/pylib/restart_adbd.sh  20
-rw-r--r--  deps/v8/build/android/pylib/results/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py  699
-rw-r--r--  deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py  213
-rw-r--r--  deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py  176
-rw-r--r--  deps/v8/build/android/pylib/results/json_results.py  154
-rwxr-xr-x  deps/v8/build/android/pylib/results/json_results_test.py  207
-rw-r--r--  deps/v8/build/android/pylib/results/presentation/__init__.py  3
-rw-r--r--  deps/v8/build/android/pylib/results/presentation/javascript/main_html.js  214
-rwxr-xr-x  deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py  168
-rw-r--r--  deps/v8/build/android/pylib/results/presentation/template/main.html  97
-rw-r--r--  deps/v8/build/android/pylib/results/presentation/template/table.html  60
-rwxr-xr-x  deps/v8/build/android/pylib/results/presentation/test_results_presentation.py  543
-rw-r--r--  deps/v8/build/android/pylib/results/report_results.py  131
-rw-r--r--  deps/v8/build/android/pylib/symbols/__init__.py  0
-rwxr-xr-x  deps/v8/build/android/pylib/symbols/apk_lib_dump.py  59
-rw-r--r--  deps/v8/build/android/pylib/symbols/apk_native_libs.py  419
-rw-r--r--  deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py  396
-rw-r--r--  deps/v8/build/android/pylib/symbols/deobfuscator.py  165
-rw-r--r--  deps/v8/build/android/pylib/symbols/elf_symbolizer.py  487
-rwxr-xr-x  deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py  196
-rw-r--r--  deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py  0
-rwxr-xr-x  deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line  79
-rw-r--r--  deps/v8/build/android/pylib/symbols/stack_symbolizer.py  81
-rw-r--r--  deps/v8/build/android/pylib/symbols/symbol_utils.py  812
-rw-r--r--  deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py  943
-rw-r--r--  deps/v8/build/android/pylib/utils/__init__.py  0
-rw-r--r--  deps/v8/build/android/pylib/utils/app_bundle_utils.py  140
-rw-r--r--  deps/v8/build/android/pylib/utils/argparse_utils.py  50
-rw-r--r--  deps/v8/build/android/pylib/utils/decorators.py  37
-rwxr-xr-x  deps/v8/build/android/pylib/utils/decorators_test.py  104
-rw-r--r--  deps/v8/build/android/pylib/utils/device_dependencies.py  117
-rwxr-xr-x  deps/v8/build/android/pylib/utils/device_dependencies_test.py  56
-rw-r--r--  deps/v8/build/android/pylib/utils/dexdump.py  115
-rwxr-xr-x  deps/v8/build/android/pylib/utils/dexdump_test.py  141
-rw-r--r--  deps/v8/build/android/pylib/utils/google_storage_helper.py  126
-rw-r--r--  deps/v8/build/android/pylib/utils/instrumentation_tracing.py  204
-rw-r--r--  deps/v8/build/android/pylib/utils/logdog_helper.py  94
-rw-r--r--  deps/v8/build/android/pylib/utils/logging_utils.py  136
-rwxr-xr-x  deps/v8/build/android/pylib/utils/maven_downloader.py  137
-rw-r--r--  deps/v8/build/android/pylib/utils/proguard.py  288
-rwxr-xr-x  deps/v8/build/android/pylib/utils/proguard_test.py  495
-rw-r--r--  deps/v8/build/android/pylib/utils/repo_utils.py  16
-rw-r--r--  deps/v8/build/android/pylib/utils/shared_preference_utils.py  95
-rw-r--r--  deps/v8/build/android/pylib/utils/simpleperf.py  259
-rw-r--r--  deps/v8/build/android/pylib/utils/test_filter.py  139
-rwxr-xr-x  deps/v8/build/android/pylib/utils/test_filter_test.py  233
-rw-r--r--  deps/v8/build/android/pylib/utils/time_profile.py  45
-rw-r--r--  deps/v8/build/android/pylib/utils/xvfb.py  58
-rw-r--r--  deps/v8/build/android/pylib/valgrind_tools.py  129
-rw-r--r--  deps/v8/build/android/pylintrc  15
-rw-r--r--  deps/v8/build/android/resource_sizes.gni  39
-rwxr-xr-x  deps/v8/build/android/resource_sizes.py  769
-rw-r--r--  deps/v8/build/android/resource_sizes.pydeps  63
-rwxr-xr-x  deps/v8/build/android/screenshot.py  13
-rw-r--r--  deps/v8/build/android/stacktrace/BUILD.gn  17
-rw-r--r--  deps/v8/build/android/stacktrace/README.md  23
-rwxr-xr-x  deps/v8/build/android/stacktrace/crashpad_stackwalker.py  173
-rw-r--r--  deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java  116
-rwxr-xr-x  deps/v8/build/android/stacktrace/java_deobfuscate_test.py  172
-rwxr-xr-x  deps/v8/build/android/stacktrace/stackwalker.py  135
-rwxr-xr-x  deps/v8/build/android/test_runner.py  1065
-rw-r--r--  deps/v8/build/android/test_runner.pydeps  210
-rwxr-xr-x  deps/v8/build/android/test_wrapper/logdog_wrapper.py  136
-rw-r--r--  deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps  12
-rw-r--r--  deps/v8/build/android/tests/symbolize/Makefile  11
-rw-r--r--  deps/v8/build/android/tests/symbolize/a.cc  14
-rw-r--r--  deps/v8/build/android/tests/symbolize/b.cc  14
-rwxr-xr-x  deps/v8/build/android/tombstones.py  282
-rwxr-xr-x  deps/v8/build/android/update_deps/update_third_party_deps.py  142
-rwxr-xr-x  deps/v8/build/android/update_verification.py  115
-rwxr-xr-x  deps/v8/build/android/video_recorder.py  13
-rwxr-xr-x  deps/v8/build/apply_locales.py  45
-rw-r--r--  deps/v8/build/args/OWNERS  1
-rw-r--r--  deps/v8/build/args/README.txt  31
-rw-r--r--  deps/v8/build/args/fuchsia.gn  7
-rw-r--r--  deps/v8/build/args/headless.gn  43
-rwxr-xr-x  deps/v8/build/build-ctags.sh  49
-rw-r--r--  deps/v8/build/build_config.h  207
-rw-r--r--  deps/v8/build/buildflag.h  47
-rw-r--r--  deps/v8/build/buildflag_header.gni  141
-rwxr-xr-x  deps/v8/build/check_gn_headers.py  308
-rwxr-xr-x  deps/v8/build/check_gn_headers_unittest.py  101
-rw-r--r--  deps/v8/build/check_gn_headers_whitelist.txt  357
-rwxr-xr-x  deps/v8/build/check_return_value.py  17
-rw-r--r--  deps/v8/build/chromeos/PRESUBMIT.py  24
-rwxr-xr-x  deps/v8/build/chromeos/create_test_runner_script.py  118
-rw-r--r--  deps/v8/build/chromeos/pylintrc  15
-rwxr-xr-x  deps/v8/build/chromeos/test_runner.py  765
-rw-r--r--  deps/v8/build/ciopfs.sha1  1
-rwxr-xr-x  deps/v8/build/cipd/clobber_cipd_root.py  33
-rwxr-xr-x  deps/v8/build/clobber.py  132
-rw-r--r--  deps/v8/build/common.croc  127
-rw-r--r--  deps/v8/build/compiled_action.gni  167
-rwxr-xr-x  deps/v8/build/compute_build_timestamp.py  122
-rw-r--r--  deps/v8/build/config/BUILD.gn  437
-rw-r--r--  deps/v8/build/config/BUILDCONFIG.gn  622
-rw-r--r--  deps/v8/build/config/OWNERS  4
-rw-r--r--  deps/v8/build/config/aix/BUILD.gn  50
-rw-r--r--  deps/v8/build/config/allocator.gni  58
-rw-r--r--  deps/v8/build/config/android/BUILD.gn  175
-rw-r--r--  deps/v8/build/config/android/OWNERS  6
-rw-r--r--  deps/v8/build/config/android/abi.gni  80
-rw-r--r--  deps/v8/build/config/android/config.gni  380
-rw-r--r--  deps/v8/build/config/android/extract_unwind_tables.gni  56
-rw-r--r--  deps/v8/build/config/android/internal_rules.gni  3816
-rw-r--r--  deps/v8/build/config/android/linker_version_script.gni  37
-rw-r--r--  deps/v8/build/config/android/rules.gni  4584
-rw-r--r--  deps/v8/build/config/android/sdk.gni  10
-rw-r--r--  deps/v8/build/config/arm.gni  127
-rw-r--r--  deps/v8/build/config/c++/BUILD.gn  117
-rw-r--r--  deps/v8/build/config/c++/c++.gni  64
-rw-r--r--  deps/v8/build/config/chrome_build.gni  26
-rw-r--r--  deps/v8/build/config/chromecast/BUILD.gn  85
-rw-r--r--  deps/v8/build/config/chromecast_build.gni  79
-rw-r--r--  deps/v8/build/config/chromeos/rules.gni  235
-rw-r--r--  deps/v8/build/config/clang/BUILD.gn  42
-rw-r--r--  deps/v8/build/config/clang/clang.gni  15
-rw-r--r--  deps/v8/build/config/compiler/BUILD.gn  2423
-rw-r--r--  deps/v8/build/config/compiler/compiler.gni  286
-rw-r--r--  deps/v8/build/config/compiler/pgo/BUILD.gn  101
-rw-r--r--  deps/v8/build/config/compiler/pgo/pgo.gni  17
-rw-r--r--  deps/v8/build/config/compute_inputs_for_analyze.gni  14
-rw-r--r--  deps/v8/build/config/coverage/BUILD.gn  44
-rw-r--r--  deps/v8/build/config/coverage/OWNERS  3
-rw-r--r--  deps/v8/build/config/coverage/coverage.gni  29
-rw-r--r--  deps/v8/build/config/crypto.gni  15
-rw-r--r--  deps/v8/build/config/dcheck_always_on.gni  15
-rw-r--r--  deps/v8/build/config/features.gni  59
-rw-r--r--  deps/v8/build/config/freetype/BUILD.gn  16
-rw-r--r--  deps/v8/build/config/freetype/OWNERS  2
-rw-r--r--  deps/v8/build/config/freetype/freetype.gni  14
-rw-r--r--  deps/v8/build/config/fuchsia/BUILD.gn  128
-rw-r--r--  deps/v8/build/config/fuchsia/OWNERS  1
-rwxr-xr-x  deps/v8/build/config/fuchsia/build_symbol_archive.py  76
-rw-r--r--  deps/v8/build/config/fuchsia/config.gni  22
-rw-r--r--  deps/v8/build/config/fuchsia/extend_fvm.py  26
-rw-r--r--  deps/v8/build/config/fuchsia/fidl_library.gni  262
-rw-r--r--  deps/v8/build/config/fuchsia/package.gni  239
-rw-r--r--  deps/v8/build/config/fuchsia/prepare_package_inputs.py  220
-rw-r--r--  deps/v8/build/config/fuchsia/rules.gni  157
-rw-r--r--  deps/v8/build/config/fuchsia/symbol_archive.gni  46
-rw-r--r--  deps/v8/build/config/fuchsia/testing_sandbox_policy  25
-rw-r--r--  deps/v8/build/config/gcc/BUILD.gn  116
-rwxr-xr-x  deps/v8/build/config/get_host_byteorder.py  11
-rw-r--r--  deps/v8/build/config/host_byteorder.gni  27
-rw-r--r--  deps/v8/build/config/ios/BUILD.gn  136
-rw-r--r--  deps/v8/build/config/ios/BuildInfo.plist  35
-rw-r--r--  deps/v8/build/config/ios/Host-Info.plist  126
-rw-r--r--  deps/v8/build/config/ios/Module-Info.plist  24
-rw-r--r--  deps/v8/build/config/ios/OWNERS  1
-rw-r--r--  deps/v8/build/config/ios/asset_catalog.gni  148
-rw-r--r--  deps/v8/build/config/ios/codesign.py  534
-rw-r--r--  deps/v8/build/config/ios/dummy.py  15
-rw-r--r--  deps/v8/build/config/ios/entitlements.plist  12
-rw-r--r--  deps/v8/build/config/ios/find_signing_identity.py  47
-rw-r--r--  deps/v8/build/config/ios/generate_umbrella_header.py  75
-rw-r--r--  deps/v8/build/config/ios/hardlink.py  69
-rw-r--r--  deps/v8/build/config/ios/ios_sdk.gni  167
-rw-r--r--  deps/v8/build/config/ios/ios_sdk_overrides.gni  17
-rw-r--r--  deps/v8/build/config/ios/rules.gni  2047
-rw-r--r--  deps/v8/build/config/ios/write_framework_hmap.py  97
-rw-r--r--  deps/v8/build/config/ios/write_framework_modulemap.py  26
-rw-r--r--  deps/v8/build/config/ios/xctest_shell.mm  19
-rw-r--r--  deps/v8/build/config/jumbo.gni  313
-rw-r--r--  deps/v8/build/config/linux/BUILD.gn  110
-rw-r--r--  deps/v8/build/config/linux/OWNERS  1
-rw-r--r--  deps/v8/build/config/linux/atk/BUILD.gn  59
-rw-r--r--  deps/v8/build/config/linux/atspi2/BUILD.gn  29
-rw-r--r--  deps/v8/build/config/linux/dbus/BUILD.gn  14
-rw-r--r--  deps/v8/build/config/linux/dri/BUILD.gn  18
-rw-r--r--  deps/v8/build/config/linux/gtk/BUILD.gn  53
-rw-r--r--  deps/v8/build/config/linux/gtk/gtk.gni  10
-rw-r--r--  deps/v8/build/config/linux/libdrm/BUILD.gn  33
-rw-r--r--  deps/v8/build/config/linux/libffi/BUILD.gn  9
-rw-r--r--  deps/v8/build/config/linux/libva/BUILD.gn  13
-rw-r--r--  deps/v8/build/config/linux/nss/BUILD.gn  22
-rw-r--r--  deps/v8/build/config/linux/pangocairo/BUILD.gn  19
-rw-r--r--  deps/v8/build/config/linux/pangocairo/pangocairo.gni  7
-rwxr-xr-x  deps/v8/build/config/linux/pkg-config.py  246
-rw-r--r--  deps/v8/build/config/linux/pkg_config.gni  128
-rw-r--r--  deps/v8/build/config/locales.gni  189
-rw-r--r--  deps/v8/build/config/mac/BUILD.gn  109
-rw-r--r--  deps/v8/build/config/mac/BuildInfo.plist  18
-rw-r--r--  deps/v8/build/config/mac/OWNERS  4
-rw-r--r--  deps/v8/build/config/mac/base_rules.gni  308
-rw-r--r--  deps/v8/build/config/mac/compile_ib_files.py  61
-rw-r--r--  deps/v8/build/config/mac/mac_sdk.gni  112
-rw-r--r--  deps/v8/build/config/mac/mac_sdk_overrides.gni  16
-rw-r--r--  deps/v8/build/config/mac/package_framework.py  60
-rw-r--r--  deps/v8/build/config/mac/plist_util.py  226
-rw-r--r--  deps/v8/build/config/mac/prepare_framework_version.py  42
-rw-r--r--  deps/v8/build/config/mac/rules.gni  676
-rw-r--r--  deps/v8/build/config/mac/sdk_info.py  97
-rw-r--r--  deps/v8/build/config/mac/symbols.gni  30
-rw-r--r--  deps/v8/build/config/mac/write_pkg_info.py  47
-rw-r--r--  deps/v8/build/config/mac/xcrun.py  28
-rwxr-xr-x  deps/v8/build/config/merge_for_jumbo.py  145
-rw-r--r--  deps/v8/build/config/mips.gni  67
-rw-r--r--  deps/v8/build/config/nacl/BUILD.gn  143
-rw-r--r--  deps/v8/build/config/nacl/config.gni  55
-rw-r--r--  deps/v8/build/config/nacl/rules.gni  188
-rw-r--r--  deps/v8/build/config/pch.gni  12
-rw-r--r--  deps/v8/build/config/posix/BUILD.gn  75
-rw-r--r--  deps/v8/build/config/posix/sysroot_ld_path.py  21
-rw-r--r--  deps/v8/build/config/python.gni  165
-rw-r--r--  deps/v8/build/config/sanitizers/BUILD.gn  650
-rw-r--r--  deps/v8/build/config/sanitizers/OWNERS  4
-rw-r--r--  deps/v8/build/config/sanitizers/sanitizers.gni  277
-rw-r--r--  deps/v8/build/config/sysroot.gni  101
-rw-r--r--  deps/v8/build/config/ui.gni  60
-rw-r--r--  deps/v8/build/config/v8_target_cpu.gni  61
-rw-r--r--  deps/v8/build/config/win/BUILD.gn  558
-rw-r--r--  deps/v8/build/config/win/console_app.gni  18
-rw-r--r--  deps/v8/build/config/win/manifest.gni  112
-rw-r--r--  deps/v8/build/config/win/visual_studio_version.gni  39
-rw-r--r--  deps/v8/build/config/zip.gni  50
-rwxr-xr-x  deps/v8/build/copy_test_data_ios.py  105
-rwxr-xr-x  deps/v8/build/cp.py  23
-rwxr-xr-x  deps/v8/build/detect_host_arch.py  53
-rwxr-xr-x  deps/v8/build/dir_exists.py  23
-rw-r--r--  deps/v8/build/docs/debugging_slow_builds.md  19
-rw-r--r--  deps/v8/build/docs/mac_hermetic_toolchain.md  44
-rw-r--r--  deps/v8/build/docs/writing_gn_templates.md  259
-rw-r--r--  deps/v8/build/dotfile_settings.gni  38
-rwxr-xr-x  deps/v8/build/download_nacl_toolchains.py  56
-rwxr-xr-x  deps/v8/build/download_translation_unit_tool.py  54
-rwxr-xr-x  deps/v8/build/env_dump.py  56
-rwxr-xr-x  deps/v8/build/extract_from_cab.py  63
-rwxr-xr-x  deps/v8/build/find_depot_tools.py  73
-rwxr-xr-x  deps/v8/build/find_isolated_tests.py  78
-rwxr-xr-x  deps/v8/build/fix_gn_headers.py  218
-rw-r--r--  deps/v8/build/fuchsia/OWNERS  9
-rw-r--r--  deps/v8/build/fuchsia/__init__.py  0
-rw-r--r--  deps/v8/build/fuchsia/boot_data.py  118
-rw-r--r--  deps/v8/build/fuchsia/common.py  86
-rw-r--r--  deps/v8/build/fuchsia/common_args.py  123
-rw-r--r--  deps/v8/build/fuchsia/device_target.py  282
-rwxr-xr-x  deps/v8/build/fuchsia/exe_runner.py  36
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/BUILD.gn  63
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/DEPS  4
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/fidl.py  549
-rwxr-xr-x  deps/v8/build/fuchsia/fidlgen_js/gen.py  673
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/runtime/fidl.mjs  270
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.cc  438
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.h  58
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/test/fidlgen_js_unittest.cc  1334
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/test/simple.fidl  142
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/third_party/__init__.py  0
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE  32
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium  15
-rw-r--r--  deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py  837
-rw-r--r--  deps/v8/build/fuchsia/layout_test_proxy/BUILD.gn  34
-rw-r--r--  deps/v8/build/fuchsia/layout_test_proxy/DEPS  3
-rw-r--r--  deps/v8/build/fuchsia/layout_test_proxy/layout_test_proxy.cc  78
-rw-r--r--  deps/v8/build/fuchsia/linux.sdk.sha1  1
-rw-r--r--  deps/v8/build/fuchsia/mac.sdk.sha1  1
-rw-r--r--  deps/v8/build/fuchsia/net_test_server.py  89
-rw-r--r--  deps/v8/build/fuchsia/qemu_target.py  178
-rwxr-xr-x  deps/v8/build/fuchsia/qemu_target_test.py  58
-rw-r--r--  deps/v8/build/fuchsia/remote_cmd.py  134
-rw-r--r--  deps/v8/build/fuchsia/run_package.py  224
-rw-r--r--  deps/v8/build/fuchsia/symbolizer.py  43
-rw-r--r--  deps/v8/build/fuchsia/target.py  346
-rwxr-xr-x  deps/v8/build/fuchsia/test_runner.py  131
-rwxr-xr-x  deps/v8/build/fuchsia/update_sdk.py  168
-rwxr-xr-x  deps/v8/build/gdb-add-index  184
-rwxr-xr-x  deps/v8/build/get_landmines.py  89
-rw-r--r--  deps/v8/build/git-hooks/OWNERS  3
-rwxr-xr-x  deps/v8/build/git-hooks/pre-commit  60
-rw-r--r--  deps/v8/build/gn_helpers.py  369
-rw-r--r--  deps/v8/build/gn_helpers_unittest.py  126
-rw-r--r--  deps/v8/build/gn_run_binary.py  34
-rwxr-xr-x  deps/v8/build/install-build-deps-android.sh  74
-rwxr-xr-x  deps/v8/build/install-build-deps.sh  717
-rwxr-xr-x  deps/v8/build/install-chroot.sh  888
-rw-r--r--  deps/v8/build/internal/README.chromium  24
-rw-r--r--  deps/v8/build/ios/OWNERS  1
-rw-r--r--  deps/v8/build/ios/chrome_ios.croc  71
-rwxr-xr-x  deps/v8/build/ios/clean_env.py  77
-rw-r--r--  deps/v8/build/landmine_utils.py  33
-rwxr-xr-x  deps/v8/build/landmines.py  145
-rw-r--r--  deps/v8/build/linux/BUILD.gn  31
-rw-r--r--  deps/v8/build/linux/OWNERS  3
-rw-r--r--  deps/v8/build/linux/chrome_linux.croc  29
-rw-r--r--  deps/v8/build/linux/dump_app_syms.py  29
-rw-r--r--  deps/v8/build/linux/extract_symbols.gni  42
-rwxr-xr-x  deps/v8/build/linux/install-chromeos-fonts.py  118
-rw-r--r--  deps/v8/build/linux/libbrlapi/BUILD.gn  26
-rw-r--r--  deps/v8/build/linux/libpci/BUILD.gn  22
-rw-r--r--  deps/v8/build/linux/libudev/BUILD.gn  65
-rwxr-xr-x  deps/v8/build/linux/pkg-config-wrapper  46
-rwxr-xr-x  deps/v8/build/linux/rewrite_dirs.py  71
-rwxr-xr-x  deps/v8/build/linux/sysroot_ld_path.sh  99
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/build_and_upload.py  101
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg  bin 0 -> 54194 bytes
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py  58
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py  36
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.amd64  329
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm  327
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm64  328
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.i386  327
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el  320
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel  320
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/install-sysroot.py  160
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/libdbus-1-3-symbols  235
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/merge-package-lists.py  34
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/sysroot-creator-sid.sh  397
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/sysroot-creator.sh  846
-rw-r--r--  deps/v8/build/linux/sysroot_scripts/sysroots.json  32
-rwxr-xr-x  deps/v8/build/linux/sysroot_scripts/update-archive-timestamp.sh  18
-rw-r--r--  deps/v8/build/linux/unbundle/README  57
-rw-r--r--  deps/v8/build/linux/unbundle/ffmpeg.gn  37
-rw-r--r--  deps/v8/build/linux/unbundle/flac.gn  32
-rw-r--r--  deps/v8/build/linux/unbundle/fontconfig.gn  13
-rw-r--r--  deps/v8/build/linux/unbundle/freetype.gn  14
-rw-r--r--  deps/v8/build/linux/unbundle/harfbuzz-ng.gn  13
-rw-r--r--  deps/v8/build/linux/unbundle/icu.gn  258
-rw-r--r--  deps/v8/build/linux/unbundle/libdrm.gn  22
-rw-r--r--  deps/v8/build/linux/unbundle/libevent.gn  17
-rw-r--r--  deps/v8/build/linux/unbundle/libjpeg.gn  12
-rw-r--r--  deps/v8/build/linux/unbundle/libpng.gn  25
-rw-r--r--  deps/v8/build/linux/unbundle/libvpx.gn  34
-rw-r--r--  deps/v8/build/linux/unbundle/libwebp.gn  39
-rw-r--r--  deps/v8/build/linux/unbundle/libxml.gn  18
-rw-r--r--  deps/v8/build/linux/unbundle/libxslt.gn  13
-rw-r--r--  deps/v8/build/linux/unbundle/openh264.gn  42
-rw-r--r--  deps/v8/build/linux/unbundle/opus.gn  45
-rw-r--r--  deps/v8/build/linux/unbundle/re2.gn  23
-rwxr-xr-x  deps/v8/build/linux/unbundle/remove_bundled_libraries.py  106
-rwxr-xr-x  deps/v8/build/linux/unbundle/replace_gn_files.py  84
-rw-r--r--  deps/v8/build/linux/unbundle/snappy.gn  22
-rw-r--r--  deps/v8/build/linux/unbundle/yasm.gn  102
-rw-r--r--  deps/v8/build/linux/unbundle/zlib.gn  66
-rwxr-xr-x  deps/v8/build/locale_tool.py  1483
-rw-r--r--  deps/v8/build/mac/OWNERS  4
-rwxr-xr-x  deps/v8/build/mac/find_sdk.py  105
-rwxr-xr-x  deps/v8/build/mac/should_use_hermetic_xcode.py  46
-rw-r--r--  deps/v8/build/mac/tweak_info_plist.gni  85
-rwxr-xr-x  deps/v8/build/mac/tweak_info_plist.py  366
-rwxr-xr-x  deps/v8/build/mac_toolchain.py  171
-rw-r--r--  deps/v8/build/nocompile.gni  117
-rwxr-xr-x  deps/v8/build/package_mac_toolchain.py  145
-rw-r--r--  deps/v8/build/precompile.cc  7
-rw-r--r--  deps/v8/build/precompile.h  53
-rwxr-xr-x  deps/v8/build/print_python_deps.py  155
-rwxr-xr-x  deps/v8/build/protoc_java.py  88
-rw-r--r--  deps/v8/build/protoc_java.pydeps  7
-rw-r--r--  deps/v8/build/redirect_stdout.py  19
-rwxr-xr-x  deps/v8/build/rm.py  38
-rwxr-xr-x  deps/v8/build/run_swarming_xcode_install.py  81
-rw-r--r--  deps/v8/build/sample_arg_file.gn  6
-rw-r--r--  deps/v8/build/sanitize-mac-build-log.sed  33
-rwxr-xr-x  deps/v8/build/sanitize-mac-build-log.sh  5
-rw-r--r--  deps/v8/build/sanitize-win-build-log.sed  15
-rwxr-xr-x  deps/v8/build/sanitize-win-build-log.sh  5
-rw-r--r--  deps/v8/build/sanitizers/OWNERS  10
-rw-r--r--  deps/v8/build/sanitizers/asan_suppressions.cc  23
-rw-r--r--  deps/v8/build/sanitizers/lsan_suppressions.cc  71
-rw-r--r--  deps/v8/build/sanitizers/sanitizer_options.cc  181
-rw-r--r--  deps/v8/build/sanitizers/tsan_suppressions.cc  213
-rw-r--r--  deps/v8/build/shim_headers.gni  41
-rw-r--r--  deps/v8/build/split_static_library.gni  77
-rwxr-xr-x  deps/v8/build/swarming_xcode_install.py  64
-rw-r--r--  deps/v8/build/symlink.gni  85
-rwxr-xr-x  deps/v8/build/symlink.py  92
-rw-r--r--  deps/v8/build/timestamp.gni  34
-rw-r--r--  deps/v8/build/toolchain/BUILD.gn  25
-rw-r--r--  deps/v8/build/toolchain/OWNERS  8
-rw-r--r--  deps/v8/build/toolchain/aix/BUILD.gn  21
-rw-r--r--  deps/v8/build/toolchain/android/BUILD.gn  141
-rw-r--r--  deps/v8/build/toolchain/cc_wrapper.gni  40
-rwxr-xr-x  deps/v8/build/toolchain/clang_code_coverage_wrapper.py  149
-rw-r--r--  deps/v8/build/toolchain/concurrent_links.gni  60
-rw-r--r--  deps/v8/build/toolchain/cros/BUILD.gn  173
-rw-r--r--  deps/v8/build/toolchain/cros_toolchain.gni  81
-rw-r--r--  deps/v8/build/toolchain/fuchsia/BUILD.gn  41
-rw-r--r--  deps/v8/build/toolchain/fuchsia/OWNERS  1
-rwxr-xr-x  deps/v8/build/toolchain/gcc_link_wrapper.py  74
-rwxr-xr-x  deps/v8/build/toolchain/gcc_solink_wrapper.py  123
-rw-r--r--  deps/v8/build/toolchain/gcc_toolchain.gni  643
-rw-r--r--  deps/v8/build/toolchain/get_concurrent_links.py  86
-rw-r--r--  deps/v8/build/toolchain/get_cpu_count.py  23
-rw-r--r--  deps/v8/build/toolchain/goma.gni  29
-rw-r--r--  deps/v8/build/toolchain/linux/BUILD.gn  300
-rw-r--r--  deps/v8/build/toolchain/linux/unbundle/BUILD.gn  41
-rw-r--r--  deps/v8/build/toolchain/linux/unbundle/README.md  41
-rw-r--r--  deps/v8/build/toolchain/mac/BUILD.gn  572
-rw-r--r--  deps/v8/build/toolchain/mac/OWNERS  2
-rw-r--r--  deps/v8/build/toolchain/mac/compile_xcassets.py  251
-rw-r--r--  deps/v8/build/toolchain/mac/compile_xcassets_unittests.py  141
-rw-r--r--  deps/v8/build/toolchain/mac/filter_libtool.py  54
-rw-r--r--  deps/v8/build/toolchain/mac/get_tool_mtime.py  17
-rwxr-xr-x  deps/v8/build/toolchain/mac/linker_driver.py  256
-rw-r--r--  deps/v8/build/toolchain/nacl/BUILD.gn  266
-rw-r--r--  deps/v8/build/toolchain/nacl_toolchain.gni  59
-rw-r--r--  deps/v8/build/toolchain/toolchain.gni  102
-rw-r--r--  deps/v8/build/toolchain/win/BUILD.gn  496
-rw-r--r--  deps/v8/build/toolchain/win/midl.gni  118
-rw-r--r--  deps/v8/build/toolchain/win/midl.py  238
-rwxr-xr-x  deps/v8/build/toolchain/win/ml.py  287
-rw-r--r--  deps/v8/build/toolchain/win/rc/.gitignore  3
-rw-r--r--  deps/v8/build/toolchain/win/rc/README.md  30
-rw-r--r--  deps/v8/build/toolchain/win/rc/linux64/rc.sha1  1
-rw-r--r--  deps/v8/build/toolchain/win/rc/mac/rc.sha1  1
-rwxr-xr-x  deps/v8/build/toolchain/win/rc/rc.py  192
-rwxr-xr-x  deps/v8/build/toolchain/win/rc/upload_rc_binaries.sh  46
-rw-r--r--  deps/v8/build/toolchain/win/rc/win/rc.exe.sha1  1
-rw-r--r--  deps/v8/build/toolchain/win/setup_toolchain.py  291
-rw-r--r--  deps/v8/build/toolchain/win/tool_wrapper.py  245
-rw-r--r--  deps/v8/build/toolchain/wrapper_utils.py  93
-rwxr-xr-x  deps/v8/build/tree_truth.sh  102
-rwxr-xr-x  deps/v8/build/update-linux-sandbox.sh  82
-rw-r--r--  deps/v8/build/util/BUILD.gn  51
-rw-r--r--  deps/v8/build/util/LASTCHANGE.dummy  1
-rw-r--r--  deps/v8/build/util/PRESUBMIT.py  58
-rw-r--r--  deps/v8/build/util/android_chrome_version.py  173
-rw-r--r--  deps/v8/build/util/android_chrome_version_test.py  293
-rw-r--r--  deps/v8/build/util/branding.gni  46
-rw-r--r--  deps/v8/build/util/generate_wrapper.gni  98
-rwxr-xr-x  deps/v8/build/util/generate_wrapper.py  136
-rw-r--r--  deps/v8/build/util/java_action.gni  103
-rwxr-xr-x  deps/v8/build/util/java_action.py  82
-rw-r--r--  deps/v8/build/util/lastchange.gni  16
-rwxr-xr-x  deps/v8/build/util/lastchange.py  318
-rw-r--r--  deps/v8/build/util/lib/common/PRESUBMIT.py  16
-rw-r--r--  deps/v8/build/util/lib/common/__init__.py  0
-rw-r--r--  deps/v8/build/util/lib/common/chrome_test_server_spawner.py  480
-rw-r--r--  deps/v8/build/util/lib/common/perf_result_data_type.py  20
-rw-r--r--  deps/v8/build/util/lib/common/perf_tests_results_helper.py  200
-rw-r--r--  deps/v8/build/util/lib/common/unittest_util.py  155
-rwxr-xr-x  deps/v8/build/util/lib/common/unittest_util_test.py  65
-rw-r--r--  deps/v8/build/util/lib/common/util.py  151
-rw-r--r--  deps/v8/build/util/process_version.gni  126
-rw-r--r--  deps/v8/build/util/version.gni  159
-rwxr-xr-x  deps/v8/build/util/version.py  259
-rw-r--r--  deps/v8/build/util/version_test.py  174
-rw-r--r--  deps/v8/build/util/webkit_version.h.in  9
-rwxr-xr-x  deps/v8/build/vs_toolchain.py  509
-rw-r--r--  deps/v8/build/whitespace_file.txt  177
-rw-r--r--  deps/v8/build/win/BUILD.gn  168
-rw-r--r--  deps/v8/build/win/as_invoker.manifest  9
-rw-r--r--  deps/v8/build/win/chrome_win.croc  26
-rw-r--r--  deps/v8/build/win/common_controls.manifest  8
-rw-r--r--  deps/v8/build/win/compatibility.manifest  17
-rwxr-xr-x  deps/v8/build/win/copy_cdb_to_output.py  121
-rw-r--r--  deps/v8/build/win/gn_meta_sln.py  212
-rw-r--r--  deps/v8/build/win/message_compiler.gni  89
-rw-r--r--  deps/v8/build/win/message_compiler.py  148
-rwxr-xr-x  deps/v8/build/win/reorder-imports.py  96
-rw-r--r--  deps/v8/build/win/require_administrator.manifest  9
-rwxr-xr-x  deps/v8/build/win/use_ansi_codes.py  12
-rwxr-xr-x  deps/v8/build/win_is_xtree_patched.py  28
-rwxr-xr-x  deps/v8/build/write_build_date_header.py  37
-rwxr-xr-x  deps/v8/build/write_buildflag_header.py  95
801 files changed, 111585 insertions(+), 3 deletions(-)
diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore
index 7fc0f66b37..0309c2290d 100644
--- a/deps/v8/.gitignore
+++ b/deps/v8/.gitignore
@@ -36,7 +36,6 @@
.torquelint-cache
.vscode
/_*
-/build
/buildtools
/check-header-includes
/hydrogen.cfg
diff --git a/deps/v8/DEPS b/deps/v8/DEPS
index 450bfd7862..d259ddf75a 100644
--- a/deps/v8/DEPS
+++ b/deps/v8/DEPS
@@ -56,8 +56,6 @@ vars = {
}
deps = {
- 'v8/build':
- Var('chromium_url') + '/chromium/src/build.git' + '@' + 'a0b2e3b2708bcf81ec00ac1738b586bcc5e04eea',
'v8/third_party/depot_tools':
Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '7e7523be4e21b0841ae815ef37521a5476f68549',
'v8/third_party/icu':
diff --git a/deps/v8/build/.gitignore b/deps/v8/build/.gitignore
new file mode 100644
index 0000000000..c23f5b2796
--- /dev/null
+++ b/deps/v8/build/.gitignore
@@ -0,0 +1,25 @@
+# This file is needed for projects that have this directory as a separate Git
+# mirror in DEPS. Without it, a lot is wiped and re-downloaded for each sync.
+*.pyc
+ciopfs
+/android/bin
+/android/binary_size/apks/**/*.apk
+/config/gclient_args.gni
+/cros_cache/
+/Debug
+/Debug_x64
+/goma
+/gomacc.lock
+/ipch/
+/Release
+/Release_x64
+/win_toolchain.json
+/util/LASTCHANGE*
+/util/support
+/x64/
+/linux/debian_*-sysroot/
+/linux/ubuntu_*-sysroot/
+/ios_files
+/mac_files
+
+!/util/LASTCHANGE.dummy
diff --git a/deps/v8/build/BUILD.gn b/deps/v8/build/BUILD.gn
new file mode 100644
index 0000000000..7ab955abe1
--- /dev/null
+++ b/deps/v8/build/BUILD.gn
@@ -0,0 +1,9 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+source_set("buildflag_header_h") {
+ sources = [
+ "buildflag.h",
+ ]
+}
diff --git a/deps/v8/build/OWNERS b/deps/v8/build/OWNERS
new file mode 100644
index 0000000000..815173ec89
--- /dev/null
+++ b/deps/v8/build/OWNERS
@@ -0,0 +1,28 @@
+agrieve@chromium.org
+brucedawson@chromium.org
+dpranke@chromium.org
+jbudorick@chromium.org
+jochen@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
+thomasanderson@chromium.org
+tikuta@chromium.org
+
+# Clang build config changes
+hans@chromium.org
+
+per-file .gitignore=*
+per-file check_gn_headers_whitelist.txt=*
+per-file mac_toolchain.py=erikchen@chromium.org
+per-file mac_toolchain.py=justincohen@chromium.org
+per-file package_mac_toolchain.py=erikchen@chromium.org
+per-file package_mac_toolchain.py=justincohen@chromium.org
+per-file whitespace_file.txt=*
+per-file OWNERS.status=*
+
+# gn-dev is probably a better team here, but the tooling won't let us
+# have more than one team per component, and infra-dev is a catch-all
+# for other build-related lists.
+#
+# TEAM: infra-dev@chromium.org
+# COMPONENT: Build
diff --git a/deps/v8/build/OWNERS.status b/deps/v8/build/OWNERS.status
new file mode 100644
index 0000000000..f5cc1fc8ba
--- /dev/null
+++ b/deps/v8/build/OWNERS.status
@@ -0,0 +1,12 @@
+# Use this file to set a global status message that should be shown whenever
+# git cl owners proposes to add you as a reviewer.
+#
+# The status messages should be somewhat stable, so please don't use this for
+# short-term or frequently changing updates.
+#
+# The format of the file is
+#
+# you@chromium.org: Single line status message.
+#
+
+jochen@chromium.org: EMEA based reviewer.
diff --git a/deps/v8/build/README.md b/deps/v8/build/README.md
new file mode 100644
index 0000000000..32a9f85d97
--- /dev/null
+++ b/deps/v8/build/README.md
@@ -0,0 +1,34 @@
+# About
+`//build` contains:
+ * Core GN templates and configuration
+ * Core Python build scripts
+
+Since this directory is DEPS'ed in by some other repositories (webrtc, pdfium,
+v8, etc), it should be kept as self-contained as possible by not referring
+to files outside of it. Some exceptions exist (`//testing`, select
+`//third_party` subdirectories), but new dependencies tend to break these other
+projects, and so should be avoided.
+
+Changes to //build should be landed in the Chromium repo. They will then be
+replicated to the stand-alone [build repo](https://chromium.googlesource.com/chromium/src/build)
+by the [gsubtreed tool](https://chromium.googlesource.com/infra/infra/+/master/infra/services/gsubtreed).
+
+## Contents
+ * `//build/config` - Common templates via `.gni` files.
+ * `//build/toolchain` - GN toolchain definitions.
+ * `Other .py files` - Some are used by GN/Ninja, some by gclient hooks, and
+   some are just random utilities.
+
+Files referenced by `//.gn`:
+ * `//build/BUILDCONFIG.gn` - Included by all `BUILD.gn` files.
+ * `//build/secondary` - An overlay for `BUILD.gn` files. Enables adding
+ `BUILD.gn` to directories that live in sub-repositories.
+ * `//build_overrides` -
+ Refer to [//build_overrides/README.md](../build_overrides/README.md).
+
+## Docs
+
+* [Writing GN Templates](docs/writing_gn_templates.md)
+* [Debugging Slow Builds](docs/debugging_slow_builds.md)
+* [Mac Hermetic Toolchains](docs/mac_hermetic_toolchain.md)
+* [Android Build Documentation](android/docs/README.md)
diff --git a/deps/v8/build/android/.style.yapf b/deps/v8/build/android/.style.yapf
new file mode 100644
index 0000000000..ef24bfc6b1
--- /dev/null
+++ b/deps/v8/build/android/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+column_limit = 80
+blank_line_before_nested_class_or_def = true
+blank_line_before_module_docstring = true
+indent_width = 2
diff --git a/deps/v8/build/android/AndroidManifest.xml b/deps/v8/build/android/AndroidManifest.xml
new file mode 100644
index 0000000000..fe21b80b4b
--- /dev/null
+++ b/deps/v8/build/android/AndroidManifest.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved. Use of this
+ source code is governed by a BSD-style license that can be found in the
+ LICENSE file.
+-->
+
+<!--
+ This is a dummy manifest which is required by:
+ 1. aapt when generating R.java in java.gypi:
+ Nothing in the manifest is used, but it is still required by aapt.
+ 2. lint: [min|target]SdkVersion are required by lint and should
+ be kept up to date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.dummy">
+
+ <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="24" />
+
+</manifest>
diff --git a/deps/v8/build/android/BUILD.gn b/deps/v8/build/android/BUILD.gn
new file mode 100644
index 0000000000..f864430562
--- /dev/null
+++ b/deps/v8/build/android/BUILD.gn
@@ -0,0 +1,135 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+import("//build/config/python.gni")
+
+if (enable_java_templates) {
+ sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar"
+
+ # Create or update the API versions cache if necessary by running a
+ # functionally empty lint task. This prevents racy creation of the
+ # cache while linting java targets in android_lint.
+ android_lint("prepare_android_lint_cache") {
+ android_manifest = "//build/android/AndroidManifest.xml"
+ create_cache = true
+ }
+
+ action("find_sun_tools_jar") {
+ script = "//build/android/gyp/find_sun_tools_jar.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ sun_tools_jar_path,
+ ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--output",
+ rebase_path(sun_tools_jar_path, root_build_dir),
+ ]
+ }
+
+ java_prebuilt("sun_tools_java") {
+ jar_path = sun_tools_jar_path
+ deps = [
+ ":find_sun_tools_jar",
+ ]
+ }
+
+  # Write some GN vars to a file; they are useful to scripts that use the
+  # output directory. The format is chosen to be easily importable by both
+  # Python and bash.
+ _lines = [
+ "android_sdk_build_tools=" +
+ rebase_path(android_sdk_build_tools, root_build_dir),
+ "android_sdk_build_tools_version=$android_sdk_build_tools_version",
+ "android_sdk_tools_version_suffix=$android_sdk_tools_version_suffix",
+ "android_sdk_root=" + rebase_path(android_sdk_root, root_build_dir),
+ "android_sdk_version=$android_sdk_version",
+ "android_ndk_root=" + rebase_path(android_ndk_root, root_build_dir),
+ "android_tool_prefix=" + rebase_path(android_tool_prefix, root_build_dir),
+ ]
+ if (defined(android_secondary_abi_cpu)) {
+ _secondary_label_info =
+ get_label_info(":foo($android_secondary_abi_toolchain)", "root_out_dir")
+ _lines += [ "android_secondary_abi_toolchain=" +
+ rebase_path(_secondary_label_info, root_build_dir) ]
+ }
+ if (defined(build_apk_secondary_abi)) {
+ _lines += [ "build_apk_secondary_abi=$build_apk_secondary_abi" ]
+ }
+ write_file(android_build_vars, _lines)
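+
+  # The file written above is a plain list of key=value lines, e.g.
+  # (illustrative values):
+  #   android_sdk_version=28
+  #   android_ndk_root=../../third_party/android_ndk
+  # which bash scripts can source directly and Python can parse by
+  # splitting each line on '='.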
+}
+
+python_library("devil_chromium_py") {
+ pydeps_file = "devil_chromium.pydeps"
+ data = [
+ "devil_chromium.py",
+ "devil_chromium.json",
+ "//third_party/catapult/third_party/gsutil/",
+ "//third_party/catapult/devil/devil/devil_dependencies.json",
+ ]
+}
+
+python_library("test_runner_py") {
+ pydeps_file = "test_runner.pydeps"
+ data = [
+ "pylib/gtest/filter/",
+ "pylib/instrumentation/render_test.html.jinja",
+ "test_wrapper/logdog_wrapper.py",
+ "${android_sdk_build_tools}/aapt",
+ "${android_sdk_build_tools}/dexdump",
+ "${android_sdk_build_tools}/lib64/libc++.so",
+ "${android_sdk_build_tools}/split-select",
+ "${android_sdk_root}/platform-tools/adb",
+ ]
+ data_deps = [
+ ":devil_chromium_py",
+ ]
+ if (is_asan) {
+ data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+ }
+
+ # Proguard is needed only when using apks (rather than native executables).
+ if (enable_java_templates) {
+ deps = [
+ "//third_party/proguard:proguard603_java",
+ ]
+ }
+}
+
+python_library("logdog_wrapper_py") {
+ pydeps_file = "test_wrapper/logdog_wrapper.pydeps"
+}
+
+python_library("resource_sizes_py") {
+ pydeps_file = "resource_sizes.pydeps"
+ data_deps = [
+ ":devil_chromium_py",
+ ]
+ data = [
+ android_build_vars,
+ android_readelf,
+ ]
+}
+
+# Create wrapper scripts in out/bin that take care of setting the
+# --output-directory.
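+#
+# For example (illustrative paths), a Debug build would produce
+# out/Debug/bin/tombstones, which runs build/android/tombstones.py with
+# --output-directory=out/Debug already set.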
+_scripts_to_wrap = [
+ "asan_symbolize.py",
+ "tombstones.py",
+]
+
+_wrapper_targets = []
+foreach(script, _scripts_to_wrap) {
+ _target_name = get_path_info(script, "name") + "_wrapper"
+ _wrapper_targets += [ ":$_target_name" ]
+ wrapper_script(_target_name) {
+ target = script
+ }
+}
+
+group("wrapper_scripts") {
+ deps = _wrapper_targets
+}
diff --git a/deps/v8/build/android/CheckInstallApk-debug.apk b/deps/v8/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000000..3dc31910a5
--- /dev/null
+++ b/deps/v8/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/deps/v8/build/android/OWNERS b/deps/v8/build/android/OWNERS
new file mode 100644
index 0000000000..2feaebcf83
--- /dev/null
+++ b/deps/v8/build/android/OWNERS
@@ -0,0 +1,7 @@
+estevenson@chromium.org
+jbudorick@chromium.org
+pasko@chromium.org
+perezju@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/PRESUBMIT.py b/deps/v8/build/android/PRESUBMIT.py
new file mode 100644
index 0000000000..0ec045cc23
--- /dev/null
+++ b/deps/v8/build/android/PRESUBMIT.py
@@ -0,0 +1,97 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ output = []
+
+ build_android_dir = input_api.PresubmitLocalPath()
+
+ def J(*dirs):
+ """Returns a path relative to presubmit directory."""
+ return input_api.os_path.join(build_android_dir, *dirs)
+
+ build_pys = [
+ r'gyp/.*\.py$',
+ r'gn/.*\.py',
+ ]
+ output.extend(input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ pylintrc='pylintrc',
+ black_list=build_pys,
+ extra_paths_list=[
+ J(),
+ J('gyp'),
+ J('buildbot'),
+ J('..', 'util', 'lib', 'common'),
+ J('..', '..', 'third_party', 'catapult', 'common', 'py_trace_event'),
+ J('..', '..', 'third_party', 'catapult', 'common', 'py_utils'),
+ J('..', '..', 'third_party', 'catapult', 'devil'),
+ J('..', '..', 'third_party', 'catapult', 'tracing'),
+ J('..', '..', 'third_party', 'depot_tools'),
+ ]))
+ output.extend(input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ white_list=build_pys,
+ extra_paths_list=[J('gyp'), J('gn')]))
+
+ # Disabled due to http://crbug.com/410936
+ #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+ #input_api, output_api, J('buildbot', 'tests')))
+
+ pylib_test_env = dict(input_api.environ)
+ pylib_test_env.update({
+ 'PYTHONPATH': build_android_dir,
+ 'PYTHONDONTWRITEBYTECODE': '1',
+ })
+ output.extend(
+ input_api.canned_checks.RunUnitTests(
+ input_api,
+ output_api,
+ unit_tests=[
+ J('.', 'emma_coverage_stats_test.py'),
+ J('.', 'list_class_verification_failures_test.py'),
+ J('gyp', 'util', 'build_utils_test.py'),
+ J('gyp', 'util', 'md5_check_test.py'),
+ J('gyp', 'util', 'resource_utils_test.py'),
+ J('pylib', 'constants', 'host_paths_unittest.py'),
+ J('pylib', 'gtest', 'gtest_test_instance_test.py'),
+ J('pylib', 'instrumentation',
+ 'instrumentation_test_instance_test.py'),
+ J('pylib', 'local', 'device',
+ 'local_device_instrumentation_test_run_test.py'),
+ J('pylib', 'local', 'device', 'local_device_test_run_test.py'),
+ J('pylib', 'output', 'local_output_manager_test.py'),
+ J('pylib', 'output', 'noop_output_manager_test.py'),
+ J('pylib', 'output', 'remote_output_manager_test.py'),
+ J('pylib', 'results', 'json_results_test.py'),
+ J('pylib', 'symbols', 'apk_native_libs_unittest.py'),
+ J('pylib', 'symbols', 'elf_symbolizer_unittest.py'),
+ J('pylib', 'symbols', 'symbol_utils_unittest.py'),
+ J('pylib', 'utils', 'decorators_test.py'),
+ J('pylib', 'utils', 'device_dependencies_test.py'),
+ J('pylib', 'utils', 'dexdump_test.py'),
+ J('pylib', 'utils', 'proguard_test.py'),
+ J('pylib', 'utils', 'test_filter_test.py'),
+ J('.', 'convert_dex_profile_tests.py'),
+ ],
+ env=pylib_test_env))
+
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/deps/v8/build/android/adb_chrome_public_command_line b/deps/v8/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000000..86ece8cec7
--- /dev/null
+++ b/deps/v8/build/android/adb_chrome_public_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+# adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+# adb_chrome_public_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name chrome-command-line "$@"
diff --git a/deps/v8/build/android/adb_command_line.py b/deps/v8/build/android/adb_command_line.py
new file mode 100755
index 0000000000..2f3a615a35
--- /dev/null
+++ b/deps/v8/build/android/adb_command_line.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for reading / writing command-line flag files on device(s)."""
+
+import argparse
+import logging
+import sys
+
+import devil_chromium # pylint: disable=import-error, unused-import
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.tools import script_common
+from devil.utils import cmd_helper
+from devil.utils import logging_common
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.usage = '''%(prog)s --name FILENAME [--device SERIAL] [flags...]
+
+No flags: Prints existing command-line file.
+Empty string: Deletes command-line file.
+Otherwise: Writes command-line file.
+
+'''
+ parser.add_argument('--name', required=True,
+ help='Name of file where to store flags on the device.')
+ parser.add_argument('-e', '--executable', dest='executable', default='chrome',
+ help='(deprecated) No longer used.')
+ script_common.AddEnvironmentArguments(parser)
+ script_common.AddDeviceArguments(parser)
+ logging_common.AddLoggingArguments(parser)
+
+ args, remote_args = parser.parse_known_args()
+ script_common.InitializeEnvironment(args)
+ logging_common.InitializeLogging(args)
+
+ devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices,
+ default_retries=0)
+ all_devices = device_utils.DeviceUtils.parallel(devices)
+
+ if not remote_args:
+ # No args == do not update, just print flags.
+ remote_args = None
+ action = ''
+ elif len(remote_args) == 1 and not remote_args[0]:
+ # Single empty string arg == delete flags
+ remote_args = []
+ action = 'Deleted command line file. '
+ else:
+ action = 'Wrote command line file. '
+
+ is_webview = args.name == 'webview-command-line'
+
+ def update_flags(device):
+ if device.IsUserBuild() and is_webview:
+ raise device_errors.CommandFailedError(
+ 'WebView only respects flags on a userdebug or eng device, yours '
+ 'is a user build.', device)
+ elif device.IsUserBuild():
+ logging.warning(
+ 'Your device (%s) is a user build; Chrome may or may not pick up '
+ 'your commandline flags. Check your '
+ '"command_line_on_non_rooted_enabled" preference, or switch '
+ 'devices.', device)
+ changer = flag_changer.FlagChanger(device, args.name)
+ if remote_args is not None:
+ flags = changer.ReplaceFlags(remote_args)
+ else:
+ flags = changer.GetCurrentFlags()
+ return (device, device.build_description, flags)
+
+ updated_values = all_devices.pMap(update_flags).pGet(None)
+
+ print '%sCurrent flags (in %s):' % (action, args.name)
+ for d, desc, flags in updated_values:
+ if flags:
+ # Shell-quote flags for easy copy/paste as new args on the terminal.
+ quoted_flags = ' '.join(cmd_helper.SingleQuote(f) for f in sorted(flags))
+ else:
+ quoted_flags = '( empty )'
+ print ' %s (%s): %s' % (d, desc, quoted_flags)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/adb_gdb b/deps/v8/build/android/adb_gdb
new file mode 100755
index 0000000000..1dc3ce5f3b
--- /dev/null
+++ b/deps/v8/build/android/adb_gdb
@@ -0,0 +1,1000 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly; use one of the
+# wrapper scripts instead, like adb_gdb_content_shell.
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Force locale to C to allow recognizing output from subprocesses.
+LC_ALL=C
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+COMMAND_SUFFIX=
+
+clean_exit () {
+ if [ "$TMPDIR" ]; then
+ GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+ if [ "$GDBSERVER_PID" ]; then
+ log "Killing background gdbserver process: $GDBSERVER_PID"
+ kill -9 $GDBSERVER_PID >/dev/null 2>&1
+ rm -f "$GDBSERVER_PIDFILE"
+ fi
+ if [ "$TARGET_GDBSERVER" ]; then
+ log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+ "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" \
+ "$TARGET_DOMAIN_SOCKET" "$COMMAND_SUFFIX" >/dev/null 2>&1
+ fi
+ log "Cleaning up: $TMPDIR"
+ rm -rf "$TMPDIR"
+ fi
+ trap "" EXIT
+ exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+ echo "ERROR: $@" >&2
+ exit 1
+}
+
+fail_panic () {
+ if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+ if [ "$VERBOSE" -gt 0 ]; then
+ echo "$@"
+ fi
+}
+
+DEFAULT_PULL_LIBS_DIR="/tmp/adb-gdb-support-$USER"
+IDE_DIR="$DEFAULT_PULL_LIBS_DIR"
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display proper program names in help and log output.
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ADB=
+ANNOTATE=
+CGDB=
+GDBINIT=
+GDBSERVER=
+HELP=
+IDE=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+ATTACH_DELAY=1
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+ optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+ case $opt in
+ --adb=*)
+ ADB=$optarg
+ ;;
+ --device=*)
+ export ANDROID_SERIAL=$optarg
+ ;;
+    --annotate=*)
+ ANNOTATE=$optarg
+ ;;
+ --gdbserver=*)
+ GDBSERVER=$optarg
+ ;;
+ --gdb=*)
+ GDB=$optarg
+ ;;
+ --help|-h|-?)
+ HELP=true
+ ;;
+ --ide)
+ IDE=true
+ ;;
+ --ndk-dir=*)
+ NDK_DIR=$optarg
+ ;;
+ --no-pull-libs)
+ NO_PULL_LIBS=true
+ ;;
+ --package-name=*)
+ PACKAGE_NAME=$optarg
+ ;;
+ --pid=*)
+ PID=$optarg
+ ;;
+ --port=*)
+ PORT=$optarg
+ ;;
+ --program-name=*)
+ PROGRAM_NAME=$optarg
+ ;;
+ --pull-libs)
+ PULL_LIBS=true
+ ;;
+ --pull-libs-dir=*)
+ PULL_LIBS_DIR=$optarg
+ ;;
+ --script=*)
+ GDBINIT=$optarg
+ ;;
+ --attach-delay=*)
+ ATTACH_DELAY=$optarg
+ ;;
+ --su-prefix=*)
+ SU_PREFIX=$optarg
+ ;;
+ --symbol-dir=*)
+ SYMBOL_DIR=$optarg
+ ;;
+ --output-directory=*)
+ CHROMIUM_OUTPUT_DIR=$optarg
+ ;;
+ --target-arch=*)
+ TARGET_ARCH=$optarg
+ ;;
+ --toolchain=*)
+ TOOLCHAIN=$optarg
+ ;;
+ --cgdb)
+ CGDB=cgdb
+ ;;
+ --cgdb=*)
+ CGDB=$optarg
+ ;;
+ --verbose)
+ VERBOSE=$(( $VERBOSE + 1 ))
+ ;;
+ -*)
+ panic "Unknown option $opt, see --help." >&2
+ ;;
+ *)
+ if [ "$PACKAGE_NAME" ]; then
+ panic "You can only provide a single package name as argument!\
+ See --help."
+ fi
+ PACKAGE_NAME=$opt
+ ;;
+ esac
+done
+
+if [ "$HELP" ]; then
+ if [ "$ADB_GDB_PROGNAME" ]; then
+ # Assume wrapper scripts all provide a default package name.
+ cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+ else
+ # Assume this is a direct call to adb_gdb
+ cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+ fi
+
+ cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+ - target gdbserver binary
+ - host gdb client (e.g. arm-linux-androideabi-gdb)
+ - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+ \$CHROMIUM_SRC/<out>/lib/ (used by GYP builds)
+ \$CHROMIUM_SRC/<out>/lib.unstripped/ (used by GN builds)
+
+Where <out> is determined by CHROMIUM_OUTPUT_DIR, or --output-directory.
+
+You can set the path manually via --symbol-dir.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-dir options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and a device is not specified with either --device or ANDROID_SERIAL.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+greatly improves the debugging experience, e.g. by allowing readable
+thread stacks. The libraries are copied to the following
+directory by default:
+
+ $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
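+Example (assuming a debuggable ChromePublic build in out/Debug; substitute
+the package name of the app you are debugging):
+
+  adb_gdb --output-directory=out/Debug org.chromium.chrome
+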
+Valid options:
+ --help|-h|-? Print this message.
+ --verbose Increase verbosity.
+
+ --cgdb[=<file>] Use cgdb (an interface for gdb that shows the code).
+ --symbol-dir=<path> Specify directory with symbol shared libraries.
+ --output-directory=<path> Specify the output directory (e.g. "out/Debug").
+ --package-name=<name> Specify package name (alternative to 1st argument).
+ --program-name=<name> Specify program name (cosmetic only).
+ --pid=<pid> Specify application process pid.
+ --attach-delay=<num> Seconds to wait for gdbserver to attach to the
+ remote process before starting gdb. Default 1.
+ <num> may be a float if your sleep(1) supports it.
+ --annotate=<num> Enable gdb annotation.
+ --script=<file> Specify extra GDB init script.
+
+ --gdbserver=<file> Specify target gdbserver binary.
+ --gdb=<file> Specify host gdb client binary.
+ --target-arch=<name> Specify NDK target arch.
+ --adb=<file> Specify host ADB binary.
+  --device=<serial>     ADB device serial to use (-s flag).
+ --port=<port> Specify the tcp port to use.
+ --ide Forward gdb port, but do not enter gdb console.
+
+ --su-prefix=<prefix> Prepend <prefix> to 'adb shell' commands that are
+ run by this script. This can be useful to use
+ the 'su' program on rooted production devices.
+ e.g. --su-prefix="su -c"
+
+ --pull-libs Force system libraries extraction.
+ --no-pull-libs Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+EOF
+ exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+ panic "Please specify a package name on the command line. See --help."
+fi
+
+if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then
+ if [[ -e "build.ninja" ]]; then
+ CHROMIUM_OUTPUT_DIR=$PWD
+ else
+ panic "Please specify an output directory by using one of:
+ --output-directory=out/Debug
+ CHROMIUM_OUTPUT_DIR=out/Debug
+       Running this script from inside an output directory.
+ See --help."
+ fi
+fi
+
+if ls *.so >/dev/null 2>&1; then
+ panic ".so files found in your working directory. These will conflict with" \
+ "library lookup logic. Change your working directory and try again."
+fi
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_OUTPUT_DIR.
+#
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+ # GYP places unstripped libraries under out/lib
+ # GN places them under out/lib.unstripped
+ local PARENT_DIR="$CHROMIUM_OUTPUT_DIR"
+ if [[ ! -e "$PARENT_DIR" ]]; then
+ PARENT_DIR="$CHROMIUM_SRC/$PARENT_DIR"
+ fi
+ SYMBOL_DIR="$PARENT_DIR/lib.unstripped"
+ if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+ SYMBOL_DIR="$PARENT_DIR/lib"
+ if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+ panic "Could not find any symbols under \
+$PARENT_DIR/lib{.unstripped}. Please build the program first!"
+ fi
+ fi
+ log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+ detect_symbol_dir
+elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+ panic "Could not find any symbols under $SYMBOL_DIR"
+fi
+
+if [ -z "$NDK_DIR" ]; then
+ ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+ if [ ! -d "$NDK_DIR" ]; then
+ panic "Invalid directory: $NDK_DIR"
+ fi
+ if [ ! -f "$NDK_DIR/ndk-build" ]; then
+ panic "Not a valid NDK directory: $NDK_DIR"
+ fi
+ ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+ panic "Unknown --script file: $GDBINIT"
+fi
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+ ADB=$(which adb 2>/dev/null)
+ if [ -z "$ADB" ]; then
+ panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+ fi
+ log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+ panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If more than one device is connected and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+ echo "ERROR: There is more than one Android device connected to ADB."
+ echo "Please define ANDROID_SERIAL to specify which one to use."
+ exit 1
+fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+ local TMPOUT="$(mktemp)"
+ local LASTLINE RET
+ local ADB=${ADB:-adb}
+
+ # The weird sed rule is to strip the final \r on each output line
+ # Since 'adb shell' never returns the command's proper exit/status code,
+ # we force it to print it as '%%<status>' in the temporary output file,
+ # which we will later strip from it.
+ $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+ sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+ # Get last line in log, which contains the exit code from the command
+ LASTLINE=$(sed -e '$!d' $TMPOUT)
+ # Extract the status code from the end of the line, which must
+ # be '%%<code>'.
+ RET=$(echo "$LASTLINE" | \
+ awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+ # Remove the status code from the last line. Note that this may result
+ # in an empty line.
+ LASTLINE=$(echo "$LASTLINE" | \
+ awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+ # The output itself: all lines except the status code.
+ sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+ # Remove temp file.
+ rm -f $TMPOUT
+ # Exit with the appropriate status.
+ return $RET
+}
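+
+# Example (hypothetical): since adb_shell preserves the remote exit status,
+# it composes with fail_panic:
+#   adb_shell ls /system/bin/app_process32 >/dev/null
+#   fail_panic "app_process32 is missing on this device!"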
+
+# Find the target architecture from a local shared library.
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+ # ls prints a broken pipe error when there are a lot of libs.
+ local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null| head -n1)
+ local SO_DESC=$(file $RANDOM_LIB)
+  case $SO_DESC in
+ *32-bit*ARM,*) echo "arm";;
+ *64-bit*ARM,*) echo "arm64";;
+ *32-bit*Intel,*) echo "x86";;
+ *x86-64,*) echo "x86_64";;
+ *32-bit*MIPS,*) echo "mips";;
+ *) echo "";
+ esac
+}
+
+if [ -z "$TARGET_ARCH" ]; then
+ TARGET_ARCH=$(get_gyp_target_arch)
+ if [ -z "$TARGET_ARCH" ]; then
+ TARGET_ARCH=arm
+ fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name instead because it uses that name
+  # to find NDK-specific files (e.g. the host gdb).
+ if [ "$TARGET_ARCH" = "ia32" ]; then
+ TARGET_ARCH=x86
+ log "Auto-config: --arch=$TARGET_ARCH (equivalent to ia32)"
+ fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+ local HOST_OS
+ if [ -z "$NDK_HOST_SYSTEM" ]; then
+ HOST_OS=$(uname -s)
+ case $HOST_OS in
+ Linux) NDK_HOST_SYSTEM=linux;;
+ Darwin) NDK_HOST_SYSTEM=darwin;;
+ *) panic "You can't run this script on this system: $HOST_OS";;
+ esac
+ fi
+ echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+ local HOST_ARCH HOST_OS
+ if [ -z "$NDK_HOST_ARCH" ]; then
+ HOST_OS=$(get_ndk_host_system)
+ HOST_ARCH=$(uname -p)
+ if [ "$HOST_ARCH" = "unknown" ]; then
+ # In case where "-p" returns "unknown" just use "-m" (machine hardware
+ # name). According to this patch from Fedora "-p" is equivalent to "-m"
+ # anyway: https://goo.gl/Pd47x3
+ HOST_ARCH=$(uname -m)
+ fi
+ case $HOST_ARCH in
+ i?86) NDK_HOST_ARCH=x86;;
+ x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+ *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+ esac
+ # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+ if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+ # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+ # implementations of the tool. See http://b.android.com/53769
+ HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+ if [ "$HOST_64BITS" ]; then
+ NDK_HOST_ARCH=x86_64
+ fi
+ fi
+ fi
+ echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+ case $1 in
+ arm)
+ echo "arm-linux-androideabi"
+ ;;
+ arm64)
+ echo "aarch64-linux-android"
+ ;;
+ x86)
+ echo "i686-linux-android"
+ ;;
+ x86_64)
+ echo "x86_64-linux-android"
+ ;;
+ mips)
+ echo "mipsel-linux-android"
+ ;;
+ *)
+ echo "$ARCH-linux-android"
+ ;;
+ esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+ # Return the configure triplet, except for x86 and x86_64!
+ if [ "$1" = "x86" -o "$1" = "x86_64" ]; then
+ echo "$1"
+ else
+ get_arch_gnu_config $1
+ fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+ local NDK_DIR="${1%/}"
+ local ARCH="$2"
+ local SUBPATH="$3"
+ local NAME="$(get_arch_toolchain_prefix $ARCH)"
+ local FILE TARGET
+ FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+ if [ ! -f "$FILE" ]; then
+ FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+ if [ ! -f "$FILE" ]; then
+ FILE=
+ fi
+ fi
+ echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+# ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+ local NDK_DIR="$1"
+ local ARCH="$2"
+ local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+
+ # NOTE: This will need to be updated if the NDK changes the names or moves
+ # the location of its prebuilt toolchains.
+ #
+ GCC=
+ HOST_OS=$(get_ndk_host_system)
+ HOST_ARCH=$(get_ndk_host_arch)
+ CONFIG=$(get_arch_gnu_config $ARCH)
+ GCC=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
+ if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
+ GCC=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+ fi
+ if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+ # Special case, the x86 toolchain used to be incorrectly
+ # named i686-android-linux-gcc!
+ GCC=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+ fi
+ if [ -z "$GCC" ]; then
+ panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+ fi
+ echo "${GCC%%gcc}"
+}
+
+# $1: NDK install path
+get_ndk_host_gdb_client() {
+ local NDK_DIR="$1"
+ local HOST_OS HOST_ARCH
+
+ HOST_OS=$(get_ndk_host_system)
+ HOST_ARCH=$(get_ndk_host_arch)
+ echo "$NDK_DIR/prebuilt/$HOST_OS-$HOST_ARCH/bin/gdb"
+}
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+ local NDK_DIR="$1"
+ local ARCH=$2
+ local BINARY
+
+ # The location has moved after NDK r8
+ BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+ if [ ! -f "$BINARY" ]; then
+ BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+ fi
+ echo "$BINARY"
+}
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match, since
+# mismatched binaries may not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+ ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+ "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+ ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+ log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+ # Be flexible, allow one to specify either the install path or the bin
+ # sub-directory in --toolchain:
+ #
+ if [ -d "$TOOLCHAIN/bin" ]; then
+ TOOLCHAIN=$TOOLCHAIN/bin
+ fi
+ ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+if [ -z "$GDB" ]; then
+ GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT")
+ if [ -z "$GDB" ]; then
+ panic "Can't find Android gdb client in your path, check your \
+--toolchain or --gdb path."
+ fi
+ log "Host gdb client: $GDB"
+fi
+
+# Find the gdbserver binary; we will later push it to /data/local/tmp.
+# This ensures that gdbserver and $GDB talk the same binary protocol;
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+ GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+ if [ -z "$GDBSERVER" ]; then
+ panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+ fi
+ log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# Return the timestamp of a given file, as number of seconds since epoch.
+# $1: file path
+# Out: file timestamp
+get_file_timestamp () {
+ stat -c %Y "$1" 2>/dev/null
+}
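+
+# Note: 'stat -c %Y' is GNU coreutils syntax; BSD/macOS stat spells this
+# 'stat -f %m', so the helper above assumes a GNU-style stat on the host.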
+
+# Allow several concurrent debugging sessions
+APP_DATA_DIR=$(adb_shell run-as $PACKAGE_NAME /system/bin/sh -c pwd)
+fail_panic "Failed to run-as $PACKAGE_NAME, is the app debuggable?"
+TARGET_GDBSERVER="$APP_DATA_DIR/gdbserver-adb-gdb-$TMP_ID"
+TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Select correct app_process for architecture.
+case $TARGET_ARCH in
+ arm|x86|mips) GDBEXEC=app_process32;;
+ arm64|x86_64) GDBEXEC=app_process64; SUFFIX_64_BIT=64;;
+ *) panic "Unknown app_process for architecture!";;
+esac
+
+# Default to app_process if bit-width specific process isn't found.
+adb_shell ls /system/bin/$GDBEXEC > /dev/null
+if [ $? != 0 ]; then
+ GDBEXEC=app_process
+fi
+
+# Detect AddressSanitizer setup on the device. In that case app_process is a
+# script, and the real executable is app_process.real.
+GDBEXEC_ASAN=app_process.real
+adb_shell ls /system/bin/$GDBEXEC_ASAN > /dev/null
+if [ $? == 0 ]; then
+ GDBEXEC=$GDBEXEC_ASAN
+fi
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+if [[ -n "$ANDROID_SERIAL" ]]; then
+ DEFAULT_PULL_LIBS_DIR="$DEFAULT_PULL_LIBS_DIR/$ANDROID_SERIAL-$SUFFIX_64_BIT"
+fi
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint"
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+if [ ! -f "$PULL_LIBS_DIR/build.fingerprint" ]; then
+ log "Auto-config: --pull-libs (no cached libraries)"
+ PULL_LIBS=true
+else
+ HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint")
+ log "Host build fingerprint: $HOST_FINGERPRINT"
+ if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+ log "Auto-config: --no-pull-libs (fingerprint match)"
+ NO_PULL_LIBS=true
+ else
+ log "Auto-config: --pull-libs (fingerprint mismatch)"
+ PULL_LIBS=true
+ fi
+fi
+
+# If requested, work for M-x gdb. The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+if [ "$ANNOTATE" ]; then
+ GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Get the PID from the --pid option or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+ PROCESSNAME=$PACKAGE_NAME
+ if [ -z "$PID" ]; then
+ PID=$(adb_shell ps | \
+ awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+ fi
+ if [ -z "$PID" ]; then
+ panic "Can't find application process PID."
+ fi
+ log "Found process PID: $PID"
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+if [ "$SU_PREFIX" ]; then
+ # Need to check that this works properly.
+ SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
+ adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
+ if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
+ echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
+ echo "$ adb shell $SU_PREFIX \"echo foo\""
+ cat $SU_PREFIX_TEST_LOG
+ exit 1
+ fi
+ COMMAND_PREFIX="$SU_PREFIX \""
+ COMMAND_SUFFIX="\""
+else
+ SHELL_UID=$("$ADB" shell cat /proc/self/status | \
+ awk '$1 == "Uid:" { print $2; }')
+ log "Shell UID: $SHELL_UID"
+ if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+ COMMAND_PREFIX="run-as $PACKAGE_NAME"
+ COMMAND_SUFFIX=
+ else
+ COMMAND_PREFIX=
+ COMMAND_SUFFIX=
+ fi
+fi
+log "Command prefix: '$COMMAND_PREFIX'"
+log "Command suffix: '$COMMAND_SUFFIX'"
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# Pull the device's system libraries that are mapped by our process.
+# Pulling all system libraries takes too long, so determine which ones
+# we need by looking at /proc/$PID/maps instead.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+ echo "Extracting system libraries into: $PULL_LIBS_DIR"
+ MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
+ if [ $? != 0 ]; then
+ echo "ERROR: Could not list process's memory mappings."
+ if [ "$SU_PREFIX" ]; then
+ panic "Are you sure your --su-prefix is correct?"
+ else
+ panic "Use --su-prefix if the application is not debuggable."
+ fi
+ fi
+ # Remove the fingerprint file in case pulling one of the libs fails.
+ rm -f "$PULL_LIBS_DIR/build.fingerprint"
+ SYSTEM_LIBS=$(echo "$MAPPINGS" | \
+ awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u)
+ for SYSLIB in /system/bin/linker$SUFFIX_64_BIT $SYSTEM_LIBS; do
+ echo "Pulling from device: $SYSLIB"
+ DST_FILE=$PULL_LIBS_DIR$SYSLIB
+ DST_DIR=$(dirname "$DST_FILE")
+ mkdir -p "$DST_DIR" && "$ADB" pull $SYSLIB "$DST_FILE" 2>/dev/null
+ fail_panic "Could not pull $SYSLIB from device !?"
+ done
+ echo "Writing the device fingerprint"
+ echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint"
+fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+"$ADB" pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+ grep -v "^$" | tr '\n' ':')
+SOLIB_DIRS=${SOLIB_DIRS%:} # Strip trailing :
+
+# Applications with minSdkVersion >= 24 will have their data directories
+# created with rwx------ permissions, preventing adbd from forwarding to
+# the gdbserver socket.
+adb_shell $COMMAND_PREFIX chmod a+x $APP_DATA_DIR $COMMAND_SUFFIX
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+"$ADB" push $GDBSERVER $TMP_TARGET_GDBSERVER >/dev/null && \
+ adb_shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER $COMMAND_SUFFIX && \
+ adb_shell rm $TMP_TARGET_GDBSERVER
+fail_panic "Could not copy gdbserver to the device!"
+
+if [ -z "$PORT" ]; then
+ # Random port to allow multiple concurrent sessions.
+ PORT=$(( $RANDOM % 1000 + 5039 ))
+fi
+HOST_PORT=$PORT
+TARGET_DOMAIN_SOCKET=$APP_DATA_DIR/gdb-socket-$HOST_PORT
+
+# Setup network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_DOMAIN_SOCKET)"
+"$ADB" forward tcp:$HOST_PORT localfilesystem:$TARGET_DOMAIN_SOCKET
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:$TARGET_DOMAIN_SOCKET"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If not, this will fail horribly. The alternative is to run the
+# program as root, which of course requires root privileges.
+# Maybe we should add a --root option to enable this?
+#
+
+for i in 1 2; do
+ log "Starting gdbserver in the background:"
+ GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+ log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER \
+ --once +$TARGET_DOMAIN_SOCKET \
+ --attach $PID $COMMAND_SUFFIX"
+ "$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER \
+ --once +$TARGET_DOMAIN_SOCKET \
+ --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 &
+ GDBSERVER_PID=$!
+ echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+ log "background job pid: $GDBSERVER_PID"
+
+ # Sleep to allow gdbserver to attach to the remote process and be
+ # ready to connect to.
+ log "Sleeping ${ATTACH_DELAY}s to ensure gdbserver is alive"
+ sleep "$ATTACH_DELAY"
+ log "Job control: $(jobs -l)"
+ STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+ if [ "$STATE" != "Running" ]; then
+ pid_msg=$(grep "is already traced by process" $GDBSERVER_LOG 2>/dev/null)
+ if [[ -n "$pid_msg" ]]; then
+ old_pid=${pid_msg##* }
+ old_pid=${old_pid//[$'\r\n']} # Trim trailing \r.
+ echo "Killing previous gdb server process (pid=$old_pid)"
+ adb_shell $COMMAND_PREFIX kill -9 $old_pid $COMMAND_SUFFIX
+ continue
+ fi
+ echo "ERROR: GDBServer either failed to run or attach to PID $PID!"
+ echo "Here is the output from gdbserver (also try --verbose for more):"
+ echo "===== gdbserver.log start ====="
+ cat $GDBSERVER_LOG
+ echo ="===== gdbserver.log end ======"
+ exit 1
+ fi
+ break
+done
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+cat > "$COMMANDS" <<EOF
+set osabi GNU/Linux # Copied from ndk-gdb.py.
+set print pretty 1
+python
+import sys
+sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')
+try:
+ import gdb_chrome
+finally:
+ sys.path.pop(0)
+end
+file $TMPDIR/$GDBEXEC
+directory $CHROMIUM_OUTPUT_DIR
+set solib-absolute-prefix $PULL_LIBS_DIR
+set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR
+
+python
+# Copied from ndk-gdb.py:
+def target_remote_with_retry(target, timeout_seconds):
+ import time
+ end_time = time.time() + timeout_seconds
+ while True:
+ try:
+ gdb.execute('target remote ' + target)
+ return True
+ except gdb.error as e:
+ time_left = end_time - time.time()
+ if time_left < 0 or time_left > timeout_seconds:
+ print("Error: unable to connect to device.")
+ print(e)
+ return False
+ time.sleep(min(0.25, time_left))
+
+print("Connecting to :$HOST_PORT...")
+if target_remote_with_retry(':$HOST_PORT', 5):
+ print("Attached! Reading symbols (takes ~30 seconds).")
+end
+EOF
+
+if [ "$GDBINIT" ]; then
+ cat "$GDBINIT" >> "$COMMANDS"
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+ echo "### START $COMMANDS"
+ cat "$COMMANDS"
+ echo "### END $COMMANDS"
+fi
+
+if [ "$IDE" ]; then
+ mkdir -p "$IDE_DIR"
+ SYM_GDB="$IDE_DIR/gdb"
+ SYM_EXE="$IDE_DIR/app_process"
+ SYM_INIT="$IDE_DIR/gdbinit"
+ ln -sf "$TMPDIR/$GDBEXEC" "$SYM_EXE"
+ ln -sf "$COMMANDS" "$SYM_INIT"
+ # gdb doesn't work when symlinked, so create a wrapper.
+ echo
+ cat > $SYM_GDB <<EOF
+#!/bin/sh
+exec $GDB "\$@"
+EOF
+ chmod u+x $SYM_GDB
+
+ echo "GDB server listening on: localhost:$PORT"
+ echo "GDB wrapper script: $SYM_GDB"
+ echo "App executable: $SYM_EXE"
+ echo "gdbinit: $SYM_INIT"
+ echo "Connect with vscode: https://chromium.googlesource.com/chromium/src/+/master/docs/vscode.md#Launch-Commands"
+ echo "Showing gdbserver logs. Press Ctrl-C to disconnect."
+ tail -f "$GDBSERVER_LOG"
+else
+ log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+ echo "Server log: $GDBSERVER_LOG"
+ if [ "$CGDB" ]; then
+ $CGDB -d $GDB -- $GDB_ARGS -x "$COMMANDS"
+ else
+ $GDB $GDB_ARGS -x "$COMMANDS"
+ fi
+fi
diff --git a/deps/v8/build/android/adb_install_apk.py b/deps/v8/build/android/adb_install_apk.py
new file mode 100755
index 0000000000..f17143a2a3
--- /dev/null
+++ b/deps/v8/build/android/adb_install_apk.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import argparse
+import glob
+import logging
+import os
+import sys
+
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+
+
+def main():
+ parser = argparse.ArgumentParser()
+
+ apk_group = parser.add_mutually_exclusive_group(required=True)
+ apk_group.add_argument('--apk', dest='apk_name',
+ help='DEPRECATED The name of the apk containing the'
+ ' application (with the .apk extension).')
+ apk_group.add_argument('apk_path', nargs='?',
+ help='The path to the APK to install.')
+
+ # TODO(jbudorick): Remove once no clients pass --apk_package
+ parser.add_argument('--apk_package', help='DEPRECATED unused')
+ parser.add_argument('--split',
+ action='append',
+ dest='splits',
+ help='A glob matching the apk splits. '
+ 'Can be specified multiple times.')
+ parser.add_argument('--keep_data',
+ action='store_true',
+ default=False,
+ help='Keep the package data when installing '
+ 'the application.')
+ parser.add_argument('--debug', action='store_const', const='Debug',
+ dest='build_type',
+ default=os.environ.get('BUILDTYPE', 'Debug'),
+ help='If set, run test suites under out/Debug. '
+ 'Default is env var BUILDTYPE or Debug')
+ parser.add_argument('--release', action='store_const', const='Release',
+ dest='build_type',
+ help='If set, run test suites under out/Release. '
+ 'Default is env var BUILDTYPE or Debug.')
+ parser.add_argument('-d', '--device', dest='devices', action='append',
+ default=[],
+                      help='Target device to install the apk on. Repeat for'
+                           ' multiple devices.')
+ parser.add_argument('--adb-path', type=os.path.abspath,
+ help='Absolute path to the adb binary to use.')
+ parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
+ parser.add_argument('-v', '--verbose', action='count',
+ help='Enable verbose logging.')
+ parser.add_argument('--downgrade', action='store_true',
+ help='If set, allows downgrading of apk.')
+ parser.add_argument('--timeout', type=int,
+ default=device_utils.DeviceUtils.INSTALL_DEFAULT_TIMEOUT,
+ help='Seconds to wait for APK installation. '
+ '(default: %(default)s)')
+
+ args = parser.parse_args()
+
+ run_tests_helper.SetLogLevel(args.verbose)
+ constants.SetBuildType(args.build_type)
+
+ devil_chromium.Initialize(
+ output_directory=constants.GetOutDirectory(),
+ adb_path=args.adb_path)
+
+ apk = args.apk_path or args.apk_name
+ if not apk.endswith('.apk'):
+ apk += '.apk'
+ if not os.path.exists(apk):
+ apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
+ if not os.path.exists(apk):
+ parser.error('%s not found.' % apk)
+
+ if args.splits:
+ splits = []
+ base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
+ for split_glob in args.splits:
+ apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
+ if not apks:
+ logging.warning('No apks matched for %s.', split_glob)
+ for f in apks:
+ helper = apk_helper.ApkHelper(f)
+ if (helper.GetPackageName() == base_apk_package
+ and helper.GetSplitName()):
+ splits.append(f)
+
+ blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+ devices = device_utils.DeviceUtils.HealthyDevices(blacklist=blacklist,
+ device_arg=args.devices)
+
+ def blacklisting_install(device):
+ try:
+ if args.splits:
+ device.InstallSplitApk(apk, splits, reinstall=args.keep_data,
+ allow_downgrade=args.downgrade)
+ else:
+ device.Install(apk, reinstall=args.keep_data,
+ allow_downgrade=args.downgrade,
+ timeout=args.timeout)
+ except (device_errors.CommandFailedError,
+ device_errors.DeviceUnreachableError):
+ logging.exception('Failed to install %s', apk)
+ if blacklist:
+ blacklist.Extend([str(device)], reason='install_failure')
+ logging.warning('Blacklisting %s', str(device))
+ except device_errors.CommandTimeoutError:
+ logging.exception('Timed out while installing %s', apk)
+ if blacklist:
+ blacklist.Extend([str(device)], reason='install_timeout')
+ logging.warning('Blacklisting %s', str(device))
+
+ device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/adb_logcat_monitor.py b/deps/v8/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000000..d3cc67dbcc
--- /dev/null
+++ b/deps/v8/build/android/adb_logcat_monitor.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create. The
+script will run until killed by an external signal. To test, run the
+script in a shell and <Ctrl>-C it after a while. It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+ """Exception used to signal a timeout."""
+ pass
+
+
+class SigtermError(Exception):
+ """Exception used to catch a sigterm."""
+ pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+ """Spawns a adb logcat process if one is not currently running."""
+ process, logcat_num = devices[device_id]
+ if process:
+ if process.poll() is None:
+ # Logcat process is still happily running
+ return
+ else:
+ logging.info('Logcat for device %s has died', device_id)
+ error_filter = re.compile('- waiting for device -')
+ for line in process.stderr:
+ if not error_filter.match(line):
+ logging.error(device_id + ': ' + line)
+
+ logging.info('Starting logcat %d for device %s', logcat_num,
+ device_id)
+ logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+ logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+ process = subprocess.Popen([adb_cmd, '-s', device_id,
+ 'logcat', '-v', 'threadtime'],
+ stdout=logcat_file,
+ stderr=subprocess.PIPE)
+ devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+ """Gets the device list from adb.
+
+ We use an alarm in this function to avoid deadlocking from an external
+ dependency.
+
+ Args:
+ adb_cmd: binary to run adb
+
+ Returns:
+ list of devices or an empty list on timeout
+ """
+ signal.alarm(2)
+ try:
+ out, err = subprocess.Popen([adb_cmd, 'devices'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()
+ if err:
+ logging.warning('adb device error %s', err.strip())
+ return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE)
+ except TimeoutException:
+ logging.warning('"adb devices" command timed out')
+ return []
+ except (IOError, OSError):
+ logging.exception('Exception from "adb devices"')
+ return []
+ finally:
+ signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+ """Monitor adb forever. Expects a SIGINT (Ctrl-C) to kill."""
+ # We create the directory to ensure 'run once' semantics
+ if os.path.exists(base_dir):
+ print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
+ shutil.rmtree(base_dir, ignore_errors=True)
+
+ os.makedirs(base_dir)
+ logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+ level=logging.INFO,
+ format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+ # Set up the alarm for calling 'adb devices'. This is to ensure
+ # our script doesn't get stuck waiting for a process response
+ def TimeoutHandler(_signum, _unused_frame):
+ raise TimeoutException()
+ signal.signal(signal.SIGALRM, TimeoutHandler)
+
+ # Handle SIGTERMs to ensure clean shutdown
+ def SigtermHandler(_signum, _unused_frame):
+ raise SigtermError()
+ signal.signal(signal.SIGTERM, SigtermHandler)
+
+ logging.info('Started with pid %d', os.getpid())
+ pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+ try:
+ with open(pid_file_path, 'w') as f:
+ f.write(str(os.getpid()))
+ while True:
+ for device_id in GetAttachedDevices(adb_cmd):
+          if device_id not in devices:
+ subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
+ devices[device_id] = (None, 0)
+
+ for device in devices:
+ # This will spawn logcat watchers for any device ever detected
+ StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+ time.sleep(5)
+ except SigtermError:
+ logging.info('Received SIGTERM, shutting down')
+ except: # pylint: disable=bare-except
+ logging.exception('Unexpected exception in main.')
+ finally:
+ for process, _ in devices.itervalues():
+ if process:
+ try:
+ process.terminate()
+ except OSError:
+ pass
+ os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+ if 2 <= len(sys.argv) <= 3:
+ print 'adb_logcat_monitor: Initializing'
+ sys.exit(main(*sys.argv[1:3]))
+
+ print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/deps/v8/build/android/adb_logcat_printer.py b/deps/v8/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000000..a715170759
--- /dev/null
+++ b/deps/v8/build/android/adb_logcat_printer.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to stdout (or to --output-path, if given),
+combining multiple logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+ """Splices together multiple logcats from the same device.
+
+ Args:
+ list_of_lists: list of pairs (filename, list of timestamped lines)
+ logger: handler to log events
+
+ Returns:
+ list of lines with duplicates removed
+ """
+ cur_device_log = ['']
+ for cur_file, cur_file_lines in list_of_lists:
+ # Ignore files with just the logcat header
+ if len(cur_file_lines) < 2:
+ continue
+ common_index = 0
+ # Skip this step if list just has empty string
+ if len(cur_device_log) > 1:
+ try:
+ line = cur_device_log[-1]
+ # Used to make sure we only splice on a timestamped line
+ if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
+ common_index = cur_file_lines.index(line)
+ else:
+ logger.warning('splice error - no timestamp in "%s"?', line.strip())
+ except ValueError:
+ # The last line was valid but wasn't found in the next file
+ cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+ logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+ cur_device_log += ['*'*30 + ' %s' % cur_file]
+ cur_device_log.extend(cur_file_lines[common_index:])
+
+ return cur_device_log
+
+
+def FindLogFiles(base_dir):
+ """Search a directory for logcat files.
+
+ Args:
+ base_dir: directory to search
+
+ Returns:
+ Mapping of device_id to a sorted list of file paths for a given device
+ """
+ logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+ # list of tuples (<device_id>, <seq num>, <full file path>)
+ filtered_list = []
+ for cur_file in os.listdir(base_dir):
+ matcher = logcat_filter.match(cur_file)
+ if matcher:
+ filtered_list += [(matcher.group(1), int(matcher.group(2)),
+ os.path.join(base_dir, cur_file))]
+ filtered_list.sort()
+ file_map = {}
+ for device_id, _, cur_file in filtered_list:
+ if device_id not in file_map:
+ file_map[device_id] = []
+
+ file_map[device_id] += [cur_file]
+ return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+ """Read log files, combine and format.
+
+ Args:
+ log_filenames: mapping of device_id to sorted list of file paths
+ logger: logger handle for logging events
+
+ Returns:
+ list of formatted device logs, one for each device.
+ """
+ device_logs = []
+
+ for device, device_files in log_filenames.iteritems():
+ logger.debug('%s: %s', device, str(device_files))
+ device_file_lines = []
+ for cur_file in device_files:
+ with open(cur_file) as f:
+ device_file_lines += [(cur_file, f.read().splitlines())]
+ combined_lines = CombineLogFiles(device_file_lines, logger)
+ # Prepend each line with a short unique ID so it's easy to see
+ # when the device changes. We don't use the start of the device
+ # ID because it can be the same among devices. Example lines:
+ # AB324: foo
+ # AB324: blah
+ device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
+ return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+ """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+ try:
+ monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+ with open(monitor_pid_path) as f:
+ monitor_pid = int(f.readline())
+
+ logger.info('Sending SIGTERM to %d', monitor_pid)
+ os.kill(monitor_pid, signal.SIGTERM)
+ i = 0
+ while True:
+ time.sleep(.2)
+ if not os.path.exists(monitor_pid_path):
+ return
+ if not os.path.exists('/proc/%d' % monitor_pid):
+ logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+ return
+ logger.info('Waiting for logcat process to terminate.')
+ i += 1
+ if i >= 10:
+ logger.warning('Monitor pid did not terminate. Continuing anyway.')
+ return
+
+ except (ValueError, IOError, OSError):
+ logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+ parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+ parser.add_option('--output-path',
+ help='Output file path (if unspecified, prints to stdout)')
+ options, args = parser.parse_args(argv)
+ if len(args) != 1:
+ parser.error('Wrong number of unparsed args')
+ base_dir = args[0]
+
+ log_stringio = cStringIO.StringIO()
+ logger = logging.getLogger('LogcatPrinter')
+ logger.setLevel(LOG_LEVEL)
+ sh = logging.StreamHandler(log_stringio)
+ sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+ ' %(message)s'))
+ logger.addHandler(sh)
+
+ if options.output_path:
+ if not os.path.exists(os.path.dirname(options.output_path)):
+ logger.warning('Output dir %s doesn\'t exist. Creating it.',
+ os.path.dirname(options.output_path))
+ os.makedirs(os.path.dirname(options.output_path))
+ output_file = open(options.output_path, 'w')
+ logger.info('Dumping logcat to local file %s. If running in a build, '
+ 'this file will likely be uploaded to google storage '
+ 'in a later step. It can be downloaded from there.',
+ options.output_path)
+ else:
+ output_file = sys.stdout
+
+ try:
+ # Wait at least 5 seconds after base_dir is created before printing.
+ #
+ # The idea is that 'adb logcat > file' output consists of 2 phases:
+ # 1. Dump all the saved logs to the file
+ # 2. Stream log messages as they are generated
+ #
+ # We want to give enough time for phase 1 to complete. There's no
+ # good method to tell how long to wait, but it usually only takes a
+ # second. On most bots, this code path won't occur at all, since
+ # the adb_logcat_monitor.py command will have been spawned more than
+ # 5 seconds before this script is called.
+ try:
+ sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+ except OSError:
+ sleep_time = 5
+ if sleep_time > 0:
+ logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+ time.sleep(sleep_time)
+
+ assert os.path.exists(base_dir), '%s does not exist' % base_dir
+ ShutdownLogcatMonitor(base_dir, logger)
+ separator = '\n' + '*' * 80 + '\n\n'
+ for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+ output_file.write(log)
+ output_file.write(separator)
+ with open(os.path.join(base_dir, 'eventlog')) as f:
+ output_file.write('\nLogcat Monitor Event Log\n')
+ output_file.write(f.read())
+ except:
+ logger.exception('Unexpected exception')
+
+ logger.info('Done.')
+ sh.flush()
+ output_file.write('\nLogcat Printer Event Log\n')
+ output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/adb_profile_chrome b/deps/v8/build/android/adb_profile_chrome
new file mode 100755
index 0000000000..d3244ffdf6
--- /dev/null
+++ b/deps/v8/build/android/adb_profile_chrome
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@"
diff --git a/deps/v8/build/android/adb_profile_chrome_startup b/deps/v8/build/android/adb_profile_chrome_startup
new file mode 100755
index 0000000000..d5836cdf70
--- /dev/null
+++ b/deps/v8/build/android/adb_profile_chrome_startup
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling for chrome startup.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@"
diff --git a/deps/v8/build/android/adb_reverse_forwarder.py b/deps/v8/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000000..6edb43ae5b
--- /dev/null
+++ b/deps/v8/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
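+# Example invocation (illustrative port numbers):
+# adb_reverse_forwarder.py 8080 8080 9000 9001
+# forwards device ports 8080 and 9000 to host ports 8080 and 9001.
+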
+import argparse
+import sys
+import time
+
+import devil_chromium
+
+from devil.android import device_blacklist
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.utils import run_tests_helper
+
+from pylib import constants
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ usage='Usage: %(prog)s [options] device_port '
+ 'host_port [device_port_2 host_port_2] ...',
+ description=__doc__)
+ parser.add_argument(
+ '-v', '--verbose',
+ dest='verbose_count',
+ default=0,
+ action='count',
+ help='Verbose level (multiple times for more)')
+ parser.add_argument(
+ '--device',
+ help='Serial number of device we should use.')
+ parser.add_argument(
+ '--blacklist-file',
+ help='Device blacklist JSON file.')
+ parser.add_argument(
+ '--debug',
+ action='store_const',
+ const='Debug',
+ dest='build_type',
+ default='Release',
+ help='DEPRECATED: use --output-directory instead.')
+ parser.add_argument(
+ '--output-directory',
+ help='Path to the root build directory.')
+ parser.add_argument(
+ 'ports',
+ nargs='+',
+ type=int,
+ help='Port pair to reverse forward.')
+
+ args = parser.parse_args(argv)
+ run_tests_helper.SetLogLevel(args.verbose_count)
+
+ if len(args.ports) < 2 or len(args.ports) % 2:
+ parser.error('Need an even number of ports (device_port host_port pairs)')
+
+ port_pairs = zip(args.ports[::2], args.ports[1::2])
+
+ if args.build_type:
+ constants.SetBuildType(args.build_type)
+ if args.output_directory:
+ constants.SetOutputDirectory(args.output_directory)
+ devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+ blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+ device = device_utils.DeviceUtils.HealthyDevices(
+ blacklist=blacklist, device_arg=args.device)[0]
+ try:
+ forwarder.Forwarder.Map(port_pairs, device)
+ while True:
+ time.sleep(60)
+ except KeyboardInterrupt:
+ sys.exit(0)
+ finally:
+ forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/adb_system_webview_command_line b/deps/v8/build/android/adb_system_webview_command_line
new file mode 100755
index 0000000000..a0d2705821
--- /dev/null
+++ b/deps/v8/build/android/adb_system_webview_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current system webview flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the system
+# webview flags. For example:
+#   adb_system_webview_command_line --enable-webgl
+#
+# To remove all system webview flags, pass an empty string for the flags:
+#   adb_system_webview_command_line ""
+
+exec "$(dirname "$0")"/adb_command_line.py --name webview-command-line "$@"
diff --git a/deps/v8/build/android/android_only_explicit_jni_exports.lst b/deps/v8/build/android/android_only_explicit_jni_exports.lst
new file mode 100644
index 0000000000..f989691865
--- /dev/null
+++ b/deps/v8/build/android/android_only_explicit_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only JNI_OnLoad.
+# Should be used for libraries that do explicit JNI registration.
+
+{
+ global:
+ JNI_OnLoad;
+ local:
+ *;
+};
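+
+# Illustrative GN usage (wiring assumed, not defined in this file):
+#   ldflags = [ "-Wl,--version-script=" + rebase_path(
+#       "//build/android/android_only_explicit_jni_exports.lst",
+#       root_build_dir) ]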
diff --git a/deps/v8/build/android/android_only_jni_exports.lst b/deps/v8/build/android/android_only_jni_exports.lst
new file mode 100644
index 0000000000..1336fee145
--- /dev/null
+++ b/deps/v8/build/android/android_only_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only symbols required for JNI to work.
+
+{
+ global:
+ JNI_OnLoad;
+ Java_*;
+ local:
+ *;
+};
diff --git a/deps/v8/build/android/apk_operations.py b/deps/v8/build/android/apk_operations.py
new file mode 100755
index 0000000000..91f6851c73
--- /dev/null
+++ b/deps/v8/build/android/apk_operations.py
@@ -0,0 +1,1678 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Using colorama.Fore/Back/Style members
+# pylint: disable=no-member
+
+import argparse
+import collections
+import json
+import logging
+import os
+import pipes
+import posixpath
+import random
+import re
+import shlex
+import shutil
+import sys
+import tempfile
+import textwrap
+
+import devil_chromium
+from devil import devil_env
+from devil.android import apk_helper
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.sdk import adb_wrapper
+from devil.android.sdk import intent
+from devil.android.sdk import version_codes
+from devil.utils import run_tests_helper
+
+with devil_env.SysPath(os.path.join(os.path.dirname(__file__), '..', '..',
+ 'third_party', 'colorama', 'src')):
+ import colorama
+
+from incremental_install import installer
+from pylib import constants
+from pylib.symbols import deobfuscator
+from pylib.utils import simpleperf
+from pylib.utils import app_bundle_utils
+
+with devil_env.SysPath(os.path.join(os.path.dirname(__file__), '..', '..',
+ 'build', 'android', 'gyp')):
+ import bundletool
+
+# Matches messages only on pre-L (Dalvik) that are spammy and unimportant.
+_DALVIK_IGNORE_PATTERN = re.compile('|'.join([
+ r'^Added shared lib',
+ r'^Could not find ',
+ r'^DexOpt:',
+ r'^GC_',
+ r'^Late-enabling CheckJNI',
+ r'^Link of class',
+ r'^No JNI_OnLoad found in',
+ r'^Trying to load lib',
+ r'^Unable to resolve superclass',
+ r'^VFY:',
+ r'^WAIT_',
+ ]))
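+# For example, hypothetical pre-L messages like 'DexOpt: illegal method access'
+# or 'GC_CONCURRENT freed 1024K' match the pattern above and are suppressed
+# by _LogcatProcessor unless --verbose is used.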
+
+BASE_MODULE = 'base'
+
+
+def _Colorize(text, style=''):
+ return (style
+ + text
+ + colorama.Style.RESET_ALL)
+
+
+def _InstallApk(devices, apk, install_dict):
+ def install(device):
+ if install_dict:
+ installer.Install(device, install_dict, apk=apk)
+ else:
+ device.Install(apk, allow_downgrade=True, reinstall=True)
+
+ logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
+ device_utils.DeviceUtils.parallel(devices).pMap(install)
+
+
+# A named tuple containing the information needed to convert a bundle into
+# an installable .apks archive.
+# Fields:
+# bundle_path: Path to input bundle file.
+# bundle_apks_path: Path to output bundle .apks archive file.
+# aapt2_path: Path to aapt2 tool.
+# keystore_path: Path to keystore file.
+# keystore_password: Password for the keystore file.
+# keystore_alias: Signing key name alias within the keystore file.
+# system_image_locales: List of Chromium locales to include in system .apks.
+BundleGenerationInfo = collections.namedtuple(
+ 'BundleGenerationInfo',
+ 'bundle_path,bundle_apks_path,aapt2_path,keystore_path,keystore_password,'
+ 'keystore_alias,system_image_locales')
+
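+# Illustrative instantiation (all values hypothetical):
+#   BundleGenerationInfo(
+#       bundle_path='out/Release/apks/Foo.aab',
+#       bundle_apks_path='out/Release/apks/Foo.apks',
+#       aapt2_path='.../aapt2',
+#       keystore_path='.../chromium-debug.keystore',
+#       keystore_password='chromium',
+#       keystore_alias='chromiumdebugkey',
+#       system_image_locales=[])
+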
+
+def _GenerateBundleApks(info,
+ output_path,
+ minimal=False,
+ minimal_sdk_version=None,
+ mode=None):
+ """Generate an .apks archive from a bundle on demand.
+
+ Args:
+ info: A BundleGenerationInfo instance.
+ output_path: Path of output .apks archive.
+ minimal: Create the minimal set of apks possible (english-only).
+ minimal_sdk_version: When minimal=True, use this sdkVersion.
+ mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+ """
+ app_bundle_utils.GenerateBundleApks(
+ info.bundle_path,
+ output_path,
+ info.aapt2_path,
+ info.keystore_path,
+ info.keystore_password,
+ info.keystore_alias,
+ system_image_locales=info.system_image_locales,
+ mode=mode,
+ minimal=minimal,
+ minimal_sdk_version=minimal_sdk_version)
+
+
+def _InstallBundle(devices, bundle_apks, package_name, command_line_flags_file,
+ modules, fake_modules):
+ # Path to push fake modules for Chrome to pick up.
+ MODULES_SRC_DIRECTORY_PATH = '/data/local/tmp/modules'
+ # Path Chrome creates after validating fake modules. This needs to be cleared
+ # for pushed fake modules to be picked up.
+ SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
+ # Chrome command line flag needed for fake modules to work.
+ FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install'
+
+ def ShouldWarnFakeFeatureModuleInstallFlag(device):
+ if command_line_flags_file:
+ changer = flag_changer.FlagChanger(device, command_line_flags_file)
+ return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags()
+ return False
+
+ def ClearFakeModules(device):
+ if device.PathExists(SPLITCOMPAT_PATH, as_root=True):
+ device.RemovePath(
+ SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True)
+ logging.info('Removed %s', SPLITCOMPAT_PATH)
+ else:
+ logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
+
+ def InstallFakeModules(device):
+ try:
+ temp_path = tempfile.mkdtemp()
+
+ if not fake_modules:
+ # Push empty temp_path to clear folder on device and update the cache.
+ device.PushChangedFiles([(temp_path, MODULES_SRC_DIRECTORY_PATH)],
+ delete_device_stale=True)
+ return
+
+ # Device-spec JSON is needed, so create that first.
+ device_spec_filename = os.path.join(temp_path, 'device_spec.json')
+ get_device_spec_cmd_args = [
+ 'get-device-spec', '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(),
+ '--device-id=' + device.serial, '--output=' + device_spec_filename
+ ]
+ bundletool.RunBundleTool(get_device_spec_cmd_args)
+
+ # Extract fake modules to temp directory. For now, installation
+ # requires running 'bundletool extract-apks'. Unfortunately, this leads
+ # to unneeded compression of module files.
+ extract_apks_cmd_args = [
+ 'extract-apks', '--apks=' + bundle_apks,
+ '--device-spec=' + device_spec_filename,
+ '--modules=' + ','.join(fake_modules), '--output-dir=' + temp_path
+ ]
+ bundletool.RunBundleTool(extract_apks_cmd_args)
+
+ # Push fake modules, with renames.
+ fake_module_apks = set()
+ for fake_module in fake_modules:
+ found_master = False
+
+ for filename in os.listdir(temp_path):
+ # If file matches expected format, rename it to follow conventions
+ # required by splitcompatting.
+ match = re.match(r'%s-([a-z_0-9]+)\.apk' % fake_module, filename)
+ local_path = os.path.join(temp_path, filename)
+
+ if not match:
+ continue
+
+ module_suffix = match.group(1)
+ remote = os.path.join(
+ temp_path, '%s.config.%s.apk' % (fake_module, module_suffix))
+ # Check if filename matches a master apk.
+ if 'master' in module_suffix:
+ if found_master:
+ raise Exception('Expected exactly one master apk file for %s' % fake_module)
+ found_master = True
+ remote = os.path.join(temp_path, '%s.apk' % fake_module)
+
+ os.rename(local_path, remote)
+ fake_module_apks.add(os.path.basename(remote))
+
+ # Files that weren't renamed should not be pushed; remove from temp_path.
+ for filename in os.listdir(temp_path):
+ if filename not in fake_module_apks:
+ os.remove(os.path.join(temp_path, filename))
+
+ device.PushChangedFiles([(temp_path, MODULES_SRC_DIRECTORY_PATH)],
+ delete_device_stale=True)
+
+ finally:
+ shutil.rmtree(temp_path, ignore_errors=True)
+
+ def Install(device):
+ ClearFakeModules(device)
+ if fake_modules:
+ # Print warning if command line is not set up for fake modules.
+ if ShouldWarnFakeFeatureModuleInstallFlag(device):
+ msg = ('Command line has no %s: Fake modules will be ignored.' %
+ FAKE_FEATURE_MODULE_INSTALL)
+ print _Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT)
+
+ InstallFakeModules(device)
+
+ # NOTE: For now, installation requires running 'bundletool install-apks'.
+ # TODO(digit): Add proper support for bundles to devil instead, then use it.
+ install_cmd_args = [
+ 'install-apks', '--apks=' + bundle_apks,
+ '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(),
+ '--device-id=' + device.serial
+ ]
+ if modules:
+ install_cmd_args += ['--modules=' + ','.join(modules)]
+ bundletool.RunBundleTool(install_cmd_args)
+
+ # Basic checks for |modules| and |fake_modules|.
+ # * |fake_modules| cannot include 'base'.
+ # * If |fake_modules| is given, ensure |modules| includes 'base'.
+ # * They must be disjoint.
+ modules_set = set(modules) if modules else set()
+ fake_modules_set = set(fake_modules) if fake_modules else set()
+ if BASE_MODULE in fake_modules_set:
+ raise Exception('\'-f {}\' is disallowed.'.format(BASE_MODULE))
+ if fake_modules_set and BASE_MODULE not in modules_set:
+ raise Exception(
+ '\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE))
+ if fake_modules_set.intersection(modules_set):
+ raise Exception('\'-m\' and \'-f\' entries must be disjoint.')
+
+ logging.info('Installing bundle.')
+ device_utils.DeviceUtils.parallel(devices).pMap(Install)
+
+
+def _UninstallApk(devices, install_dict, package_name):
+ def uninstall(device):
+ if install_dict:
+ installer.Uninstall(device, package_name)
+ else:
+ device.Uninstall(package_name)
+ device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
+
+
+def _IsWebViewProvider(apk_helper_instance):
+ meta_data = apk_helper_instance.GetAllMetadata()
+ meta_data_keys = [pair[0] for pair in meta_data]
+ return 'com.android.webview.WebViewLibrary' in meta_data_keys
+
+
+def _SetWebViewProvider(devices, package_name):
+
+ def switch_provider(device):
+ if device.build_version_sdk < version_codes.NOUGAT:
+ logging.error('No need to switch provider on pre-Nougat devices (%s)',
+ device.serial)
+ else:
+ device.SetWebViewImplementation(package_name)
+
+ device_utils.DeviceUtils.parallel(devices).pMap(switch_provider)
+
+
+def _NormalizeProcessName(debug_process_name, package_name):
+ if not debug_process_name:
+ debug_process_name = package_name
+ elif debug_process_name.startswith(':'):
+ debug_process_name = package_name + debug_process_name
+ elif '.' not in debug_process_name:
+ debug_process_name = package_name + ':' + debug_process_name
+ return debug_process_name
+
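+# Examples (hypothetical package name 'com.example.app'):
+#   _NormalizeProcessName(None, 'com.example.app') -> 'com.example.app'
+#   _NormalizeProcessName(':renderer', 'com.example.app')
+#       -> 'com.example.app:renderer'
+#   _NormalizeProcessName('privileged_process0', 'com.example.app')
+#       -> 'com.example.app:privileged_process0'
+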
+
+def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
+ url=None, apk=None, wait_for_java_debugger=False,
+ debug_process_name=None, nokill=None):
+ if argv and command_line_flags_file is None:
+ raise Exception('This apk does not support any flags.')
+ if url:
+ # TODO(agrieve): Launch could be changed to require only package name by
+ # parsing "dumpsys package" rather than relying on the apk.
+ if not apk:
+ raise Exception('Launching with URL is not supported when using '
+ '--package-name. Use --apk-path instead.')
+ view_activity = apk.GetViewActivityName()
+ if not view_activity:
+ raise Exception('APK does not support launching with URLs.')
+
+ debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+
+ def launch(device):
+ # --persistent is required to have Settings.Global.DEBUG_APP be set, which
+ # we currently use to allow reading of flags. https://crbug.com/784947
+ if not nokill:
+ cmd = ['am', 'set-debug-app', '--persistent', debug_process_name]
+ if wait_for_java_debugger:
+ cmd[-1:-1] = ['-w']
+ # Ignore error since it will fail if apk is not debuggable.
+ device.RunShellCommand(cmd, check_return=False)
+
+ # The flags are first updated with input args.
+ if command_line_flags_file:
+ changer = flag_changer.FlagChanger(device, command_line_flags_file)
+ flags = []
+ if argv:
+ flags = shlex.split(argv)
+ try:
+ changer.ReplaceFlags(flags)
+ except device_errors.AdbShellCommandFailedError:
+ logging.exception('Failed to set flags')
+
+ if url is None:
+ # Simulate app icon click if no url is present.
+ cmd = [
+ 'am', 'start', '-p', package_name, '-c',
+ 'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN'
+ ]
+ device.RunShellCommand(cmd, check_return=True)
+ else:
+ launch_intent = intent.Intent(action='android.intent.action.VIEW',
+ activity=view_activity, data=url,
+ package=package_name)
+ device.StartActivity(launch_intent)
+ device_utils.DeviceUtils.parallel(devices).pMap(launch)
+ if wait_for_java_debugger:
+ print ('Waiting for debugger to attach to process: ' +
+ _Colorize(debug_process_name, colorama.Fore.YELLOW))
+
+
+def _ChangeFlags(devices, argv, command_line_flags_file):
+ if argv is None:
+ _DisplayArgs(devices, command_line_flags_file)
+ else:
+ flags = shlex.split(argv)
+ def update(device):
+ changer = flag_changer.FlagChanger(device, command_line_flags_file)
+ changer.ReplaceFlags(flags)
+ device_utils.DeviceUtils.parallel(devices).pMap(update)
+
+
+def _TargetCpuToTargetArch(target_cpu):
+ if target_cpu == 'x64':
+ return 'x86_64'
+ if target_cpu == 'mipsel':
+ return 'mips'
+ return target_cpu
+
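+# E.g. a GN target_cpu of 'x64' becomes the 'x86_64' value expected by
+# adb_gdb's --target-arch flag; other values pass through unchanged.
+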
+
+def _RunGdb(device, package_name, debug_process_name, pid, output_directory,
+ target_cpu, port, ide, verbose):
+ if not pid:
+ debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+ pid = device.GetApplicationPids(debug_process_name, at_most_one=True)
+ if not pid:
+ # Attaching gdb makes the app run so slow that it takes *minutes* to start
+ # up (as of 2018). Better to just fail than to start & attach.
+ raise Exception('App not running.')
+
+ gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
+ cmd = [
+ gdb_script_path,
+ '--package-name=%s' % package_name,
+ '--output-directory=%s' % output_directory,
+ '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
+ '--device=%s' % device.serial,
+ '--pid=%s' % pid,
+ '--port=%d' % port,
+ ]
+ if ide:
+ cmd.append('--ide')
+ # Enable verbose output of adb_gdb if it's set for this script.
+ if verbose:
+ cmd.append('--verbose')
+ if target_cpu:
+ cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
+ logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
+ print _Colorize(
+ 'All subsequent output is from adb_gdb script.', colorama.Fore.YELLOW)
+ os.execv(gdb_script_path, cmd)
+
+
+def _PrintPerDeviceOutput(devices, results, single_line=False):
+ for d, result in zip(devices, results):
+ if not single_line and d is not devices[0]:
+ sys.stdout.write('\n')
+ sys.stdout.write(
+ _Colorize('{} ({}):'.format(d, d.build_description),
+ colorama.Fore.YELLOW))
+ sys.stdout.write(' ' if single_line else '\n')
+ yield result
+
+
+def _RunMemUsage(devices, package_name, query_app=False):
+ cmd_args = ['dumpsys', 'meminfo']
+ if not query_app:
+ cmd_args.append('--local')
+
+ def mem_usage_helper(d):
+ ret = []
+ for process in sorted(_GetPackageProcesses(d, package_name)):
+ meminfo = d.RunShellCommand(cmd_args + [str(process.pid)])
+ ret.append((process.name, '\n'.join(meminfo)))
+ return ret
+
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
+ for result in _PrintPerDeviceOutput(devices, all_results):
+ if not result:
+ print 'No processes found.'
+ else:
+ for name, usage in sorted(result):
+ print _Colorize(
+ '==== Output of "dumpsys meminfo %s" ====' % name,
+ colorama.Fore.GREEN)
+ print usage
+
+
+def _DuHelper(device, path_spec, run_as=None):
+ """Runs "du -s -k |path_spec|" on |device| and returns parsed result.
+
+ Args:
+ device: A DeviceUtils instance.
+ path_spec: The list of paths to run du on. May contain shell expansions
+ (will not be escaped).
+ run_as: Package name to run as, or None to run as shell user. If not None
+ and app is not android:debuggable (run-as fails), then command will be
+ run as root.
+
+ Returns:
+ A dict of path->size in KiB containing all paths in |path_spec| that exist
+ on device. Paths that do not exist are silently ignored.
+ """
+ # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
+ # 144 /data/data/org.chromium.chrome/cache
+ # 8 /data/data/org.chromium.chrome/files
+ # <snip>
+ # du: .*: No such file or directory
+
+ # The -d flag works differently across Android versions, so use -s instead.
+ # Without the explicit 2>&1, stderr and stdout get combined at random :(.
+ cmd_str = 'du -s -k ' + path_spec + ' 2>&1'
+ lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
+ check_return=False)
+ output = '\n'.join(lines)
+ # run-as: Package 'com.android.chrome' is not debuggable
+ if output.startswith('run-as:'):
+ # check_return=False needed for when some paths in path_spec do not exist.
+ lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
+ check_return=False)
+ ret = {}
+ try:
+ for line in lines:
+ # du: .*: No such file or directory
+ if line.startswith('du:'):
+ continue
+ size, subpath = line.split(None, 1)
+ ret[subpath] = int(size)
+ return ret
+ except ValueError:
+ logging.error('du command was: %s', cmd_str)
+ logging.error('Failed to parse du output:\n%s', output)
+ raise
+
+
+def _RunDiskUsage(devices, package_name):
+ # Measuring dex size is a bit complicated:
+ # https://source.android.com/devices/tech/dalvik/jit-compiler
+ #
+ # For KitKat and below:
+ # dumpsys package contains:
+ # dataDir=/data/data/org.chromium.chrome
+ # codePath=/data/app/org.chromium.chrome-1.apk
+ # resourcePath=/data/app/org.chromium.chrome-1.apk
+ # nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
+ # To measure odex:
+ # ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex
+ #
+ # For Android L and M (and maybe for N+ system apps):
+ # dumpsys package contains:
+ # codePath=/data/app/org.chromium.chrome-1
+ # resourcePath=/data/app/org.chromium.chrome-1
+ # legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
+ # To measure odex:
+ # # Option 1:
+ # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex
+ # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex
+ # ls -l /data/dalvik-cache/profiles/org.chromium.chrome
+ # (these profiles all appear to be 0 bytes)
+ # # Option 2:
+ # ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
+ #
+ # For Android N+:
+ # dumpsys package contains:
+ # dataDir=/data/user/0/org.chromium.chrome
+ # codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+ # resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+ # legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
+ # Instruction Set: arm
+ # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+ # status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
+ # ilter=quicken]
+ # Instruction Set: arm64
+ # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+ # status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
+ # uicken]
+ # To measure odex:
+ # ls -l /data/app/.../oat/arm/base.odex
+ # ls -l /data/app/.../oat/arm/base.vdex (optional)
+ # To measure the correct odex size:
+ # cmd package compile -m speed org.chromium.chrome # For webview
+ # cmd package compile -m speed-profile org.chromium.chrome # For others
+ def disk_usage_helper(d):
+ package_output = '\n'.join(d.RunShellCommand(
+ ['dumpsys', 'package', package_name], check_return=True))
+ # 'dumpsys package' does not return an error when apk is not installed.
+ if not package_output or 'Unable to find package:' in package_output:
+ return None
+
+ # Ignore system apks that have updates installed.
+ package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+ package_output, flags=re.S | re.M)
+
+ try:
+ data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
+ code_path = re.search(r'codePath=(.*)', package_output).group(1)
+ lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
+ package_output).group(1)
+ except AttributeError:
+ raise Exception('Error parsing dumpsys output: ' + package_output)
+
+ if code_path.startswith('/system'):
+ logging.warning('Measurement of system image apks can be inaccurate')
+
+ compilation_filters = set()
+ # Match "compilation_filter=value", where a line break can occur at any spot
+ # (refer to examples above).
+ awful_wrapping = r'\s*'.join('compilation_filter=')
+ for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
+ compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
+ compilation_filter = ','.join(sorted(compilation_filters))
+
+ data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
+ # Measure code_cache separately since it can be large.
+ code_cache_sizes = {}
+ code_cache_dir = next(
+ (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
+ if code_cache_dir:
+ data_dir_sizes.pop(code_cache_dir)
+ code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
+ run_as=package_name)
+
+ apk_path_spec = code_path
+ if not apk_path_spec.endswith('.apk'):
+ apk_path_spec += '/*.apk'
+ apk_sizes = _DuHelper(d, apk_path_spec)
+ if lib_path.endswith('/lib'):
+ # Shows architecture subdirectory.
+ lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
+ else:
+ lib_sizes = _DuHelper(d, lib_path)
+
+ # Look at all possible locations for odex files.
+ odex_paths = []
+ for apk_path in apk_sizes:
+ mangled_apk_path = apk_path[1:].replace('/', '@')
+ apk_basename = posixpath.basename(apk_path)[:-4]
+ for ext in ('dex', 'odex', 'vdex', 'art'):
+ # Easier to check all architectures than to determine active ones.
+ for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
+ odex_paths.append(
+ '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
+ # No app could possibly have more than 5 dex files.
+ for suffix in ('', '2', '3', '4', '5'):
+ odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
+ arch, mangled_apk_path, suffix, ext))
+ # This path does not have |arch|, so don't repeat it for every arch.
+ if arch == 'arm':
+ odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
+ mangled_apk_path, suffix))
+
+ odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
+
+ return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+ compilation_filter)
+
+ def print_sizes(desc, sizes):
+ print '%s: %d KiB' % (desc, sum(sizes.itervalues()))
+ for path, size in sorted(sizes.iteritems()):
+ print ' %s: %s KiB' % (path, size)
+
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
+ for result in _PrintPerDeviceOutput(devices, all_results):
+ if not result:
+ print 'APK is not installed.'
+ continue
+
+ (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+ compilation_filter) = result
+ total = sum(sum(sizes.itervalues()) for sizes in result[:-1])
+
+ print_sizes('Apk', apk_sizes)
+ print_sizes('App Data (non-code cache)', data_dir_sizes)
+ print_sizes('App Data (code cache)', code_cache_sizes)
+ print_sizes('Native Libs', lib_sizes)
+ show_warning = compilation_filter and 'speed' not in compilation_filter
+ compilation_filter = compilation_filter or 'n/a'
+ print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
+ if show_warning:
+ logging.warning('For a more realistic odex size, run:')
+ logging.warning(' %s compile-dex [speed|speed-profile]', sys.argv[0])
+ print 'Total: %s KiB (%.1f MiB)' % (total, total / 1024.0)
+
+
+class _LogcatProcessor(object):
+ ParsedLine = collections.namedtuple(
+ 'ParsedLine',
+ ['date', 'invocation_time', 'pid', 'tid', 'priority', 'tag', 'message'])
+
+ def __init__(self, device, package_name, deobfuscate=None, verbose=False):
+ self._device = device
+ self._package_name = package_name
+ self._verbose = verbose
+ self._deobfuscator = deobfuscate
+ self._primary_pid = None
+ self._my_pids = set()
+ self._seen_pids = set()
+ self._UpdateMyPids()
+
+ def _UpdateMyPids(self):
+ # We intentionally do not clear self._my_pids to make sure that the
+ # ProcessLine method below also includes lines from processes which may
+ # have already exited.
+ self._primary_pid = None
+ for process in _GetPackageProcesses(self._device, self._package_name):
+ # We take only the first "main" process found in order to account for
+ # possibly forked() processes.
+ if ':' not in process.name and self._primary_pid is None:
+ self._primary_pid = process.pid
+ self._my_pids.add(process.pid)
+
+ def _GetPidStyle(self, pid, dim=False):
+ if pid == self._primary_pid:
+ return colorama.Fore.WHITE
+ elif pid in self._my_pids:
+ # TODO(wnwen): Use one separate persistent color per process, pop LRU
+ return colorama.Fore.YELLOW
+ elif dim:
+ return colorama.Style.DIM
+ return ''
+
+ def _GetPriorityStyle(self, priority, dim=False):
+ # pylint:disable=no-self-use
+ if dim:
+ return ''
+ style = ''
+ if priority == 'E' or priority == 'F':
+ style = colorama.Back.RED
+ elif priority == 'W':
+ style = colorama.Back.YELLOW
+ elif priority == 'I':
+ style = colorama.Back.GREEN
+ elif priority == 'D':
+ style = colorama.Back.BLUE
+ return style + colorama.Fore.BLACK
+
+ def _ParseLine(self, line):
+ tokens = line.split(None, 6)
+ date = tokens[0]
+ invocation_time = tokens[1]
+ pid = int(tokens[2])
+ tid = int(tokens[3])
+ priority = tokens[4]
+ tag = tokens[5]
+ if len(tokens) > 6:
+ original_message = tokens[6]
+ else: # Empty log message
+ original_message = ''
+ # Example:
+ # 09-19 06:35:51.113 9060 9154 W GCoreFlp: No location...
+ # 09-19 06:01:26.174 9060 10617 I Auth : [ReflectiveChannelBinder]...
+ # Parsing "GCoreFlp:" vs "Auth :", we only want tag to contain the word,
+ # and we don't want to keep the colon for the message.
+ if tag[-1] == ':':
+ tag = tag[:-1]
+ else:
+ original_message = original_message[2:]
+ return self.ParsedLine(
+ date, invocation_time, pid, tid, priority, tag, original_message)
+
+ def _PrintParsedLine(self, parsed_line, dim=False):
+ tid_style = ''
+ # Make the main thread bright.
+ if not dim and parsed_line.pid == parsed_line.tid:
+ tid_style = colorama.Style.BRIGHT
+ pid_style = self._GetPidStyle(parsed_line.pid, dim)
+ # We have to pad before adding color as that changes the width of the tag.
+ pid_str = _Colorize('{:5}'.format(parsed_line.pid), pid_style)
+ tid_str = _Colorize('{:5}'.format(parsed_line.tid), tid_style)
+ tag = _Colorize('{:8}'.format(parsed_line.tag),
+ pid_style + ('' if dim else colorama.Style.BRIGHT))
+ priority = _Colorize(parsed_line.priority,
+ self._GetPriorityStyle(parsed_line.priority))
+ messages = [parsed_line.message]
+ if self._deobfuscator:
+ messages = self._deobfuscator.TransformLines(messages)
+ for message in messages:
+ message = _Colorize(message, pid_style)
+ sys.stdout.write('{} {} {} {} {} {}: {}\n'.format(
+ parsed_line.date, parsed_line.invocation_time, pid_str, tid_str,
+ priority, tag, message))
+
+ def ProcessLine(self, line, fast=False):
+ if not line or line.startswith('------'):
+ return
+ log = self._ParseLine(line)
+ if log.pid not in self._seen_pids:
+ self._seen_pids.add(log.pid)
+ if not fast:
+ self._UpdateMyPids()
+
+ owned_pid = log.pid in self._my_pids
+ if fast and not owned_pid:
+ return
+ if owned_pid and not self._verbose and log.tag == 'dalvikvm':
+ if _DALVIK_IGNORE_PATTERN.match(log.message):
+ return
+
+ if owned_pid or self._verbose or (
+ log.priority == 'F' or # Java crash dump
+ log.tag == 'ActivityManager' or # Android system
+ log.tag == 'DEBUG'): # Native crash dump
+ self._PrintParsedLine(log, not owned_pid)
+
+
+def _RunLogcat(device, package_name, mapping_path, verbose):
+ deobfuscate = None
+ if mapping_path:
+ try:
+ deobfuscate = deobfuscator.Deobfuscator(mapping_path)
+ except OSError:
+ sys.stderr.write('Error executing "bin/java_deobfuscate". '
+ 'Did you forget to build it?\n')
+ sys.exit(1)
+
+ try:
+ logcat_processor = _LogcatProcessor(
+ device, package_name, deobfuscate, verbose)
+ nonce = 'apk_operations.py nonce={}'.format(random.random())
+ device.RunShellCommand(['log', nonce])
+ fast = True
+ for line in device.adb.Logcat(logcat_format='threadtime'):
+ try:
+ logcat_processor.ProcessLine(line, fast)
+ except:
+ sys.stderr.write('Failed to process line: ' + line + '\n')
+ raise
+ if fast and nonce in line:
+ fast = False
+ except KeyboardInterrupt:
+ pass # Don't show stack trace upon Ctrl-C
+ finally:
+ if mapping_path:
+ deobfuscate.Close()
+
+
+def _GetPackageProcesses(device, package_name):
+ return [
+ p for p in device.ListProcesses(package_name)
+ if p.name == package_name or p.name.startswith(package_name + ':')]
+
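+# E.g. for a hypothetical package 'com.example.app', this keeps processes
+# named 'com.example.app' and 'com.example.app:renderer', but not
+# 'com.example.app2'.
+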
+
+def _RunPs(devices, package_name):
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ all_processes = parallel_devices.pMap(
+ lambda d: _GetPackageProcesses(d, package_name)).pGet(None)
+ for processes in _PrintPerDeviceOutput(devices, all_processes):
+ if not processes:
+ print 'No processes found.'
+ else:
+ proc_map = collections.defaultdict(list)
+ for p in processes:
+ proc_map[p.name].append(str(p.pid))
+ for name, pids in sorted(proc_map.items()):
+ print name, ','.join(pids)
+
+
+def _RunShell(devices, package_name, cmd):
+ if cmd:
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ outputs = parallel_devices.RunShellCommand(
+ cmd, run_as=package_name).pGet(None)
+ for output in _PrintPerDeviceOutput(devices, outputs):
+ for line in output:
+ print line
+ else:
+ adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
+ cmd = [adb_path, '-s', devices[0].serial, 'shell']
+ # Pre-N devices do not support -t flag.
+ if devices[0].build_version_sdk >= version_codes.NOUGAT:
+ cmd += ['-t', 'run-as', package_name]
+ else:
+ print 'Upon entering the shell, run:'
+ print 'run-as', package_name
+ print
+ os.execv(adb_path, cmd)
+
+
+def _RunCompileDex(devices, package_name, compilation_filter):
+ cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
+ package_name]
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ outputs = parallel_devices.RunShellCommand(cmd, timeout=120).pGet(None)
+ for output in _PrintPerDeviceOutput(devices, outputs):
+ for line in output:
+ print line
+
+
+def _RunProfile(device, package_name, host_build_directory, pprof_out_path,
+ process_specifier, thread_specifier, extra_args):
+ simpleperf.PrepareDevice(device)
+ device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name)
+ with tempfile.NamedTemporaryFile() as fh:
+ host_simpleperf_out_path = fh.name
+
+ with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name,
+ process_specifier, thread_specifier,
+ extra_args, host_simpleperf_out_path):
+ sys.stdout.write('Profiler is running; press Enter to stop...')
+ sys.stdin.read(1)
+ sys.stdout.write('Post-processing data...')
+ sys.stdout.flush()
+
+ simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path,
+ host_build_directory, pprof_out_path)
+ print textwrap.dedent("""
+ Profile data written to %(s)s.
+
+ To view profile as a call graph in browser:
+ pprof -web %(s)s
+
+ To print the hottest methods:
+ pprof -top %(s)s
+
+ pprof has many useful customization options; `pprof --help` for details.
+ """ % {'s': pprof_out_path})
+
+
+def _GenerateAvailableDevicesMessage(devices):
+ devices_obj = device_utils.DeviceUtils.parallel(devices)
+ descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None)
+ msg = 'Available devices:\n'
+ for d, desc in zip(devices, descriptions):
+ msg += ' %s (%s)\n' % (d, desc)
+ return msg
+
+
+# TODO(agrieve):add "--all" in the MultipleDevicesError message and use it here.
+def _GenerateMissingAllFlagMessage(devices):
+ return ('More than one device available. Use --all to select all devices, ' +
+ 'or use --device to select a device by serial.\n\n' +
+ _GenerateAvailableDevicesMessage(devices))
+
+
+def _DisplayArgs(devices, command_line_flags_file):
+ def flags_helper(d):
+ changer = flag_changer.FlagChanger(d, command_line_flags_file)
+ return changer.GetCurrentFlags()
+
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ outputs = parallel_devices.pMap(flags_helper).pGet(None)
+ print 'Existing flags per-device (via /data/local/tmp/{}):'.format(
+ command_line_flags_file)
+ for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True):
+ quoted_flags = ' '.join(pipes.quote(f) for f in flags)
+ print quoted_flags or 'No flags set.'
+
+
+def _DeviceCachePath(device, output_directory):
+ file_name = 'device_cache_%s.json' % device.serial
+ return os.path.join(output_directory, file_name)
+
+
+def _LoadDeviceCaches(devices, output_directory):
+ if not output_directory:
+ return
+ for d in devices:
+ cache_path = _DeviceCachePath(d, output_directory)
+ if os.path.exists(cache_path):
+ logging.debug('Using device cache: %s', cache_path)
+ with open(cache_path) as f:
+ d.LoadCacheData(f.read())
+ # Delete the cached file so that any exceptions cause it to be cleared.
+ os.unlink(cache_path)
+ else:
+ logging.debug('No cache present for device: %s', d)
+
+
+def _SaveDeviceCaches(devices, output_directory):
+ if not output_directory:
+ return
+ for d in devices:
+ cache_path = _DeviceCachePath(d, output_directory)
+ with open(cache_path, 'w') as f:
+ f.write(d.DumpCacheData())
+ logging.info('Wrote device cache: %s', cache_path)
+
+
+class _Command(object):
+ name = None
+ description = None
+ long_description = None
+ needs_package_name = False
+ needs_output_directory = False
+ needs_apk_path = False
+ supports_incremental = False
+ accepts_command_line_flags = False
+ accepts_args = False
+ need_device_args = True
+ all_devices_by_default = False
+ calls_exec = False
+ supports_multiple_devices = True
+
+ def __init__(self, from_wrapper_script, is_bundle):
+ self._parser = None
+ self._from_wrapper_script = from_wrapper_script
+ self.args = None
+ self.apk_helper = None
+ self.install_dict = None
+ self.devices = None
+ self.is_bundle = is_bundle
+ self.bundle_generation_info = None
+ # Only support incremental install from APK wrapper scripts.
+ if is_bundle or not from_wrapper_script:
+ self.supports_incremental = False
+
+ def RegisterBundleGenerationInfo(self, bundle_generation_info):
+ self.bundle_generation_info = bundle_generation_info
+
+ def _RegisterExtraArgs(self, subp):
+ pass
+
+ def RegisterArgs(self, parser):
+ subp = parser.add_parser(
+ self.name, help=self.description,
+ description=self.long_description or self.description,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ self._parser = subp
+ subp.set_defaults(command=self)
+ if self.need_device_args:
+ subp.add_argument('--all',
+ action='store_true',
+ default=self.all_devices_by_default,
+ help='Operate on all connected devices.',)
+ subp.add_argument('-d',
+ '--device',
+ action='append',
+ default=[],
+ dest='devices',
+ help='Target device for script to work on. Enter '
+ 'multiple times for multiple devices.')
+ subp.add_argument('-v',
+ '--verbose',
+ action='count',
+ default=0,
+ dest='verbose_count',
+ help='Verbose level (multiple times for more)')
+ group = subp.add_argument_group('%s arguments' % self.name)
+
+ if self.needs_package_name:
+ # Three cases to consider here, since later code assumes
+ # self.args.package_name always exists, even if None:
+ #
+ # - Called from a bundle wrapper script, the package_name is already
+ # set through parser.set_defaults(), so don't call add_argument()
+ # to avoid overriding its value.
+ #
+ # - Called from an apk wrapper script. The --package-name argument
+ # should not appear, but self.args.package_name will be gleaned from
+ # the --apk-path file later.
+ #
+ # - Called directly, then --package-name is required on the command-line.
+ #
+ if not self.is_bundle:
+ group.add_argument(
+ '--package-name',
+ help=argparse.SUPPRESS if self._from_wrapper_script else (
+ "App's package name."))
+
+ if self.needs_apk_path or self.needs_package_name:
+ # Adding this argument to the subparser would override the set_defaults()
+ # value set on the parent parser (even if None).
+ if not self._from_wrapper_script and not self.is_bundle:
+ group.add_argument('--apk-path',
+ required=self.needs_apk_path,
+ help='Path to .apk')
+
+ if self.supports_incremental:
+ group.add_argument('--incremental',
+ action='store_true',
+ default=False,
+ help='Always install an incremental apk.')
+ group.add_argument('--non-incremental',
+ action='store_true',
+ default=False,
+ help='Always install a non-incremental apk.')
+
+ # accepts_command_line_flags and accepts_args are mutually exclusive.
+ # argparse will throw if they are both set.
+ if self.accepts_command_line_flags:
+ group.add_argument(
+ '--args', help='Command-line flags. Use = to assign args.')
+
+ if self.accepts_args:
+ group.add_argument(
+ '--args', help='Extra arguments. Use = to assign args')
+
+ if not self._from_wrapper_script and self.accepts_command_line_flags:
+ # Provided by wrapper scripts.
+ group.add_argument(
+ '--command-line-flags-file',
+ help='Name of the command-line flags file')
+
+ self._RegisterExtraArgs(group)
+
+ def ProcessArgs(self, args):
+ self.args = args
+ # Ensure these keys always exist. They are set by wrapper scripts, but not
+ # always added when not using wrapper scripts.
+ args.__dict__.setdefault('apk_path', None)
+ args.__dict__.setdefault('incremental_json', None)
+
+ incremental_apk_path = None
+ if args.incremental_json and not (self.supports_incremental and
+ args.non_incremental):
+ with open(args.incremental_json) as f:
+ install_dict = json.load(f)
+ incremental_apk_path = os.path.join(args.output_directory,
+ install_dict['apk_path'])
+ if not os.path.exists(incremental_apk_path):
+ incremental_apk_path = None
+
+ if self.supports_incremental:
+ if args.incremental and args.non_incremental:
+ self._parser.error('Must use only one of --incremental and '
+ '--non-incremental')
+ elif args.non_incremental:
+ if not args.apk_path:
+ self._parser.error('Apk has not been built.')
+ elif args.incremental:
+ if not incremental_apk_path:
+ self._parser.error('Incremental apk has not been built.')
+ args.apk_path = None
+
+ if args.apk_path and incremental_apk_path:
+ self._parser.error('Both incremental and non-incremental apks exist. '
+ 'Select using --incremental or --non-incremental')
+
+ if ((self.needs_apk_path and not self.is_bundle) or args.apk_path or
+ incremental_apk_path):
+ if args.apk_path:
+ self.apk_helper = apk_helper.ToHelper(args.apk_path)
+ elif incremental_apk_path:
+ self.install_dict = install_dict
+ self.apk_helper = apk_helper.ToHelper(incremental_apk_path)
+ else:
+ self._parser.error('Apk is not built.')
+
+ if self.needs_package_name and not args.package_name:
+ if self.apk_helper:
+ args.package_name = self.apk_helper.GetPackageName()
+ elif self._from_wrapper_script:
+ self._parser.error('Apk is not built.')
+ else:
+ self._parser.error('One of --package-name or --apk-path is required.')
+
+ self.devices = []
+ if self.need_device_args:
+ # See https://crbug.com/887964 regarding bundle support in apk_helper.
+ abis = None
+ if not self.is_bundle and self.apk_helper is not None:
+ abis = self.apk_helper.GetAbis()
+ self.devices = device_utils.DeviceUtils.HealthyDevices(
+ device_arg=args.devices,
+ enable_device_files_cache=bool(args.output_directory),
+ default_retries=0,
+ abis=abis)
+ # TODO(agrieve): Device cache should not depend on output directory.
+ # Maybe put in /tmp?
+ _LoadDeviceCaches(self.devices, args.output_directory)
+
+ try:
+ if len(self.devices) > 1:
+ if not self.supports_multiple_devices:
+ self._parser.error(device_errors.MultipleDevicesError(self.devices))
+ if not args.all and not args.devices:
+ self._parser.error(_GenerateMissingAllFlagMessage(self.devices))
+ # Save cache now if command will not get a chance to afterwards.
+ if self.calls_exec:
+ _SaveDeviceCaches(self.devices, args.output_directory)
+ except:
+ _SaveDeviceCaches(self.devices, args.output_directory)
+ raise
+
+
+class _DevicesCommand(_Command):
+ name = 'devices'
+ description = 'Describe attached devices.'
+ all_devices_by_default = True
+
+ def Run(self):
+ print _GenerateAvailableDevicesMessage(self.devices)
+
+
+class _PackageInfoCommand(_Command):
+ name = 'package-info'
+ # TODO(ntfschr): Support this by figuring out how to construct
+ # self.apk_helper for bundles (http://crbug.com/952443).
+ description = 'Show various attributes of this APK.'
+ need_device_args = False
+ needs_package_name = True
+ needs_apk_path = True
+
+ def Run(self):
+ # Format all (even ints) as strings, to handle cases where APIs return None
+ print 'Package name: "%s"' % self.args.package_name
+ print 'versionCode: %s' % self.apk_helper.GetVersionCode()
+ print 'versionName: "%s"' % self.apk_helper.GetVersionName()
+ print 'minSdkVersion: %s' % self.apk_helper.GetMinSdkVersion()
+ print 'targetSdkVersion: "%s"' % self.apk_helper.GetTargetSdkVersion()
+ print 'Supported ABIs: %r' % self.apk_helper.GetAbis()
+
+
+class _InstallCommand(_Command):
+ name = 'install'
+ description = 'Installs the APK or bundle to one or more devices.'
+ needs_apk_path = True
+ supports_incremental = True
+
+ def _RegisterExtraArgs(self, group):
+ if self.is_bundle:
+ group.add_argument(
+ '-m',
+ '--module',
+ action='append',
+ help='Module to install. Can be specified multiple times. ' +
+ 'One of them has to be \'{}\''.format(BASE_MODULE))
+ group.add_argument(
+ '-f',
+ '--fake',
+ action='append',
+ help='Fake bundle module install. Can be specified multiple times. '
+ 'Requires \'-m {0}\' to be given, and \'-f {0}\' is illegal.'.format(
+ BASE_MODULE))
+
+ def Run(self):
+ if self.is_bundle:
+ # Store .apks file beside the .aab file so that it gets cached.
+ output_path = self.bundle_generation_info.bundle_apks_path
+ _GenerateBundleApks(self.bundle_generation_info, output_path)
+ _InstallBundle(self.devices, output_path, self.args.package_name,
+ self.args.command_line_flags_file, self.args.module,
+ self.args.fake)
+ else:
+ _InstallApk(self.devices, self.apk_helper, self.install_dict)
+
+
+class _UninstallCommand(_Command):
+ name = 'uninstall'
+ description = 'Removes the APK or bundle from one or more devices.'
+ needs_package_name = True
+
+ def Run(self):
+ _UninstallApk(self.devices, self.install_dict, self.args.package_name)
+
+
+class _SetWebViewProviderCommand(_Command):
+ name = 'set-webview-provider'
+ description = ("Sets the device's WebView provider to this APK's "
+ "package name.")
+ needs_package_name = True
+
+ def Run(self):
+ if self.is_bundle:
+ # TODO(ntfschr): Support this by figuring out how to construct
+ # self.apk_helper for bundles (http://crbug.com/952443).
+ raise Exception(
+ 'Switching WebView providers not supported for bundles yet!')
+ if not _IsWebViewProvider(self.apk_helper):
+ raise Exception('This package does not have a WebViewLibrary meta-data '
+ 'tag. Are you sure it contains a WebView implementation?')
+ _SetWebViewProvider(self.devices, self.args.package_name)
+
+
+class _LaunchCommand(_Command):
+ name = 'launch'
+ description = ('Sends a launch intent for the APK or bundle after first '
+ 'writing the command-line flags file.')
+ needs_package_name = True
+ accepts_command_line_flags = True
+ all_devices_by_default = True
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument('-w', '--wait-for-java-debugger', action='store_true',
+ help='Pause execution until debugger attaches. Applies '
+ 'only to the main process. To have renderers wait, '
+ 'use --args="--renderer-wait-for-java-debugger"')
+ group.add_argument('--debug-process-name',
+ help='Name of the process to debug. '
+ 'E.g. "privileged_process0", or "foo.bar:baz"')
+ group.add_argument('--nokill', action='store_true',
+ help='Do not set the debug-app, nor set command-line '
+ 'flags. Useful to load a URL without having the '
+ 'app restart.')
+ group.add_argument('url', nargs='?', help='A URL to launch with.')
+
+ def Run(self):
+ if self.args.url and self.is_bundle:
+ # TODO(digit): Support this, maybe by using 'dumpsys' as described
+ # in the _LaunchUrl() comment.
+ raise Exception('Launching with URL not supported for bundles yet!')
+ _LaunchUrl(self.devices, self.args.package_name, argv=self.args.args,
+ command_line_flags_file=self.args.command_line_flags_file,
+ url=self.args.url, apk=self.apk_helper,
+ wait_for_java_debugger=self.args.wait_for_java_debugger,
+ debug_process_name=self.args.debug_process_name,
+ nokill=self.args.nokill)
+
+
+class _StopCommand(_Command):
+ name = 'stop'
+ description = 'Force-stops the app.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def Run(self):
+ device_utils.DeviceUtils.parallel(self.devices).ForceStop(
+ self.args.package_name)
+
+
+class _ClearDataCommand(_Command):
+ name = 'clear-data'
+ description = 'Clears all app data.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def Run(self):
+ device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
+ self.args.package_name)
+
+
+class _ArgvCommand(_Command):
+ name = 'argv'
+ description = 'Display and optionally update command-line flags file.'
+ needs_package_name = True
+ accepts_command_line_flags = True
+ all_devices_by_default = True
+
+ def Run(self):
+ _ChangeFlags(self.devices, self.args.args,
+ self.args.command_line_flags_file)
+
+
+class _GdbCommand(_Command):
+ name = 'gdb'
+ description = 'Runs //build/android/adb_gdb with apk-specific args.'
+ long_description = description + """
+
+To attach to a process other than the APK's main process, use --pid=1234.
+To list all PIDs, use the "ps" command.
+
+If no apk process is currently running, sends a launch intent.
+"""
+ needs_package_name = True
+ needs_output_directory = True
+ calls_exec = True
+ supports_multiple_devices = False
+
+ def Run(self):
+ _RunGdb(self.devices[0], self.args.package_name,
+ self.args.debug_process_name, self.args.pid,
+ self.args.output_directory, self.args.target_cpu, self.args.port,
+ self.args.ide, bool(self.args.verbose_count))
+
+ def _RegisterExtraArgs(self, group):
+ pid_group = group.add_mutually_exclusive_group()
+ pid_group.add_argument('--debug-process-name',
+ help='Name of the process to attach to. '
+ 'E.g. "privileged_process0", or "foo.bar:baz"')
+ pid_group.add_argument('--pid',
+ help='The process ID to attach to. Defaults to '
+ 'the main process for the package.')
+ group.add_argument('--ide', action='store_true',
+ help='Rather than enter a gdb prompt, set up the '
+ 'gdb connection and wait for an IDE to '
+ 'connect.')
+ # Same default port that ndk-gdb.py uses.
+ group.add_argument('--port', type=int, default=5039,
+ help='Use the given port for the GDB connection')
+
+
+class _LogcatCommand(_Command):
+ name = 'logcat'
+ description = 'Runs "adb logcat" with filters relevant to the current APK.'
+ long_description = description + """
+
+"Relevant filters" means:
+ * Log messages from processes belonging to the apk,
+ * Plus log messages from log tags: ActivityManager|DEBUG,
+ * Plus fatal logs from any process,
+ * Minus spammy dalvikvm logs (for pre-L devices).
+
+Colors:
+ * Primary process is white
+ * Other processes (gpu, renderer) are yellow
+ * Non-apk processes are grey
+ * UI thread has a bolded Thread-ID
+
+Java stack traces are detected and deobfuscated (for release builds).
+
+To disable filtering (but keep coloring), use --verbose.
+"""
+ needs_package_name = True
+ supports_multiple_devices = False
+
+ def Run(self):
+ mapping = self.args.proguard_mapping_path
+ if self.args.no_deobfuscate:
+ mapping = None
+ _RunLogcat(self.devices[0], self.args.package_name, mapping,
+ bool(self.args.verbose_count))
+
+ def _RegisterExtraArgs(self, group):
+ if self._from_wrapper_script:
+ group.add_argument('--no-deobfuscate', action='store_true',
+ help='Disables ProGuard deobfuscation of logcat.')
+ else:
+ group.set_defaults(no_deobfuscate=False)
+ group.add_argument('--proguard-mapping-path',
+ help='Path to ProGuard map (enables deobfuscation)')
+
+
+class _PsCommand(_Command):
+ name = 'ps'
+ description = 'Show PIDs of any APK processes currently running.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def Run(self):
+ _RunPs(self.devices, self.args.package_name)
+
+
+class _DiskUsageCommand(_Command):
+ name = 'disk-usage'
+ description = 'Show how much device storage is being consumed by the app.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def Run(self):
+ _RunDiskUsage(self.devices, self.args.package_name)
+
+
+class _MemUsageCommand(_Command):
+ name = 'mem-usage'
+ description = 'Show memory usage of currently running APK processes.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument('--query-app', action='store_true',
+ help='Do not add --local to "dumpsys meminfo". This will output '
+ 'additional metrics (e.g. Context count), but also cause memory '
+ 'to be used in order to gather the metrics.')
+
+ def Run(self):
+ _RunMemUsage(self.devices, self.args.package_name,
+ query_app=self.args.query_app)
+
+
+class _ShellCommand(_Command):
+ name = 'shell'
+ description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
+ '(via run-as). Useful for inspecting the app\'s data '
+ 'directory.')
+ needs_package_name = True
+
+ @property
+ def calls_exec(self):
+ return not self.args.cmd
+
+ @property
+ def supports_multiple_devices(self):
+ return not self.args.cmd
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument(
+ 'cmd', nargs=argparse.REMAINDER, help='Command to run.')
+
+ def Run(self):
+ _RunShell(self.devices, self.args.package_name, self.args.cmd)
+
+
+class _CompileDexCommand(_Command):
+ name = 'compile-dex'
+ description = ('Applicable only for Android N+. Forces .odex files to be '
+ 'compiled with the given compilation filter. To see existing '
+ 'filter, use "disk-usage" command.')
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument(
+ 'compilation_filter',
+ choices=['verify', 'quicken', 'space-profile', 'space',
+ 'speed-profile', 'speed'],
+ help='For WebView/Monochrome, use "speed". For other apks, use '
+ '"speed-profile".')
+
+ def Run(self):
+ _RunCompileDex(self.devices, self.args.package_name,
+ self.args.compilation_filter)
+
+
+class _ProfileCommand(_Command):
+ name = 'profile'
+ description = ('Run the simpleperf sampling CPU profiler on the currently-'
+ 'running APK. If --args is used, the extra arguments will be '
+ 'passed on to simpleperf; otherwise, the following default '
+ 'arguments are used: -g -f 1000 -o /data/local/tmp/perf.data')
+ needs_package_name = True
+ needs_output_directory = True
+ supports_multiple_devices = False
+ accepts_args = True
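+
+ # Hypothetical example: `profile --profile-process renderer
+ # --profile-thread main --profile-output /tmp/renderer.pb` samples the
+ # renderer's main thread using the default simpleperf arguments above.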
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument(
+ '--profile-process', default='browser',
+ help=('Which process to profile. This may be a process name or pid '
+ 'such as you would get from running `%s ps`; or '
+ 'it can be one of (browser, renderer, gpu).' % sys.argv[0]))
+ group.add_argument(
+ '--profile-thread', default=None,
+ help=('(Optional) Profile only a single thread. This may be either a '
+ 'thread ID such as you would get by running `adb shell ps -t` '
+ '(pre-Oreo) or `adb shell ps -e -T` (Oreo and later); or it may '
+ 'be one of (io, compositor, main, render), in which case '
+ '--profile-process is also required. (Note that "render" thread '
+ 'refers to a thread in the browser process that manages a '
+ 'renderer; to profile the main thread of the renderer process, '
+ 'use --profile-thread=main).'))
+ group.add_argument('--profile-output', default='profile.pb',
+ help='Output file for profiling data')
+
+ def Run(self):
+ extra_args = shlex.split(self.args.args or '')
+ _RunProfile(self.devices[0], self.args.package_name,
+ self.args.output_directory, self.args.profile_output,
+ self.args.profile_process, self.args.profile_thread,
+ extra_args)
+
+
+class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand):
+ name = 'run'
+ description = 'Install, launch, and show logcat (when targeting one device).'
+ all_devices_by_default = False
+ supports_multiple_devices = True
+
+ def _RegisterExtraArgs(self, group):
+ _InstallCommand._RegisterExtraArgs(self, group)
+ _LaunchCommand._RegisterExtraArgs(self, group)
+ _LogcatCommand._RegisterExtraArgs(self, group)
+ group.add_argument('--no-logcat', action='store_true',
+ help='Install and launch, but do not enter logcat.')
+
+ def Run(self):
+ logging.warning('Installing...')
+ _InstallCommand.Run(self)
+ logging.warning('Sending launch intent...')
+ _LaunchCommand.Run(self)
+ if len(self.devices) == 1 and not self.args.no_logcat:
+ logging.warning('Entering logcat...')
+ _LogcatCommand.Run(self)
+
+
+class _BuildBundleApks(_Command):
+ name = 'build-bundle-apks'
+ description = ('Build the .apks archive from an Android app bundle, and '
+ 'optionally copy it to a specific destination.')
+ need_device_args = False
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument(
+ '--output-apks', required=True, help='Destination path for .apks file.')
+ group.add_argument(
+ '--minimal',
+ action='store_true',
+ help='Build .apks archive that targets the bundle\'s minSdkVersion and '
+ 'contains only English splits. It still contains optional splits.')
+ group.add_argument(
+ '--sdk-version',
+ help='Implies --minimal. The sdkVersion to build the .apks for.')
+ group.add_argument(
+ '--build-mode',
+ choices=app_bundle_utils.BUILD_APKS_MODES,
+ help='Specify which type of APKs archive to build. "default" '
+ 'generates regular splits, "universal" generates an archive with a '
+ 'single universal APK, "system" generates an archive with a system '
+ 'image APK, while "system_compressed" generates a compressed system '
+ 'APK, with an additional stub APK for the system image.')
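+ # Hypothetical example: `build-bundle-apks --output-apks Foo.apks
+ # --build-mode universal` writes an archive containing a single
+ # universal APK.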
+
+ def Run(self):
+ _GenerateBundleApks(
+ self.bundle_generation_info,
+ self.args.output_apks,
+ minimal=self.args.sdk_version is not None or self.args.minimal,
+ minimal_sdk_version=self.args.sdk_version,
+ mode=self.args.build_mode)
+
+
+class _ManifestCommand(_Command):
+ name = 'dump-manifest'
+ description = 'Dump the Android manifest from this bundle, as XML, to stdout.'
+ need_device_args = False
+
+ def Run(self):
+ bundletool.RunBundleTool([
+ 'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path
+ ])
+
+
+# Shared commands for regular APKs and app bundles.
+_COMMANDS = [
+ _DevicesCommand,
+ _PackageInfoCommand,
+ _InstallCommand,
+ _UninstallCommand,
+ _SetWebViewProviderCommand,
+ _LaunchCommand,
+ _StopCommand,
+ _ClearDataCommand,
+ _ArgvCommand,
+ _GdbCommand,
+ _LogcatCommand,
+ _PsCommand,
+ _DiskUsageCommand,
+ _MemUsageCommand,
+ _ShellCommand,
+ _CompileDexCommand,
+ _ProfileCommand,
+ _RunCommand,
+]
+
+# Commands specific to app bundles.
+_BUNDLE_COMMANDS = [
+ _BuildBundleApks,
+ _ManifestCommand,
+]
+
+
+def _ParseArgs(parser, from_wrapper_script, is_bundle):
+ subparsers = parser.add_subparsers()
+ command_list = _COMMANDS + (_BUNDLE_COMMANDS if is_bundle else [])
+ commands = [clazz(from_wrapper_script, is_bundle) for clazz in command_list]
+
+ for command in commands:
+ if from_wrapper_script or not command.needs_output_directory:
+ command.RegisterArgs(subparsers)
+
+ # Show extended help when no command is passed.
+ argv = sys.argv[1:]
+ if not argv:
+ argv = ['--help']
+
+ return parser.parse_args(argv)
+
+
+def _RunInternal(parser, output_directory=None, bundle_generation_info=None):
+ colorama.init()
+ parser.set_defaults(output_directory=output_directory)
+ from_wrapper_script = bool(output_directory)
+ args = _ParseArgs(parser, from_wrapper_script, bool(bundle_generation_info))
+ run_tests_helper.SetLogLevel(args.verbose_count)
+ args.command.ProcessArgs(args)
+ if bundle_generation_info:
+ args.command.RegisterBundleGenerationInfo(bundle_generation_info)
+ args.command.Run()
+ # Incremental install depends on the cache being cleared when uninstalling.
+ if args.command.name != 'uninstall':
+ _SaveDeviceCaches(args.command.devices, output_directory)
+
+
+def Run(output_directory, apk_path, incremental_json, command_line_flags_file,
+ target_cpu, proguard_mapping_path):
+ """Entry point for generated wrapper scripts."""
+ constants.SetOutputDirectory(output_directory)
+ devil_chromium.Initialize(output_directory=output_directory)
+ parser = argparse.ArgumentParser()
+ exists_or_none = lambda p: p if p and os.path.exists(p) else None
+ parser.set_defaults(
+ command_line_flags_file=command_line_flags_file,
+ target_cpu=target_cpu,
+ apk_path=exists_or_none(apk_path),
+ incremental_json=exists_or_none(incremental_json),
+ proguard_mapping_path=proguard_mapping_path)
+ _RunInternal(parser, output_directory=output_directory)
+
+
+def RunForBundle(output_directory, bundle_path, bundle_apks_path, aapt2_path,
+ keystore_path, keystore_password, keystore_alias, package_name,
+ command_line_flags_file, proguard_mapping_path, target_cpu,
+ system_image_locales):
+ """Entry point for generated app bundle wrapper scripts.
+
+ Args:
+ output_directory: Chromium output directory path.
+ bundle_path: Input bundle path.
+ bundle_apks_path: Output bundle .apks archive path.
+ aapt2_path: Aapt2 tool path.
+ keystore_path: Keystore file path.
+ keystore_password: Keystore password.
+ keystore_alias: Signing key name alias in keystore file.
+ package_name: Application's package name.
+ command_line_flags_file: Optional. Name of an on-device file that will be
+ used to store command-line flags for this bundle.
+ proguard_mapping_path: Input path to the Proguard mapping file, used to
+ deobfuscate Java stack traces.
+ target_cpu: Chromium target CPU name, used by the 'gdb' command.
+ system_image_locales: List of Chromium locales that should be included in
+ system image APKs.
+ """
+ constants.SetOutputDirectory(output_directory)
+ devil_chromium.Initialize(output_directory=output_directory)
+ bundle_generation_info = BundleGenerationInfo(
+ bundle_path=bundle_path,
+ bundle_apks_path=bundle_apks_path,
+ aapt2_path=aapt2_path,
+ keystore_path=keystore_path,
+ keystore_password=keystore_password,
+ keystore_alias=keystore_alias,
+ system_image_locales=system_image_locales)
+
+ parser = argparse.ArgumentParser()
+ parser.set_defaults(
+ package_name=package_name,
+ command_line_flags_file=command_line_flags_file,
+ proguard_mapping_path=proguard_mapping_path,
+ target_cpu=target_cpu)
+ _RunInternal(parser, output_directory=output_directory,
+ bundle_generation_info=bundle_generation_info)
+
+
+def main():
+ devil_chromium.Initialize()
+ _RunInternal(argparse.ArgumentParser(), output_directory=None)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/apply_shared_preference_file.py b/deps/v8/build/android/apply_shared_preference_file.py
new file mode 100755
index 0000000000..b224081957
--- /dev/null
+++ b/deps/v8/build/android/apply_shared_preference_file.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manually applies a shared preference JSON file.
+
+If needed during automation, use the --shared-prefs-file flag in
+test_runner.py instead.
+"""
+
+import argparse
+import sys
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
+from devil.android import device_utils
+from devil.android.sdk import shared_prefs
+from pylib.utils import shared_preference_utils
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Manually apply shared preference JSON files.')
+ parser.add_argument('filepaths', nargs='*',
+ help='Any number of paths to shared preference JSON '
+ 'files to apply.')
+ args = parser.parse_args()
+
+ all_devices = device_utils.DeviceUtils.HealthyDevices()
+ if not all_devices:
+ raise RuntimeError('No healthy devices attached')
+
+ for filepath in args.filepaths:
+ all_settings = shared_preference_utils.ExtractSettingsFromJson(filepath)
+ for setting in all_settings:
+ for device in all_devices:
+ shared_pref = shared_prefs.SharedPrefs(
+ device, setting['package'], setting['filename'],
+ use_encrypted_path=setting.get('supports_encrypted_path', False))
+ shared_preference_utils.ApplySharedPreferenceSetting(
+ shared_pref, setting)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/asan_symbolize.py b/deps/v8/build/android/asan_symbolize.py
new file mode 100755
index 0000000000..9f2e88a60d
--- /dev/null
+++ b/deps/v8/build/android/asan_symbolize.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+# pylint: disable=wrong-import-order
+# Uses symbol.py from third_party/android_platform, not python's.
+with host_paths.SysPath(
+ host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+ position=0):
+ import symbol
+
+
+_RE_ASAN = re.compile(r'(.*?)(#\S*?)\s+(\S*?)\s+\((.*?)\+(.*?)\)')
+
+# This named tuple models a parsed Asan log line.
+AsanParsedLine = collections.namedtuple('AsanParsedLine',
+ 'prefix,library,pos,rel_address')
+
+# This named tuple models an Asan log line. 'raw' is the raw content
+# while 'parsed' is None or an AsanParsedLine instance.
+AsanLogLine = collections.namedtuple('AsanLogLine', 'raw,parsed')
+
+def _ParseAsanLogLine(line):
+ """Parse line into corresponding AsanParsedLine value, if any, or None."""
+ m = re.match(_RE_ASAN, line)
+ if not m:
+ return None
+ return AsanParsedLine(prefix=m.group(1),
+ library=m.group(4),
+ pos=m.group(2),
+ rel_address='%08x' % int(m.group(5), 16))
+
+def _FindASanLibraries():
+ asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'third_party', 'llvm-build',
+ 'Release+Asserts', 'lib')
+ asan_libs = []
+ for src_dir, _, files in os.walk(asan_lib_dir):
+ asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+ for f in files
+ if f.endswith('.so')]
+ return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+ for asan_lib in asan_libs:
+ if os.path.basename(library) == os.path.basename(asan_lib):
+ return '/' + asan_lib
+ # pylint: disable=no-member
+ return symbol.TranslateLibPath(library)
+
+
+def _PrintSymbolized(asan_input, arch):
+ """Print symbolized logcat output for Asan symbols.
+
+ Args:
+ asan_input: list of input lines.
+ arch: Target CPU architecture.
+ """
+ asan_libs = _FindASanLibraries()
+
+ # Maps library -> [ AsanParsedLine... ]
+ libraries = collections.defaultdict(list)
+
+ asan_log_lines = []
+ for line in asan_input:
+ line = line.rstrip()
+ parsed = _ParseAsanLogLine(line)
+ if parsed:
+ libraries[parsed.library].append(parsed)
+ asan_log_lines.append(AsanLogLine(raw=line, parsed=parsed))
+
+ # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
+ all_symbols = collections.defaultdict(dict)
+
+ for library, items in libraries.iteritems():
+ libname = _TranslateLibPath(library, asan_libs)
+ lib_relative_addrs = set([i.rel_address for i in items])
+ # pylint: disable=no-member
+ info_dict = symbol.SymbolInformationForSet(libname,
+ lib_relative_addrs,
+ True,
+ cpu_arch=arch)
+ if info_dict:
+ all_symbols[library] = info_dict
+
+ for log_line in asan_log_lines:
+ m = log_line.parsed
+ if (m and m.library in all_symbols and
+ m.rel_address in all_symbols[m.library]):
+ # NOTE: all_symbols[lib][address] is a never-empty list of tuples.
+ # NOTE: The documentation for SymbolInformationForSet() indicates
+ # that usually one wants to display the last list item, not the first.
+ # The code below takes the first; is this the best choice here?
+ s = all_symbols[m.library][m.rel_address][0]
+ print '%s%s %s %s' % (m.prefix, m.pos, s[0], s[1])
+ else:
+ print log_line.raw
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-l', '--logcat',
+ help='File containing adb logcat output with ASan stacks. '
+ 'Use stdin if not specified.')
+ parser.add_option('--output-directory',
+ help='Path to the root build directory.')
+ parser.add_option('--arch', default='arm',
+ help='CPU architecture name')
+ options, _ = parser.parse_args()
+
+ if options.output_directory:
+ constants.SetOutputDirectory(options.output_directory)
+ # Do an up-front test that the output directory is known.
+ constants.CheckOutputDirectory()
+
+ if options.logcat:
+ asan_input = open(options.logcat, 'r')
+ else:
+ asan_input = sys.stdin
+
+ _PrintSymbolized(asan_input.readlines(), options.arch)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/deps/v8/build/android/binary_size/OWNERS b/deps/v8/build/android/binary_size/OWNERS
new file mode 100644
index 0000000000..c964495a78
--- /dev/null
+++ b/deps/v8/build/android/binary_size/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/binary_size/__init__.py b/deps/v8/build/android/binary_size/__init__.py
new file mode 100644
index 0000000000..a22a6ee39a
--- /dev/null
+++ b/deps/v8/build/android/binary_size/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/binary_size/apk_downloader.py b/deps/v8/build/android/binary_size/apk_downloader.py
new file mode 100755
index 0000000000..aa7d12fdc1
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apk_downloader.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+import zipfile
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+from util import build_utils
+
+sys.path.append(
+ os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools'))
+import download_from_google_storage
+import upload_to_google_storage
+
+CURRENT_MILESTONE = '67'
+DEFAULT_BUCKET = 'gs://chromium-android-tools/apks'
+DEFAULT_DOWNLOAD_PATH = os.path.join(os.path.dirname(__file__), 'apks')
+DEFAULT_BUILDER = 'Android_Builder'
+DEFAULT_APK = 'MonochromePublic.apk'
+_ALL_BUILDER_APKS = {
+ 'Android Builder': ['ChromePublic.apk', 'ChromeModernPublic.apk',
+ 'MonochromePublic.apk'],
+ 'Android arm64 Builder': ['ChromePublic.apk', 'ChromeModernPublic.apk'],
+}
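+# Note: the keys above are buildbot names (with spaces); _UpdateReferenceApks
+# maps them to the underscore-separated directory names used on disk and in
+# the GS bucket (e.g. 'Android Builder' -> 'Android_Builder').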
+
+
+def MaybeDownloadApk(builder, milestone, apk, download_path, bucket):
+ """Returns path to the downloaded APK or None if not found."""
+ apk_path = os.path.join(download_path, builder, milestone, apk)
+ sha1_path = apk_path + '.sha1'
+ base_url = os.path.join(bucket, builder, milestone)
+ if os.path.exists(apk_path):
+ print '%s already exists' % apk_path
+ return apk_path
+ elif not os.path.exists(sha1_path):
+ print 'Skipping %s, file not found' % sha1_path
+ return None
+ else:
+ download_from_google_storage.download_from_google_storage(
+ input_filename=sha1_path,
+ sha1_file=sha1_path,
+ base_url=base_url,
+ gsutil=download_from_google_storage.Gsutil(
+ download_from_google_storage.GSUTIL_DEFAULT_PATH),
+ num_threads=1,
+ directory=False,
+ recursive=False,
+ force=False,
+ output=apk_path,
+ ignore_errors=False,
+ verbose=True,
+ auto_platform=False,
+ extract=False)
+ return apk_path
+
+
+def _UpdateReferenceApks(milestones):
+ """Update reference APKs and creates .sha1 files ready for commit.
+
+ Will fail if perf builders were broken for the given milestone (use next
+ passing build in this case).
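+
+ Args:
+   milestones: List of (milestone, crrev) string pairs as collected by the
+     --update flag, e.g. ('63', '508578').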
+ """
+ with build_utils.TempDir() as temp_dir:
+ for milestone, crrev in milestones:
+ for builder, apks in _ALL_BUILDER_APKS.iteritems():
+ tools_builder_path = builder.replace(' ', '_')
+ zip_path = os.path.join(temp_dir, 'build_product.zip')
+ commit = build_utils.CheckOutput(['git', 'crrev-parse', crrev]).strip()
+ # Download build product from perf builders.
+ build_utils.CheckOutput([
+ 'gsutil', 'cp', 'gs://chrome-perf/%s/full-build-linux_%s.zip' % (
+ builder, commit), zip_path])
+
+ # Extract desired .apks.
+ with zipfile.ZipFile(zip_path) as z:
+ in_zip_paths = z.namelist()
+ out_dir = os.path.commonprefix(in_zip_paths)
+ for apk_name in apks:
+ output_path = os.path.join(
+ DEFAULT_DOWNLOAD_PATH, tools_builder_path, milestone)
+ apk_path = os.path.join(out_dir, 'apks', apk_name)
+ zip_info = z.getinfo(apk_path)
+ # Extract under the bare APK name rather than its full in-zip path.
+ zip_info.filename = apk_name
+ z.extract(zip_info, output_path)
+ input_files = [os.path.join(output_path, apk_name)]
+ bucket_path = os.path.join(
+ DEFAULT_BUCKET, tools_builder_path, milestone)
+
+ # Upload .apks to chromium-android-tools so that they aren't
+ # automatically removed in the future.
+ upload_to_google_storage.upload_to_google_storage(
+ input_files,
+ bucket_path,
+ upload_to_google_storage.Gsutil(
+ upload_to_google_storage.GSUTIL_DEFAULT_PATH),
+ False, # force
+ False, # use_md5
+ 10, # num_threads
+ False, # skip_hashing
+ None) # gzip
+
+
+def main():
+ argparser = argparse.ArgumentParser(
+ description='Utility for downloading archived APKs used for measuring '
+ 'per-milestone patch size growth.',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ argparser.add_argument('--download-path', default=DEFAULT_DOWNLOAD_PATH,
+ help='Directory to store downloaded APKs.')
+ argparser.add_argument('--milestone', default=CURRENT_MILESTONE,
+ help='Download reference APK for this milestone.')
+ argparser.add_argument('--apk', default=DEFAULT_APK, help='APK name.')
+ argparser.add_argument('--builder', default=DEFAULT_BUILDER,
+ help='Builder name.')
+ argparser.add_argument('--bucket', default=DEFAULT_BUCKET,
+ help='Google storage bucket where APK is stored.')
+ argparser.add_argument('--update', action='append', nargs=2,
+ help='List of MILESTONE CRREV pairs to upload '
+ 'reference APKs for. Mutually exclusive with '
+ 'downloading reference APKs.')
+ args = argparser.parse_args()
+ if args.update:
+ _UpdateReferenceApks(args.update)
+ else:
+ MaybeDownloadApk(args.builder, args.milestone, args.apk,
+ args.download_path, args.bucket)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..c2629a666a
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+f6a9731abe16df80a4026843a850d3c721414b96 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..8c0ab5d5cd
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+a168708620b6370e0325a00c0bc3b4b53ad86a18 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..119dc0ec48
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+75bc1faae7eff3c3781d1e0343414c1e42d8aeef \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..28ddb43013
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+3e9673008a930aa8bb2bcd7e26f8da91a0448ec3 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..935e09d2ba
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+ec034225a5e637fc83944b5ada634aba8075d1b2 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..8ac82edc40
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+68925703102a2ff5a55e3b00e90a086dfd6d7ee6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..2e24ec0d2f
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+286671da954e55a708481042276d209f769d3af8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..4ed75c8cec
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b7d2c8299b263d33e76b9d845bfea7631cca1fc2 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..caee66cb49
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+e292bd4db30461f755188059a5ca31053b6720b8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..a51316d5b9
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+bfa2c1c1750c5d6bbe40591ac8914a3b848d4e5b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..e85b68e78c
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+c546e0cccae5b2da3834466337f7c2872889e6c8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..10cc649d7d
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+3b847e7aeafc265640d5092ef40852cf47fe743b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..b26d7382ab
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+d4f1347c5baea6ebb296b450cf54abb629030328 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..0de158d0c8
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b2644c78846d1297c61bf75b1543d74e24f777b6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..4473f6e275
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+80413a607555c33960241ef10f5897b02383fe2e \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..3c50afe5b3
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+a819a1ca4da0ae9e3accdbf38a6fad00814ab1c9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..dd379384bb
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+8023a7eeaf30a4cdbdfcc309e6f89ad99cf0e9d0 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..0a1ff4dc77
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+f726d0a312f934b94fb6b8e9e2e6a81011cdf922 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..92ec9d1edf
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+e7b3d22b0d20ceec9307cbf734bc6ee98b2433e3 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..b3f545e615
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+1ec72262d91b86f3569bdf6b6ca49a6212c629d8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..fb8ac39faa
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+8fe3e9821006688f8caebfaae912c19338b8641a \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..92e9519e36
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+98af8f2265f68dea89eecd1d93410fd36bf233c4 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7bbf28ae22
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+d5c209e967fa2f6b98fd4e3d6882a85ad67a9d87 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..123d071a52
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+a5059a9bbe1dd9b6750bee7c2728a366bf9864e9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..696f05d0ec
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+f7cd8a90f4b0f038e3c4165c9111757bbd357e23 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..406a044a3f
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+415533c239b40780156fed15c4760b748b8752a5 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..7ecb21ee98
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+743e67eda110f303e48826d059352dac7e6837a4 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..01556e8fda
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+f0bdfb932b1136c408f7a4f4ba8054e98b128eb6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7319f7dcd9
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+4af2534cd223be64c17f2ffff04340e093690d78 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..23b3125bcb
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+ef9cce8af1cc4fb533e5a19c206cc40185602a7f \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..2127fdc963
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+b266b4168dd50384c39b57a6ac9c6c260edd225f \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..05fd194135
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+443888b240953a9c0cac582b82587e06cc9c1af9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..563969c3aa
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+7c930a47e9b48c21bd2c09cddc6e9326b8dc15ef \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..432f6aef87
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+c980519b19f3eb010fe0e54a436272e3c94656be \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..aa40702ea6
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+c1f4797decdd33465d671cf2fb5f266f4c9e1c7a \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7e7ddf5b81
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+47f902113feb297714ba22d25d7cb51754990923 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..b97041ab12
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+255104059ee2e51541d67a504ff22327b945768b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..bd8ffec678
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+0a18193a6534ccc31deaffecb817b8b6c991647a \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..1026ad71b5
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+62897dbec4761546b9c94e258c99c51dc816c1a9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..05052bd971
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+9f2e5aee1ca7d8a88e05587137a0f4859e76296b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..ad52e8ef8b
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+241970643e7cc8a5078946092298229bce422c5c \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..3d1dff8612
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+3dd1e9be5c64c97e3a5d93b7c43e0cddb5548003 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..13d4b6a003
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+8e30941b4fc3a33701dba26aac2d686120bb9588 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..4e72ae79e4
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+d2ed32c20b65f86a839cc01839a6f258e8fe909f \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..0f14fdca3d
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+b7d6363def07dbff34424f7df52bfe492b354569 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..9f8d745357
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+f46c29b53910ec5094860d820b8b99540706e9e4 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..a19ee6507e
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+172e8f53695780960ab50908b262c7763bbd2bad \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..739baa857c
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+4db69a57e34287ab21e8f5fae282cf861316384c \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..e0df57bb81
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+3e4b8fd75aea0a22fa769c6686dd153d72741e91 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..e65c26f9ab
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b7b222a6650e98dda980dd9bc922763e4102d0b6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..e3921ca31a
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+112273d8bb6942a89d47f4ada5f85cc9d5e69073 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7c78ee8393
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+fde7bd26a11ab8d301efe1ae7115c615f03ed3d0 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..b9960e07c8
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+1abaf4fea673cd75031ee6bcb8382abc7ffe2b92 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..98445823cf
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+ce72e18b9a9028cbde71a3ab36580d6bf3c8e531 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..6d8e57c62f
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+9ecb48a44c57c9bbbe85dcf0d9cd446529640807 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..a06d22d349
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+60f7ca1ebf019bf3a77b372c5f4c80592e36b942 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/README.md b/deps/v8/build/android/binary_size/apks/README.md
new file mode 100644
index 0000000000..d6e4f58de6
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/README.md
@@ -0,0 +1,45 @@
+## Milestone Reference APKs
+
+This folder contains APKs for official (upstream) builds for each milestone.
+The primary use for these APKs is per-milestone binary size analysis.
+ * `//build/android/resource_sizes.py` uses them for calculating patch size
+ * They can be used with `tools/binary_size/diagnose_bloat.py` for analyzing
+ what grew in an APK milestone-to-milestone
+
+## Downloading Reference APKs
+
+```bash
+# Downloads ARM 32 MonochromePublic.apk for the latest milestone that we've
+# uploaded APKs for.
+build/android/binary_size/apk_downloader.py
+
+# Print usage and see all options.
+build/android/binary_size/apk_downloader.py -h
+```
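+
+A specific builder/milestone/APK combination can also be requested explicitly
+(flag names as defined in `apk_downloader.py`):
+
+```bash
+# Downloads the M64 arm64 ChromePublic.apk reference APK.
+build/android/binary_size/apk_downloader.py --builder Android_arm64_Builder \
+  --milestone 64 --apk ChromePublic.apk
+```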
+
+## Updating Reference APKs
+```bash
+# Downloads build products from perf builders and uploads the following APKs
+# for M62 and M63:
+# ARM 32 - ChromePublic.apk, ChromeModernPublic.apk, MonochromePublic.apk
+# ARM 64 - ChromePublic.apk ChromeModernPublic.apk
+build/android/binary_size/apk_downloader.py --update 63 508578 --update 62 499187
+```
+
+ * **Remember to commit the generated .sha1 files, update the
+ CURRENT_MILESTONE variable in apk_downloader.py, and update the list of
+ revisions below**
+
+## Chromium revisions for each APK
+ * [M56](https://crrev.com/433059)
+ * [M57](https://crrev.com/444943)
+ * [M58](https://crrev.com/454471)
+ * [M59](https://crrev.com/464641)
+ * [M60](https://crrev.com/474934)
+ * [M61](https://crrev.com/488528)
+ * [M62](https://crrev.com/499187)
+ * [M63](https://crrev.com/508578)
+ * [M64](https://crrev.com/520840)
+ * [M65](https://crrev.com/530369)
+ * [M66](https://crrev.com/540276)
+ * [M67](https://crrev.com/550428)
diff --git a/deps/v8/build/android/buildhooks/BUILD.gn b/deps/v8/build/android/buildhooks/BUILD.gn
new file mode 100644
index 0000000000..0ccd4ce2d8
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/BUILD.gn
@@ -0,0 +1,58 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+import("//build/config/android/config.gni")
+
+java_library("build_hooks_java") {
+ emma_never_instrument = true
+ java_files = [ "java/org/chromium/build/BuildHooks.java" ]
+
+ # Make all targets pull in the try-with-resources support files.
+ # If an apk ends up not using any such statements, ProGuard will remove
+ # them.
+ deps = [
+ "//third_party/bazel/desugar:desugar_runtime_java",
+ ]
+ srcjar_deps = [ ":base_build_hooks_config" ]
+ no_build_hooks = true
+ supports_android = true
+}
+
+java_cpp_template("base_build_hooks_config") {
+ sources = [
+ "java/templates/BuildHooksConfig.template",
+ ]
+ package_path = "org/chromium/build"
+
+ defines = []
+ if (report_java_assert) {
+ defines += [ "_REPORT_JAVA_ASSERT" ]
+ }
+}
+
+build_hooks_android_impl = "java/org/chromium/build/BuildHooksAndroidImpl.java"
+
+android_library("build_hooks_android_java") {
+ emma_never_instrument = true
+ java_files = [
+ "java/org/chromium/build/BuildHooksAndroid.java",
+ build_hooks_android_impl,
+ ]
+
+ jar_excluded_patterns = [ "*/BuildHooksAndroidImpl.class" ]
+ no_build_hooks = true
+ proguard_configs = [ "proguard/build_hooks_android_impl.flags" ]
+}
+
+# This default implementation is used if an android_apk target doesn't
+# specify a different implementation via build_hooks_android_impl_deps.
+android_library("build_hooks_android_impl_java") {
+ emma_never_instrument = true
+ java_files = [ build_hooks_android_impl ]
+ deps = [
+ ":build_hooks_android_java",
+ ]
+ no_build_hooks = true
+}
diff --git a/deps/v8/build/android/buildhooks/OWNERS b/deps/v8/build/android/buildhooks/OWNERS
new file mode 100644
index 0000000000..c964495a78
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java
new file mode 100644
index 0000000000..7364898699
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java
@@ -0,0 +1,50 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+/**
+ * All Java targets that support Android depend on this class.
+ */
+public abstract class BuildHooks {
+ /**
+ * Defines an interface for reporting assertion errors.
+ */
+ @FunctionalInterface
+ public interface ReportAssertionCallback {
+ void run(AssertionError arg);
+ }
+
+ private static ReportAssertionCallback sReportAssertionCallback;
+
+ /**
+ * This method is used to handle assert failures when asserts are enabled by
+ * //build/android/bytecode:java_bytecode_rewriter. For non-release builds, this is always
+ * enabled and assert failures will result in an assertion error being thrown. For release
+ * builds, this is only enabled when report_java_assert = true. Assert failures will result in
+ * an error report being uploaded to the crash servers only if the callback is set (so that this
+ * can be a no-op for WebView in Monochrome). This also means that asserts hit before the
+ * callback is set will be no-ops as well.
+ */
+ public static void assertFailureHandler(AssertionError assertionError) {
+ if (BuildHooksConfig.REPORT_JAVA_ASSERT) {
+ if (sReportAssertionCallback != null) {
+ sReportAssertionCallback.run(assertionError);
+ }
+ } else {
+ throw assertionError;
+ }
+ }
+
+ /**
+ * Set the callback function that handles assert failure.
+ * This should be called from attachBaseContext.
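+ * Hypothetical example (reportAssertion being an app-supplied reporter):
+ * {@code setReportAssertionCallback(e -> reportAssertion(e));}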
+ */
+ public static void setReportAssertionCallback(ReportAssertionCallback callback) {
+ if (!BuildHooksConfig.REPORT_JAVA_ASSERT) {
+ throw new AssertionError();
+ }
+ sReportAssertionCallback = callback;
+ }
+}
diff --git a/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java
new file mode 100644
index 0000000000..f6fef20e07
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java
@@ -0,0 +1,107 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.content.res.Resources;
+
+/**
+ * All Java targets that require Android depend on this class. Add methods that do not
+ * require Android to {@link BuildHooks}.
+ *
+ * This class provides hooks needed for bytecode rewriting. Static convenience methods are used to
+ * minimize the amount of code that has to be generated manually during rewriting.
+ *
+ * This class contains default implementations for all methods and is used when no other
+ * implementation is supplied to an android_apk target (via build_hooks_android_impl_deps).
+ */
+public abstract class BuildHooksAndroid {
+ private static BuildHooksAndroid sInstance;
+
+ private static BuildHooksAndroid get() {
+ if (sInstance == null) {
+ sInstance = constructBuildHooksAndroidImpl();
+ }
+ return sInstance;
+ }
+
+ // Creates an instance of BuildHooksAndroidImpl using reflection. Why is this necessary?
+ // The downstream version of BuildHooksAndroidImpl pulls a bunch of methods into the main dex
+ // that don't actually need to be there. This happens because there are @MainDex classes that
+ // have Context methods added (via bytecode rewriting) that call into BuildHooksAndroid.
+ // Creating the instance via reflection tricks ProGuard into thinking BuildHooksAndroidImpl
+ // doesn't need to be in the main dex file.
+ private static BuildHooksAndroid constructBuildHooksAndroidImpl() {
+ try {
+ // Not final, to avoid inlining. Without this, ProGuard is able to figure out that
+ // BuildHooksAndroidImpl is actually used.
+ String implClazzName = "org.chromium.build.BuildHooksAndroidImpl";
+ Class<?> implClazz = Class.forName(implClazzName);
+ return (BuildHooksAndroid) implClazz.newInstance();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public static Resources getResources(Context context) {
+ return get().getResourcesImpl(context);
+ }
+
+ protected abstract Resources getResourcesImpl(Context context);
+
+ public static AssetManager getAssets(Context context) {
+ return get().getAssetsImpl(context);
+ }
+
+ protected abstract AssetManager getAssetsImpl(Context context);
+
+ public static Resources.Theme getTheme(Context context) {
+ return get().getThemeImpl(context);
+ }
+
+ protected abstract Resources.Theme getThemeImpl(Context context);
+
+ public static void setTheme(Context context, int theme) {
+ get().setThemeImpl(context, theme);
+ }
+
+ protected abstract void setThemeImpl(Context context, int theme);
+
+ public static Context createConfigurationContext(Context context) {
+ return get().createConfigurationContextImpl(context);
+ }
+
+ protected abstract Context createConfigurationContextImpl(Context context);
+
+ public static int getIdentifier(
+ Resources resources, String name, String defType, String defPackage) {
+ return get().getIdentifierImpl(resources, name, defType, defPackage);
+ }
+
+ protected abstract int getIdentifierImpl(
+ Resources resources, String name, String defType, String defPackage);
+
+ public static boolean isEnabled() {
+ return get().isEnabledImpl();
+ }
+
+ protected abstract boolean isEnabledImpl();
+
+ public static void initCustomResources(Context context) {
+ get().initCustomResourcesImpl(context);
+ }
+
+ protected abstract void initCustomResourcesImpl(Context context);
+
+ /**
+ * Record custom resources related UMA. Requires native library to be loaded.
+ */
+ public static void maybeRecordResourceMetrics() {
+ get().maybeRecordResourceMetricsImpl();
+ }
+
+ protected abstract void maybeRecordResourceMetricsImpl();
+} \ No newline at end of file
diff --git a/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java
new file mode 100644
index 0000000000..5b9b997161
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java
@@ -0,0 +1,54 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.content.res.Resources;
+
+/**
+ * Instantiable version of {@link BuildHooksAndroid} with dummy implementations.
+ */
+public class BuildHooksAndroidImpl extends BuildHooksAndroid {
+ @Override
+ protected final Resources getResourcesImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected AssetManager getAssetsImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected Resources.Theme getThemeImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected void setThemeImpl(Context context, int theme) {}
+
+ @Override
+ protected Context createConfigurationContextImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected int getIdentifierImpl(
+ Resources resources, String name, String defType, String defPackage) {
+ return resources.getIdentifier(name, defType, defPackage);
+ }
+
+ @Override
+ protected boolean isEnabledImpl() {
+ return false;
+ }
+
+ @Override
+ protected void initCustomResourcesImpl(Context context) {}
+
+ @Override
+ protected void maybeRecordResourceMetricsImpl() {}
+}
diff --git a/deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template b/deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template
new file mode 100644
index 0000000000..bdaa550110
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template
@@ -0,0 +1,16 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+/**
+ * BuildHooks configuration. Generated on a per-target basis.
+ */
+public class BuildHooksConfig {
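+    // Illustrative note: a target whose template preprocessing defines _REPORT_JAVA_ASSERT
+    // gets REPORT_JAVA_ASSERT = true in its generated copy of this class; all other targets
+    // get false.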
+#if defined(_REPORT_JAVA_ASSERT)
+ public static final boolean REPORT_JAVA_ASSERT = true;
+#else
+ public static final boolean REPORT_JAVA_ASSERT = false;
+#endif
+}
diff --git a/deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags b/deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags
new file mode 100644
index 0000000000..62dc1aa0f3
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags
@@ -0,0 +1,5 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+-keep class org.chromium.build.BuildHooksAndroidImpl
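+
+# Needed because BuildHooksAndroid instantiates BuildHooksAndroidImpl reflectively via
+# Class.forName(), which ProGuard cannot see; without this rule the class could be
+# stripped or renamed.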
diff --git a/deps/v8/build/android/bytecode/BUILD.gn b/deps/v8/build/android/bytecode/BUILD.gn
new file mode 100644
index 0000000000..1584becd43
--- /dev/null
+++ b/deps/v8/build/android/bytecode/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+assert(current_toolchain == default_toolchain)
+
+java_binary("java_bytecode_rewriter") {
+ emma_never_instrument = true
+ java_files = [
+ "java/org/chromium/bytecode/AssertionEnablerClassAdapter.java",
+ "java/org/chromium/bytecode/ByteCodeProcessor.java",
+ "java/org/chromium/bytecode/ClassPathValidator.java",
+ "java/org/chromium/bytecode/CustomClassLoaderClassWriter.java",
+ "java/org/chromium/bytecode/CustomResourcesClassAdapter.java",
+ "java/org/chromium/bytecode/SplitCompatClassAdapter.java",
+ "java/org/chromium/bytecode/ThreadAssertionClassAdapter.java",
+ "java/org/chromium/bytecode/TypeUtils.java",
+ ]
+ main_class = "org.chromium.bytecode.ByteCodeProcessor"
+ deps = [
+ "//third_party/ow2_asm:asm_java",
+ "//third_party/ow2_asm:asm_util_java",
+ ]
+ wrapper_script_name = "helper/java_bytecode_rewriter"
+}
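+
+# The wrapper script (wrapper_script_name above) lets developers run the rewriter directly
+# from the build output directory, e.g. out/Default/bin/helper/java_bytecode_rewriter
+# (illustrative path; the exact location depends on the build configuration).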
diff --git a/deps/v8/build/android/bytecode/OWNERS b/deps/v8/build/android/bytecode/OWNERS
new file mode 100644
index 0000000000..c964495a78
--- /dev/null
+++ b/deps/v8/build/android/bytecode/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java
new file mode 100644
index 0000000000..0a903a60f9
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java
@@ -0,0 +1,109 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.chromium.bytecode.TypeUtils.ASSERTION_ERROR;
+import static org.chromium.bytecode.TypeUtils.BUILD_HOOKS;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Label;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+/**
+ * A ClassVisitor that replaces Java assert statements with a call to a handler function by
+ * modifying the bytecode.
+ *
+ * We do this in two steps. The first step is to enable asserts.
+ * The following bytecode is generated for each class with assert statements:
+ * 0: ldc #8 // class CLASSNAME
+ * 2: invokevirtual #9 // Method java/lang/Class.desiredAssertionStatus:()Z
+ * 5: ifne 12
+ * 8: iconst_1
+ * 9: goto 13
+ * 12: iconst_0
+ * 13: putstatic #2 // Field $assertionsDisabled:Z
+ * Line 13 is replaced with the following:
+ * 13: pop
+ * Consequently, $assertionsDisabled is assigned the default value FALSE.
+ * This is done in the first if statement in the overridden visitFieldInsn. We do this per assert.
+ *
+ * The second step is to replace the assert statement with a function call:
+ * The following instructions are generated by a Java assert statement:
+ * getstatic #3 // Field $assertionsDisabled:Z
+ * ifne 118 // Jump past the assert when assertions are disabled
+ * ...
+ * ifne 19
+ * new #4 // class java/lang/AssertionError
+ * dup
+ * ldc #5 // String (don't have this line if no assert message given)
+ * invokespecial #6 // Method java/lang/AssertionError.
+ * athrow
+ * Replace athrow with:
+ * invokestatic #7 // Method org/chromium/base/JavaExceptionReporter.assertFailureHandler
+ * goto 118
+ * JavaExceptionReporter.assertFailureHandler is a function that handles the AssertionError;
+ * 118 is the instruction to execute as if the assertion were not enabled.
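+ *
+ * For example (illustrative), after rewriting, a statement like "assert x != null;" no
+ * longer throws; the AssertionError is passed to the handler (BuildHooks.assertFailureHandler
+ * in the generated call) and execution continues at the instruction following the assert.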
+ */
+class AssertionEnablerClassAdapter extends ClassVisitor {
+ AssertionEnablerClassAdapter(ClassVisitor visitor) {
+ super(Opcodes.ASM5, visitor);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ return new RewriteAssertMethodVisitor(
+ Opcodes.ASM5, super.visitMethod(access, name, desc, signature, exceptions));
+ }
+
+ static class RewriteAssertMethodVisitor extends MethodVisitor {
+ static final String ASSERTION_DISABLED_NAME = "$assertionsDisabled";
+ static final String INSERT_INSTRUCTION_NAME = "assertFailureHandler";
+ static final String INSERT_INSTRUCTION_DESC =
+ TypeUtils.getMethodDescriptor(VOID, ASSERTION_ERROR);
+ static final boolean INSERT_INSTRUCTION_ITF = false;
+
+ boolean mStartLoadingAssert;
+ Label mGotoLabel;
+
+ public RewriteAssertMethodVisitor(int api, MethodVisitor mv) {
+ super(api, mv);
+ }
+
+ @Override
+ public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+ if (opcode == Opcodes.PUTSTATIC && name.equals(ASSERTION_DISABLED_NAME)) {
+ super.visitInsn(Opcodes.POP); // enable assert
+ } else if (opcode == Opcodes.GETSTATIC && name.equals(ASSERTION_DISABLED_NAME)) {
+ mStartLoadingAssert = true;
+ super.visitFieldInsn(opcode, owner, name, desc);
+ } else {
+ super.visitFieldInsn(opcode, owner, name, desc);
+ }
+ }
+
+ @Override
+ public void visitJumpInsn(int opcode, Label label) {
+ if (mStartLoadingAssert && opcode == Opcodes.IFNE && mGotoLabel == null) {
+ mGotoLabel = label;
+ }
+ super.visitJumpInsn(opcode, label);
+ }
+
+ @Override
+ public void visitInsn(int opcode) {
+ if (!mStartLoadingAssert || opcode != Opcodes.ATHROW) {
+ super.visitInsn(opcode);
+ } else {
+ super.visitMethodInsn(Opcodes.INVOKESTATIC, BUILD_HOOKS, INSERT_INSTRUCTION_NAME,
+ INSERT_INSTRUCTION_DESC, INSERT_INSTRUCTION_ITF);
+ super.visitJumpInsn(Opcodes.GOTO, mGotoLabel);
+ mStartLoadingAssert = false;
+ mGotoLabel = null;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
new file mode 100644
index 0000000000..37dc192d81
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -0,0 +1,293 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.ClassWriter.COMPUTE_FRAMES;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.zip.CRC32;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * Java application that takes in an input jar, performs a series of bytecode transformations,
+ * and generates an output jar.
+ *
+ * Two types of transformations are performed:
+ * 1) Enabling assertions via {@link AssertionEnablerClassAdapter}
+ * 2) Providing support for custom resources via {@link CustomResourcesClassAdapter}
+ */
+class ByteCodeProcessor {
+ private static final String CLASS_FILE_SUFFIX = ".class";
+ private static final String TEMPORARY_FILE_SUFFIX = ".temp";
+ private static final int BUFFER_SIZE = 16384;
+ private static boolean sVerbose;
+ private static boolean sIsPrebuilt;
+ private static boolean sShouldAssert;
+ private static boolean sShouldUseCustomResources;
+ private static boolean sShouldUseThreadAnnotations;
+ private static boolean sShouldCheckClassPath;
+ private static ClassLoader sDirectClassPathClassLoader;
+ private static ClassLoader sFullClassPathClassLoader;
+ private static Set<String> sFullClassPathJarPaths;
+ private static Set<String> sSplitCompatClassNames;
+ private static ClassPathValidator sValidator;
+
+ private static class EntryDataPair {
+ private final ZipEntry mEntry;
+ private final byte[] mData;
+
+ private EntryDataPair(ZipEntry mEntry, byte[] mData) {
+ this.mEntry = mEntry;
+ this.mData = mData;
+ }
+
+ private static EntryDataPair create(String zipPath, byte[] data) {
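+ // Use STORED entries with a fixed timestamp so output jars are byte-for-byte
+ // deterministic; STORED entries require the size and CRC-32 to be set up front.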
+ ZipEntry entry = new ZipEntry(zipPath);
+ entry.setMethod(ZipEntry.STORED);
+ entry.setTime(0);
+ entry.setSize(data.length);
+ CRC32 crc = new CRC32();
+ crc.update(data);
+ entry.setCrc(crc.getValue());
+ return new EntryDataPair(entry, data);
+ }
+ }
+
+ private static EntryDataPair processEntry(ZipEntry entry, byte[] data)
+ throws ClassPathValidator.ClassNotLoadedException {
+ // Copy all non-.class files to the output jar.
+ if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
+ return new EntryDataPair(entry, data);
+ }
+
+ ClassReader reader = new ClassReader(data);
+
+ if (sShouldCheckClassPath) {
+ sValidator.validateClassPathsAndOutput(reader, sDirectClassPathClassLoader,
+ sFullClassPathClassLoader, sFullClassPathJarPaths, sIsPrebuilt, sVerbose);
+ }
+
+ ClassWriter writer;
+ if (sShouldUseCustomResources) {
+ // Use the COMPUTE_FRAMES flag to have asm figure out the stack map frames.
+ // This is necessary because GCMBaseIntentService in android_gcm_java contains
+ // incorrect stack map frames. This option slows down processing time by 2x.
+ writer = new CustomClassLoaderClassWriter(
+ sFullClassPathClassLoader, reader, COMPUTE_FRAMES);
+ } else {
+ writer = new ClassWriter(reader, 0);
+ }
+ ClassVisitor chain = writer;
+ /* DEBUGGING:
+ To see the bytecode for a specific class:
+ if (entry.getName().contains("YourClassName")) {
+ chain = new TraceClassVisitor(chain, new PrintWriter(System.out));
+ }
+ To see objectweb.asm code that will generate bytecode for a given class:
+ java -cp "third_party/ow2_asm/lib/asm-5.0.1.jar:third_party/ow2_asm/lib/"\
+ "asm-util-5.0.1.jar:out/Debug/lib.java/jar_containing_yourclass.jar" \
+ org.objectweb.asm.util.ASMifier org.package.YourClassName
+ */
+ if (sShouldUseThreadAnnotations) {
+ chain = new ThreadAssertionClassAdapter(chain);
+ }
+ if (sShouldAssert) {
+ chain = new AssertionEnablerClassAdapter(chain);
+ }
+ if (sShouldUseCustomResources) {
+ chain = new CustomResourcesClassAdapter(
+ chain, reader.getClassName(), reader.getSuperName(), sFullClassPathClassLoader);
+ }
+ if (!sSplitCompatClassNames.isEmpty()) {
+ chain = new SplitCompatClassAdapter(
+ chain, sSplitCompatClassNames, sFullClassPathClassLoader);
+ }
+ reader.accept(chain, 0);
+ byte[] patchedByteCode = writer.toByteArray();
+ return EntryDataPair.create(entry.getName(), patchedByteCode);
+ }
+
+ private static void process(String inputJarPath, String outputJarPath)
+ throws ClassPathValidator.ClassNotLoadedException, ExecutionException,
+ InterruptedException {
+ String tempJarPath = outputJarPath + TEMPORARY_FILE_SUFFIX;
+ ExecutorService executorService =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+ try (ZipInputStream inputStream = new ZipInputStream(
+ new BufferedInputStream(new FileInputStream(inputJarPath)));
+ ZipOutputStream tempStream = new ZipOutputStream(
+ new BufferedOutputStream(new FileOutputStream(tempJarPath)))) {
+ List<Future<EntryDataPair>> list = new ArrayList<>();
+ while (true) {
+ ZipEntry entry = inputStream.getNextEntry();
+ if (entry == null) {
+ break;
+ }
+ byte[] data = readAllBytes(inputStream);
+ list.add(executorService.submit(() -> processEntry(entry, data)));
+ }
+ executorService.shutdown(); // Essential: otherwise the pool's non-daemon threads keep the JVM alive indefinitely.
+ // Write the zip file entries in order to preserve determinism.
+ for (Future<EntryDataPair> futurePair : list) {
+ EntryDataPair pair = futurePair.get();
+ tempStream.putNextEntry(pair.mEntry);
+ tempStream.write(pair.mData);
+ tempStream.closeEntry();
+ }
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ try {
+ Path src = Paths.get(tempJarPath);
+ Path dest = Paths.get(outputJarPath);
+ Files.move(src, dest, StandardCopyOption.REPLACE_EXISTING);
+ } catch (IOException ioException) {
+ throw new RuntimeException(ioException);
+ }
+
+ if (sValidator.hasErrors()) {
+ System.err.println("Direct classpath is incomplete. To fix, add deps on the "
+ + "GN target(s) that provide:");
+ for (Map.Entry<String, Map<String, Set<String>>> entry :
+ sValidator.getErrors().entrySet()) {
+ printValidationError(System.err, entry.getKey(), entry.getValue());
+ }
+ System.exit(1);
+ }
+ }
+
+ private static void printValidationError(
+ PrintStream out, String jarName, Map<String, Set<String>> missingClasses) {
+ out.print(" * ");
+ out.println(jarName);
+ int i = 0;
+ final int numErrorsPerJar = 2;
+ // The list of missing classes is non-exhaustive because each class that fails to validate
+ // reports only the first missing class.
+ for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+ String missingClass = entry.getKey();
+ Set<String> filesThatNeededIt = entry.getValue();
+ out.print(" * ");
+ if (i == numErrorsPerJar) {
+ out.print(String.format("And %d more...", missingClasses.size() - numErrorsPerJar));
+ break;
+ }
+ out.print(missingClass.replace('/', '.'));
+ out.print(" (needed by ");
+ out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+ if (filesThatNeededIt.size() > 1) {
+ out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+ }
+ out.println(")");
+ i++;
+ }
+ }
+
+ private static byte[] readAllBytes(InputStream inputStream) throws IOException {
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ int numRead = 0;
+ byte[] data = new byte[BUFFER_SIZE];
+ while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
+ buffer.write(data, 0, numRead);
+ }
+ return buffer.toByteArray();
+ }
+
+ /**
+ * Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
+ * given jars.
+ */
+ static ClassLoader loadJars(Collection<String> paths) {
+ URL[] jarUrls = new URL[paths.size()];
+ int i = 0;
+ for (String path : paths) {
+ try {
+ jarUrls[i++] = new File(path).toURI().toURL();
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return new URLClassLoader(jarUrls);
+ }
+
+ public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+ ExecutionException, InterruptedException {
+ // Invoke this script using //build/android/gyp/bytecode_processor.py
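+ // Positional argument layout, matching the parsing below: input jar, output jar, six
+ // literal flag slots (e.g. "--verbose"), then three counted lists (SDK jars, direct
+ // classpath jars, split-compat class names), with all remaining args forming the full
+ // classpath.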
+ int currIndex = 0;
+ String inputJarPath = args[currIndex++];
+ String outputJarPath = args[currIndex++];
+ sVerbose = args[currIndex++].equals("--verbose");
+ sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+ sShouldAssert = args[currIndex++].equals("--enable-assert");
+ sShouldUseCustomResources = args[currIndex++].equals("--enable-custom-resources");
+ sShouldUseThreadAnnotations = args[currIndex++].equals("--enable-thread-annotations");
+ sShouldCheckClassPath = args[currIndex++].equals("--enable-check-class-path");
+ int sdkJarsLength = Integer.parseInt(args[currIndex++]);
+ List<String> sdkJarPaths =
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + sdkJarsLength));
+ currIndex += sdkJarsLength;
+
+ int directJarsLength = Integer.parseInt(args[currIndex++]);
+ ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+ directClassPathJarPaths.add(inputJarPath);
+ directClassPathJarPaths.addAll(sdkJarPaths);
+ directClassPathJarPaths.addAll(
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + directJarsLength)));
+ currIndex += directJarsLength;
+ sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+
+ // Load list of class names that need to be fixed.
+ int splitCompatClassNamesLength = Integer.parseInt(args[currIndex++]);
+ sSplitCompatClassNames = new HashSet<>();
+ sSplitCompatClassNames.addAll(Arrays.asList(
+ Arrays.copyOfRange(args, currIndex, currIndex + splitCompatClassNamesLength)));
+ currIndex += splitCompatClassNamesLength;
+
+ // Load all jars that are on the classpath for the input jar for analyzing class hierarchy.
+ sFullClassPathJarPaths = new HashSet<>();
+ sFullClassPathJarPaths.add(inputJarPath);
+ sFullClassPathJarPaths.addAll(sdkJarPaths);
+ sFullClassPathJarPaths.addAll(
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, args.length)));
+ sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+ sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+ sValidator = new ClassPathValidator();
+ process(inputJarPath, outputJarPath);
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 0000000000..c35c3f6820
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,167 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. If any referenced class cannot be found, it
+ * stores a helpful error message when it knows which jar might provide it, and exits the
+ * program if the class cannot be found on any given classpath.
+ */
+public class ClassPathValidator {
+ // Map of missing .jar -> Missing class -> Classes that failed.
+ // TreeMap so that error messages have sorted list of jars.
+ private final Map<String, Map<String, Set<String>>> mErrors = new TreeMap<>();
+
+ static class ClassNotLoadedException extends ClassNotFoundException {
+ private final String mClassName;
+
+ ClassNotLoadedException(String className, Throwable ex) {
+ super("Couldn't load " + className, ex);
+ mClassName = className;
+ }
+
+ public String getClassName() {
+ return mClassName;
+ }
+ }
+
+ private static void printAndQuit(ClassNotLoadedException e, ClassReader classReader,
+ boolean verbose) throws ClassNotLoadedException {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found on any classpath. Used by class \"" + classReader.getClassName()
+ + "\"");
+ if (verbose) {
+ throw e;
+ }
+ System.exit(1);
+ }
+
+ private static void validateClass(ClassLoader classLoader, String className)
+ throws ClassNotLoadedException {
+ if (className.startsWith("[")) {
+ // Dealing with an array type which isn't encoded nicely in the constant pool.
+ // For example, [[Lorg/chromium/Class$1;
+ className = className.substring(className.lastIndexOf('[') + 1);
+ if (className.charAt(0) == 'L' && className.endsWith(";")) {
+ className = className.substring(1, className.length() - 1);
+ } else {
+ // Bailing out if we have a non-class array type.
+ // This could be something like [B
+ return;
+ }
+ }
+ if (className.matches(".*\\bR(\\$\\w+)?$")) {
+ // Resources in R.java files are not expected to be valid at this stage in the build.
+ return;
+ }
+ if (className.matches("^libcore\\b.*")) {
+ // libcore exists on devices, but is not included in the Android sdk as it is a private
+ // API.
+ return;
+ }
+ try {
+ classLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new ClassNotLoadedException(className, e);
+ } catch (NoClassDefFoundError e) {
+ // We assume that this is caused by another class that is itself not going to be able
+ // to be loaded, so we skip this one and let that class fail with ClassNotFoundException.
+ }
+ }
+
+ /**
+ * Given a .class file, see if every class referenced in the main class' constant pool can be
+ * loaded by the given ClassLoader.
+ *
+ * @param classReader .class file interface for reading the constant pool.
+ * @param classLoader classpath you wish to validate.
+ * @throws ClassNotLoadedException thrown if it can't load a certain class.
+ */
+ private static void validateClassPath(ClassReader classReader, ClassLoader classLoader)
+ throws ClassNotLoadedException {
+ char[] charBuffer = new char[classReader.getMaxStringLength()];
+ // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+ // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+ for (int i = 1; i < classReader.getItemCount(); i++) {
+ int offset = classReader.getItem(i);
+ // Class entries correspond to 7 in the constant pool
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+ if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+ validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+ }
+ }
+ }
+
+ public void validateClassPathsAndOutput(ClassReader classReader,
+ ClassLoader directClassPathClassLoader, ClassLoader fullClassPathClassLoader,
+ Collection<String> jarsOnlyInFullClassPath, boolean isPrebuilt, boolean verbose)
+ throws ClassNotLoadedException {
+ if (isPrebuilt) {
+ // Prebuilts only need transitive dependencies checked, not direct dependencies.
+ try {
+ validateClassPath(classReader, fullClassPathClassLoader);
+ } catch (ClassNotLoadedException e) {
+ printAndQuit(e, classReader, verbose);
+ }
+ } else {
+ try {
+ validateClassPath(classReader, directClassPathClassLoader);
+ } catch (ClassNotLoadedException e) {
+ try {
+ validateClass(fullClassPathClassLoader, e.getClassName());
+ } catch (ClassNotLoadedException d) {
+ printAndQuit(d, classReader, verbose);
+ }
+ if (verbose) {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found in direct dependencies,"
+ + " but found in indirect dependiences.");
+ }
+ // Iterating through all jars that are in the full classpath but not the direct
+ // classpath to find which one provides the class we are looking for.
+ for (String jarPath : jarsOnlyInFullClassPath) {
+ try {
+ ClassLoader smallLoader =
+ ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+ validateClass(smallLoader, e.getClassName());
+ Map<String, Set<String>> failedClassesByMissingClass = mErrors.get(jarPath);
+ if (failedClassesByMissingClass == null) {
+ // TreeMap so that error messages have sorted list of classes.
+ failedClassesByMissingClass = new TreeMap<>();
+ mErrors.put(jarPath, failedClassesByMissingClass);
+ }
+ Set<String> failedClasses =
+ failedClassesByMissingClass.get(e.getClassName());
+ if (failedClasses == null) {
+ failedClasses = new TreeSet<>();
+ failedClassesByMissingClass.put(e.getClassName(), failedClasses);
+ }
+ failedClasses.add(classReader.getClassName());
+ break;
+ } catch (ClassNotLoadedException f) {
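+ // This jar does not provide the missing class either; keep searching.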
+ }
+ }
+ }
+ }
+ }
+
+ public Map<String, Map<String, Set<String>>> getErrors() {
+ return mErrors;
+ }
+
+ public boolean hasErrors() {
+ return !mErrors.isEmpty();
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java
new file mode 100644
index 0000000000..3a52c85d56
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java
@@ -0,0 +1,51 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassWriter;
+
+/**
+ * A ClassWriter that uses a custom class loader.
+ */
+class CustomClassLoaderClassWriter extends ClassWriter {
+ private ClassLoader mClassLoader;
+
+ public CustomClassLoaderClassWriter(ClassLoader classLoader, ClassReader reader, int flags) {
+ super(reader, flags);
+ this.mClassLoader = classLoader;
+ }
+
+ /**
+ * The only modification from the org.objectweb.asm.ClassWriter implementation is that this
+ * method is final and uses a custom ClassLoader.
+ *
+ * See https://github.com/llbit/ow2-asm/blob/master/src/org/objectweb/asm/ClassWriter.java.
+ */
+ @Override
+ protected final String getCommonSuperClass(final String type1, final String type2) {
+ Class<?> c, d;
+ try {
+ c = Class.forName(type1.replace('/', '.'), false, mClassLoader);
+ d = Class.forName(type2.replace('/', '.'), false, mClassLoader);
+ } catch (Exception e) {
+ throw new RuntimeException(e.toString());
+ }
+ if (c.isAssignableFrom(d)) {
+ return type1;
+ }
+ if (d.isAssignableFrom(c)) {
+ return type2;
+ }
+ if (c.isInterface() || d.isInterface()) {
+ return "java/lang/Object";
+ } else {
+ do {
+ c = c.getSuperclass();
+ } while (!c.isAssignableFrom(d));
+ return c.getName().replace('.', '/');
+ }
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java
new file mode 100644
index 0000000000..96205b8815
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java
@@ -0,0 +1,302 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
+import static org.objectweb.asm.Opcodes.ACONST_NULL;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.ARETURN;
+import static org.objectweb.asm.Opcodes.ASM5;
+import static org.objectweb.asm.Opcodes.BIPUSH;
+import static org.objectweb.asm.Opcodes.GETSTATIC;
+import static org.objectweb.asm.Opcodes.IFNE;
+import static org.objectweb.asm.Opcodes.IF_ICMPGE;
+import static org.objectweb.asm.Opcodes.ILOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
+import static org.objectweb.asm.Opcodes.RETURN;
+
+import static org.chromium.bytecode.TypeUtils.ASSET_MANAGER;
+import static org.chromium.bytecode.TypeUtils.BOOLEAN;
+import static org.chromium.bytecode.TypeUtils.BUILD_HOOKS_ANDROID;
+import static org.chromium.bytecode.TypeUtils.CONFIGURATION;
+import static org.chromium.bytecode.TypeUtils.CONTEXT;
+import static org.chromium.bytecode.TypeUtils.CONTEXT_WRAPPER;
+import static org.chromium.bytecode.TypeUtils.INT;
+import static org.chromium.bytecode.TypeUtils.RESOURCES;
+import static org.chromium.bytecode.TypeUtils.STRING;
+import static org.chromium.bytecode.TypeUtils.THEME;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Label;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A ClassVisitor for providing access to custom resources via BuildHooksAndroid.
+ *
+ * The goal of this class is to provide hooks into all places where android resources
+ * are available so that they can be modified before use. This is done by rewriting the bytecode
+ * for all callable definitions of certain Context methods, specifically:
+ * - getResources
+ * - getAssets
+ * - getTheme
+ * - setTheme
+ * - createConfigurationContext
+ *
+ * Only classes at the framework boundary are rewritten since presumably all other indirect Context
+ * subclasses will end up calling their respective super methods (i.e. we bytecode rewrite
+ * BaseChromiumApplication since it extends Application, but not ContentApplication since it
+ * extends a non-framework subclass).
+ */
+class CustomResourcesClassAdapter extends ClassVisitor {
+ private static final String IS_ENABLED_METHOD = "isEnabled";
+ private static final String IS_ENABLED_DESCRIPTOR = TypeUtils.getMethodDescriptor(BOOLEAN);
+ // Cached since this is used so often.
+ private static final String GET_IDENTIFIER_DESCRIPTOR =
+ TypeUtils.getMethodDescriptor(INT, STRING, STRING, STRING);
+
+ // Existing methods are more difficult to handle, and not currently needed.
+ private static final List<String> PROHIBITED_METHODS = Arrays.asList(
+ TypeUtils.getMethodSignature("getResources", RESOURCES),
+ TypeUtils.getMethodSignature("getAssets", ASSET_MANAGER),
+ TypeUtils.getMethodSignature("getTheme", THEME),
+ TypeUtils.getMethodSignature("createConfigurationContext", CONTEXT, CONFIGURATION),
+ TypeUtils.getMethodSignature("setTheme", VOID, INT));
+
+ private boolean mShouldTransform;
+ private String mClassName;
+ private String mSuperClassName;
+ private ClassLoader mClassLoader;
+
+ CustomResourcesClassAdapter(ClassVisitor visitor, String className, String superClassName,
+ ClassLoader classLoader) {
+ super(ASM5, visitor);
+ this.mClassName = className;
+ this.mSuperClassName = superClassName;
+ this.mClassLoader = classLoader;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+ mShouldTransform = shouldTransform();
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ if (mShouldTransform) {
+ String methodSignature = name + desc;
+ if (requiresModifyingExisting(methodSignature)) {
+ throw new RuntimeException("Rewriting existing methods not supported: " + mClassName
+ + "#" + methodSignature);
+ }
+ }
+ return new RewriteGetIdentifierMethodVisitor(
+ super.visitMethod(access, name, desc, signature, exceptions));
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mShouldTransform) {
+ delegateCreateConfigurationContext();
+ delegateSetTheme();
+ delegateGet("getAssets", ASSET_MANAGER);
+ delegateGet("getTheme", THEME);
+ delegateGet("getResources", RESOURCES);
+ }
+ super.visitEnd();
+ }
+
+ private boolean requiresModifyingExisting(String methodDescriptor) {
+ return PROHIBITED_METHODS.contains(methodDescriptor);
+ }
+
+ private boolean shouldTransform() {
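+ // Transform only classes directly at the framework boundary: descendants of Context whose
+ // immediate superclass comes from android.jar, excluding direct ContextWrapper subclasses
+ // (presumably because ContextWrapper already forwards these methods to its wrapped base
+ // Context).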
+ if (!isDescendantOfContext()) {
+ return false;
+ }
+ if (!superClassIsFrameworkClass()) {
+ return false;
+ }
+ return !superClassIsContextWrapper();
+ }
+
+ private boolean superClassIsFrameworkClass() {
+ return loadClass(mSuperClassName).getProtectionDomain().toString().contains("android.jar");
+ }
+
+ private boolean isDescendantOfContext() {
+ return isSubClass(mClassName, CONTEXT);
+ }
+
+ private boolean superClassIsContextWrapper() {
+ return mSuperClassName.equals(CONTEXT_WRAPPER);
+ }
+
+ private boolean isSubClass(String candidate, String other) {
+ Class<?> candidateClazz = loadClass(candidate);
+ Class<?> parentClazz = loadClass(other);
+ return parentClazz.isAssignableFrom(candidateClazz);
+ }
+
+ private Class<?> loadClass(String className) {
+ try {
+ return mClassLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Remaps Resources.getIdentifier() method calls to use BuildHooksAndroid.
+ *
+ * resourceObj.getIdentifier(String, String, String) becomes:
+ * BuildHooksAndroid.getIdentifier(resourceObj, String, String, String);
+ */
+ private static final class RewriteGetIdentifierMethodVisitor extends MethodVisitor {
+ RewriteGetIdentifierMethodVisitor(MethodVisitor mv) {
+ super(ASM5, mv);
+ }
+
+ @Override
+ public void visitMethodInsn(
+ int opcode, String owner, String name, String desc, boolean itf) {
+ String methodName = "getIdentifier";
+ if (opcode == INVOKEVIRTUAL && owner.equals(RESOURCES) && name.equals(methodName)
+ && desc.equals(GET_IDENTIFIER_DESCRIPTOR)) {
+ super.visitMethodInsn(INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName,
+ TypeUtils.getMethodDescriptor(INT, RESOURCES, STRING, STRING, STRING), itf);
+ } else {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ }
+ }
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * public Context createConfigurationContext(Configuration configuration) {
+ * // createConfigurationContext does not exist before API level 17.
+ * if (Build.VERSION.SDK_INT < 17) return null;
+ * if (!BuildHooksAndroid.isEnabled()) return super.createConfigurationContext(configuration);
+ * return BuildHooksAndroid.createConfigurationContext(
+ * super.createConfigurationContext(configuration));
+ * }
+ * </pre>
+ */
+ private void delegateCreateConfigurationContext() {
+ String methodName = "createConfigurationContext";
+ String methodDescriptor = TypeUtils.getMethodDescriptor(CONTEXT, CONFIGURATION);
+ MethodVisitor mv = super.visitMethod(ACC_PUBLIC, methodName, methodDescriptor, null, null);
+ mv.visitCode();
+ mv.visitFieldInsn(GETSTATIC, "android/os/Build$VERSION", "SDK_INT", INT);
+ mv.visitIntInsn(BIPUSH, 17);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IF_ICMPGE, l0);
+ mv.visitInsn(ACONST_NULL);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l0);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, IS_ENABLED_METHOD, IS_ENABLED_DESCRIPTOR, false);
+ Label l1 = new Label();
+ mv.visitJumpInsn(IFNE, l1);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, methodDescriptor, false);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l1);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, methodDescriptor, false);
+ mv.visitMethodInsn(INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName,
+ TypeUtils.getMethodDescriptor(CONTEXT, CONTEXT), false);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(2, 2);
+ mv.visitEnd();
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * public void setTheme(int theme) {
+ * if (!BuildHooksAndroid.isEnabled()) {
+ * super.setTheme(theme);
+ * return;
+ * }
+ * BuildHooksAndroid.setTheme(this, theme);
+ * }
+ * </pre>
+ */
+ private void delegateSetTheme() {
+ String methodName = "setTheme";
+ String methodDescriptor = TypeUtils.getMethodDescriptor(VOID, INT);
+ String buildHooksMethodDescriptor = TypeUtils.getMethodDescriptor(VOID, CONTEXT, INT);
+ MethodVisitor mv = super.visitMethod(ACC_PUBLIC, methodName, methodDescriptor, null, null);
+ mv.visitCode();
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, IS_ENABLED_METHOD, IS_ENABLED_DESCRIPTOR, false);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IFNE, l0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ILOAD, 1);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, methodDescriptor, false);
+ mv.visitInsn(RETURN);
+ mv.visitLabel(l0);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ILOAD, 1);
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName, buildHooksMethodDescriptor, false);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(2, 2);
+ mv.visitEnd();
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * public returnType methodName() {
+ * if (!BuildHooksAndroid.isEnabled()) return super.methodName();
+ * return BuildHooksAndroid.methodName(this);
+ * }
+ * </pre>
+ */
+ private void delegateGet(String methodName, String returnType) {
+ String getMethodDescriptor = TypeUtils.getMethodDescriptor(returnType);
+ String buildHooksGetMethodDescriptor = TypeUtils.getMethodDescriptor(returnType, CONTEXT);
+ MethodVisitor mv =
+ super.visitMethod(ACC_PUBLIC, methodName, getMethodDescriptor, null, null);
+ mv.visitCode();
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, IS_ENABLED_METHOD, IS_ENABLED_DESCRIPTOR, false);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IFNE, l0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, getMethodDescriptor, false);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l0);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName,
+ buildHooksGetMethodDescriptor, false);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(1, 1);
+ mv.visitEnd();
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java
new file mode 100644
index 0000000000..8d6ae69483
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java
@@ -0,0 +1,149 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_PROTECTED;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.RETURN;
+
+import static org.chromium.bytecode.TypeUtils.CONTEXT;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+import java.util.Set;
+
+/**
+ * A ClassVisitor for injecting ModuleInstaller.initActivity(activity) method call
+ * into Activity's attachBaseContext() method. The goal is to eventually invoke
+ * SplitCompat.install() method if running with the binary that has bundle support
+ * enabled. This needs to happen for activities that were not built with SplitCompat
+ * support.
+ */
+class SplitCompatClassAdapter extends ClassVisitor {
+ private static final String ANDROID_APP_ACTIVITY_CLASS_NAME = "android/app/Activity";
+ private static final String ATTACH_BASE_CONTEXT_METHOD_NAME = "attachBaseContext";
+ private static final String ATTACH_BASE_CONTEXT_DESCRIPTOR =
+ TypeUtils.getMethodDescriptor(VOID, CONTEXT);
+
+ private static final String MODULE_INSTALLER_CLASS_NAME =
+ "org/chromium/components/module_installer/ModuleInstaller";
+ private static final String INIT_ACTIVITY_METHOD_NAME = "initActivity";
+ private static final String INIT_ACTIVITY_DESCRIPTOR =
+ TypeUtils.getMethodDescriptor(VOID, CONTEXT);
+
+ private boolean mShouldTransform;
+
+ private Set<String> mClassNames;
+
+ private ClassLoader mClassLoader;
+
+ /**
+ * Creates an instance of SplitCompatClassAdapter.
+ *
+ * @param visitor The downstream ClassVisitor that receives the (possibly rewritten) class.
+ * @param classNames Names of classes into which the attachBaseContext method will be
+ * injected. Currently, classes are considered for bytecode rewriting only if they
+ * inherit directly from android.app.Activity and do not already contain an
+ * attachBaseContext method.
+ * @param classLoader ClassLoader able to load android.app.Activity and the classes being
+ * rewritten.
+ */
+ SplitCompatClassAdapter(ClassVisitor visitor, Set<String> classNames, ClassLoader classLoader) {
+ super(Opcodes.ASM5, visitor);
+
+ mShouldTransform = false;
+ mClassNames = classNames;
+ mClassLoader = classLoader;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+
+ if (mClassNames.contains(name)) {
+ if (!isSubclassOfActivity(name)) {
+ throw new RuntimeException(name
+ + " should be transformed but does not inherit from android.app.Activity");
+ }
+
+ mShouldTransform = true;
+ }
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ int access, String name, String descriptor, String signature, String[] exceptions) {
+ // Check if the current method matches attachBaseContext and we're supposed to emit
+ // code - if so, fail.
+ if (mShouldTransform && name.equals(ATTACH_BASE_CONTEXT_METHOD_NAME)) {
+ throw new RuntimeException(ATTACH_BASE_CONTEXT_METHOD_NAME + " method already exists");
+ }
+
+ return super.visitMethod(access, name, descriptor, signature, exceptions);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mShouldTransform) {
+ // If we reached this place, it means we're rewriting a class that inherits from
+ // Activity and there was no exception thrown due to existence of attachBaseContext
+ // method - emit code.
+ emitAttachBaseContext();
+ }
+
+ super.visitEnd();
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * protected void attachBaseContext(Context base) {
+ * super.attachBaseContext(base);
+ * ModuleInstaller.initActivity(this);
+ * }
+ * </pre>
+ */
+ private void emitAttachBaseContext() {
+ MethodVisitor mv = super.visitMethod(ACC_PROTECTED, ATTACH_BASE_CONTEXT_METHOD_NAME,
+ ATTACH_BASE_CONTEXT_DESCRIPTOR, null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0); // load "this" on stack
+ mv.visitVarInsn(ALOAD, 1); // load first method parameter on stack (Context)
+ mv.visitMethodInsn(INVOKESPECIAL, ANDROID_APP_ACTIVITY_CLASS_NAME,
+ ATTACH_BASE_CONTEXT_METHOD_NAME,
+ ATTACH_BASE_CONTEXT_DESCRIPTOR); // invoke super's attach base context
+ mv.visitVarInsn(ALOAD, 0); // load "this" on stack
+ mv.visitMethodInsn(INVOKESTATIC, MODULE_INSTALLER_CLASS_NAME, INIT_ACTIVITY_METHOD_NAME,
+ INIT_ACTIVITY_DESCRIPTOR);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(2, 2); // max stack size - 2, max locals - 2
+ mv.visitEnd();
+ }
+
+ /**
+ * Checks whether passed in class inherits from android.app.Activity.
+ * @param name Name of the class to be checked.
+ * @return true if class inherits from android.app.Activity, false otherwise.
+ */
+ private boolean isSubclassOfActivity(String name) {
+ Class<?> activityClass = loadClass(ANDROID_APP_ACTIVITY_CLASS_NAME);
+ Class<?> candidateClass = loadClass(name);
+ return activityClass.isAssignableFrom(candidateClass);
+ }
+
+ private Class<?> loadClass(String className) {
+ try {
+ return mClassLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
new file mode 100644
index 0000000000..3f50b25f3e
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
@@ -0,0 +1,83 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ASM5;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+
+import org.objectweb.asm.AnnotationVisitor;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+/**
+ * A ClassVisitor which adds calls to
+ * {@link org.chromium.base.ThreadUtils}'s assertOnUiThread/assertOnBackgroundThread when the
+ * corresponding {@link android.support.annotation.UiThread} or
+ * {@link android.support.annotation.WorkerThread} annotations are present. The function calls
+ * are placed at the start of the method.
+ */
+class ThreadAssertionClassAdapter extends ClassVisitor {
+ private static final String THREAD_UTILS_DESCRIPTOR = "org/chromium/base/ThreadUtils";
+ private static final String THREAD_UTILS_SIGNATURE = "()V";
+ private static final String UI_THREAD_ANNOTATION_DESCRIPTOR =
+ "Landroid/support/annotation/UiThread;";
+ private static final String WORKER_THREAD_ANNOTATION_DESCRIPTOR =
+ "Landroid/support/annotation/WorkerThread;";
+
+ ThreadAssertionClassAdapter(ClassVisitor visitor) {
+ super(ASM5, visitor);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ return new AddAssertMethodVisitor(
+ super.visitMethod(access, name, desc, signature, exceptions));
+ }
+
+ private static class AddAssertMethodVisitor extends MethodVisitor {
+ String mAssertMethodName = "";
+
+ AddAssertMethodVisitor(MethodVisitor mv) {
+ super(ASM5, mv);
+ }
+
+ /**
+ * Called for each annotation on the method. Checks whether the annotation is @UiThread
+ * or @WorkerThread, and if so sets the mAssertMethodName property to the name of the
+ * method to call in order to assert that the method is running on the intended thread.
+ *
+ * @param descriptor Annotation descriptor containing its name and package.
+ */
+ @Override
+ public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) {
+ switch (descriptor) {
+ case UI_THREAD_ANNOTATION_DESCRIPTOR:
+ mAssertMethodName = "assertOnUiThread";
+ break;
+ case WORKER_THREAD_ANNOTATION_DESCRIPTOR:
+ mAssertMethodName = "assertOnBackgroundThread";
+ break;
+ default:
+ break;
+ }
+
+ return super.visitAnnotation(descriptor, visible);
+ }
+
+ /**
+ * Called to start visiting code. Will also insert the assertOnXThread methods at the start
+ * of the method if needed.
+ */
+ @Override
+ public void visitCode() {
+ super.visitCode();
+ if (!mAssertMethodName.equals("")) {
+ visitMethodInsn(INVOKESTATIC, THREAD_UTILS_DESCRIPTOR, mAssertMethodName,
+ THREAD_UTILS_SIGNATURE, false);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
new file mode 100644
index 0000000000..ed2dc2dc24
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.Type;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for working with {@link Type}s and their String representations.
+ *
+ * Useful definitions to keep in mind when using this class:
+ * Internal name - The fully qualified name for a type with dots replaced by slashes. Not really
+ * relevant for primitive types.
+ * Type descriptor - Single letters for primitive types, "L" + internal name + ";" for class types.
+ *
+ * The methods in this class accept internal names or primitive type descriptors.
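+ *
+ * For example, the internal name of Resources.Theme is "android/content/res/Resources$Theme",
+ * and its type descriptor is "Landroid/content/res/Resources$Theme;".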
+ */
+class TypeUtils {
+ static final String ASSERTION_ERROR = "java/lang/AssertionError";
+ static final String ASSET_MANAGER = "android/content/res/AssetManager";
+ static final String BUILD_HOOKS = "org/chromium/build/BuildHooks";
+ static final String BUILD_HOOKS_ANDROID = "org/chromium/build/BuildHooksAndroid";
+ static final String CONFIGURATION = "android/content/res/Configuration";
+ static final String CONTEXT = "android/content/Context";
+ static final String CONTEXT_WRAPPER = "android/content/ContextWrapper";
+ static final String RESOURCES = "android/content/res/Resources";
+ static final String STRING = "java/lang/String";
+ static final String THEME = "android/content/res/Resources$Theme";
+
+ static final String BOOLEAN = "Z";
+ static final String INT = "I";
+ static final String VOID = "V";
+ private static final Map<String, Type> PRIMITIVE_DESCRIPTORS;
+ static {
+ PRIMITIVE_DESCRIPTORS = new HashMap<>();
+ PRIMITIVE_DESCRIPTORS.put(Type.BOOLEAN_TYPE.toString(), Type.BOOLEAN_TYPE);
+ PRIMITIVE_DESCRIPTORS.put(Type.INT_TYPE.toString(), Type.INT_TYPE);
+ PRIMITIVE_DESCRIPTORS.put(Type.VOID_TYPE.toString(), Type.VOID_TYPE);
+ }
+
+ /**
+ * Returns the full method signature with internal names.
+ *
+ * @param methodName Name of the method (ex. "getResources").
+ * @param returnType Internal name for the return type.
+ * @param argumentTypes List of internal names for argument types.
+ * @return String representation of the method signature.
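+ *
+ * For example, getMethodSignature("getResources", RESOURCES) returns
+ * "getResources()Landroid/content/res/Resources;".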
+ */
+ static String getMethodSignature(
+ String methodName, String returnType, String... argumentTypes) {
+ return methodName + getMethodDescriptor(returnType, argumentTypes);
+ }
+
+ /**
+ * Builds a method descriptor suitable for use with {@link org.objectweb.asm.MethodVisitor}.
+ *
+ * @param returnType Internal name for the return type of the method (primitive or class).
+ * @param argumentTypes Internal names for the argument types (primitive or class).
+ * @return The generated method descriptor.
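+ *
+ * For example, getMethodDescriptor(VOID, CONTEXT) returns "(Landroid/content/Context;)V".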
+ */
+ static String getMethodDescriptor(String returnType, String... argumentTypes) {
+ Type[] typedArguments = new Type[argumentTypes.length];
+ for (int i = 0; i < argumentTypes.length; ++i) {
+ // Argument list should be empty in this case, not V (void).
+ assert !Type.VOID_TYPE.toString().equals(argumentTypes[i]);
+ typedArguments[i] = convert(argumentTypes[i]);
+ }
+ return Type.getMethodDescriptor(convert(returnType), typedArguments);
+ }
+
+ /**
+ * Converts an internal name for a type to a {@link Type}.
+ *
+ * @param type Internal name for a type (primitive or class).
+ * @return The resulting Type.
+ */
+ private static Type convert(String type) {
+ if (PRIMITIVE_DESCRIPTORS.containsKey(type)) {
+ return PRIMITIVE_DESCRIPTORS.get(type);
+ }
+ return Type.getObjectType(type);
+ }
+}
diff --git a/deps/v8/build/android/chromium-debug.keystore b/deps/v8/build/android/chromium-debug.keystore
new file mode 100644
index 0000000000..67eb0aa34c
--- /dev/null
+++ b/deps/v8/build/android/chromium-debug.keystore
Binary files differ
diff --git a/deps/v8/build/android/convert_dex_profile.py b/deps/v8/build/android/convert_dex_profile.py
new file mode 100755
index 0000000000..f9fdeb6793
--- /dev/null
+++ b/deps/v8/build/android/convert_dex_profile.py
@@ -0,0 +1,557 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import re
+import subprocess
+import sys
+
+DEX_CLASS_NAME_RE = re.compile(r'\'L(?P<class_name>[^;]+);\'')
+DEX_METHOD_NAME_RE = re.compile(r'\'(?P<method_name>[^\']+)\'')
+DEX_METHOD_TYPE_RE = re.compile( # type descriptor method signature re
+ r'\''
+ r'\('
+ r'(?P<method_params>[^)]*)'
+ r'\)'
+ r'(?P<method_return_type>[^\']+)'
+ r'\'')
+DEX_METHOD_LINE_NR_RE = re.compile(r'line=(?P<line_number>\d+)')
+
+PROFILE_METHOD_RE = re.compile(
+ r'(?P<tags>[HSP]+)' # tags such as H/S/P
+ r'(?P<class_name>L[^;]+;)' # class name in type descriptor format
+ r'->(?P<method_name>[^(]+)'
+ r'\((?P<method_params>[^)]*)\)'
+ r'(?P<method_return_type>.+)')
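+# e.g. a profile line "HSPLorg/chromium/Foo;->bar(II)Z" parses as tags=HSP,
+# class_name=Lorg/chromium/Foo;, method_name=bar, method_params=II, method_return_type=Z.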
+
+PROGUARD_CLASS_MAPPING_RE = re.compile(
+ r'(?P<original_name>[^ ]+)'
+ r' -> '
+ r'(?P<obfuscated_name>[^:]+):')
+PROGUARD_METHOD_MAPPING_RE = re.compile(
+ # line_start:line_end: (optional)
+ r'((?P<line_start>\d+):(?P<line_end>\d+):)?'
+ r'(?P<return_type>[^ ]+)' # original method return type
+ # original method class name (if exists)
+ r' (?:(?P<original_method_class>[a-zA-Z_\d.$]+)\.)?'
+ r'(?P<original_method_name>[^.\(]+)'
+ r'\((?P<params>[^\)]*)\)' # original method params
+ r'(?:[^ ]*)' # original method line numbers (ignored)
+ r' -> '
+ r'(?P<obfuscated_name>.+)') # obfuscated method name
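+# e.g. a hypothetical mapping line "1:5:void doFrobnicate(int) -> a" parses as
+# line_start=1, line_end=5, return_type=void, original_method_name=doFrobnicate,
+# params=int, obfuscated_name=a.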
+
+TYPE_DESCRIPTOR_RE = re.compile(
+ r'(?P<brackets>\[*)'
+ r'(?:'
+ r'(?P<class_name>L[^;]+;)'
+ r'|'
+ r'[VZBSCIJFD]'
+ r')')
+
+DOT_NOTATION_MAP = {
+ '': '',
+ 'boolean': 'Z',
+ 'byte': 'B',
+ 'void': 'V',
+ 'short': 'S',
+ 'char': 'C',
+ 'int': 'I',
+ 'long': 'J',
+ 'float': 'F',
+ 'double': 'D'
+}
+
+class Method(object):
+ def __init__(self, name, class_name, param_types=None, return_type=None):
+ self.name = name
+ self.class_name = class_name
+ self.param_types = param_types
+ self.return_type = return_type
+
+ def __str__(self):
+ return '{}->{}({}){}'.format(self.class_name, self.name,
+ self.param_types or '', self.return_type or '')
+
+ def __repr__(self):
+ return 'Method<{}->{}({}){}>'.format(self.class_name, self.name,
+ self.param_types or '', self.return_type or '')
+
+ def __cmp__(self, other):
+ return cmp((self.class_name, self.name, self.param_types, self.return_type),
+ (other.class_name, other.name, other.param_types, other.return_type))
+
+ def __hash__(self):
+ # only hash name and class_name since other fields may not be set yet.
+ return hash((self.name, self.class_name))
+
+
+class Class(object):
+ def __init__(self, name):
+ self.name = name
+ self._methods = []
+
+ def AddMethod(self, method, line_numbers):
+ self._methods.append((method, set(line_numbers)))
+
+ def FindMethodsAtLine(self, method_name, line_start, line_end=None):
+ """Searches through dex class for a method given a name and line numbers
+
+ The dex maps methods to line numbers, this method, given the a method name
+ in this class as well as a start line and an optional end line (which act as
+ hints as to which function in the class is being looked for), returns a list
+ of possible matches (or none if none are found).
+
+ Args:
+ method_name: name of method being searched for
+ line_start: start of hint range for lines in this method
+ line_end: end of hint range for lines in this method (optional)
+
+ Returns:
+ A list of Method objects that could match the hints given, or None if no
+ method is found.
+ """
+ found_methods = []
+ if line_end is None:
+ hint_lines = set([line_start])
+ else:
+ hint_lines = set(range(line_start, line_end+1))
+
+ named_methods = [(method, l) for method, l in self._methods
+ if method.name == method_name]
+
+ if len(named_methods) == 1:
+ return [method for method, l in named_methods]
+ if len(named_methods) == 0:
+ return None
+
+ for method, line_numbers in named_methods:
+ if not hint_lines.isdisjoint(line_numbers):
+ found_methods.append(method)
+
+ if len(found_methods) > 0:
+ if len(found_methods) > 1:
+ logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+ found_methods, hint_lines, self.name)
+ return found_methods
+
+ for method, line_numbers in named_methods:
+ if (max(hint_lines) >= min(line_numbers)
+ and min(hint_lines) <= max(line_numbers)):
+ found_methods.append(method)
+
+ if len(found_methods) > 0:
+ if len(found_methods) > 1:
+ logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+ found_methods, hint_lines, self.name)
+ return found_methods
+ else:
+ logging.warning('No method named "%s" in class "%s" is '
+ 'mapped to lines %s', method_name, self.name, hint_lines)
+ return None
+
+
+class Profile(object):
+ def __init__(self):
+ # {Method: set(char)}
+ self._methods = collections.defaultdict(set)
+ self._classes = []
+
+ def AddMethod(self, method, tags):
+ for tag in tags:
+ self._methods[method].add(tag)
+
+ def AddClass(self, cls):
+ self._classes.append(cls)
+
+ def WriteToFile(self, path):
+ with open(path, 'w') as output_profile:
+ for cls in sorted(self._classes):
+ output_profile.write(cls + '\n')
+ for method in sorted(self._methods):
+ tags = sorted(self._methods[method])
+ line = '{}{}\n'.format(''.join(tags), str(method))
+ output_profile.write(line)
+
+
+class ProguardMapping(object):
+ def __init__(self):
+ # {Method: set(Method)}
+ self._method_mapping = collections.defaultdict(set)
+ # {String: String} String is class name in type descriptor format
+ self._class_mapping = dict()
+
+ def AddMethodMapping(self, from_method, to_method):
+ self._method_mapping[from_method].add(to_method)
+
+ def AddClassMapping(self, from_class, to_class):
+ self._class_mapping[from_class] = to_class
+
+ def GetMethodMapping(self, from_method):
+ return self._method_mapping.get(from_method)
+
+ def GetClassMapping(self, from_class):
+ return self._class_mapping.get(from_class, from_class)
+
+ def MapTypeDescriptor(self, type_descriptor):
+ match = TYPE_DESCRIPTOR_RE.search(type_descriptor)
+ assert match is not None
+ class_name = match.group('class_name')
+ if class_name is not None:
+ return match.group('brackets') + self.GetClassMapping(class_name)
+ # just a native type, return as is
+ return match.group()
+
+ def MapTypeDescriptorList(self, type_descriptor_list):
+ return TYPE_DESCRIPTOR_RE.sub(
+ lambda match: self.MapTypeDescriptor(match.group()),
+ type_descriptor_list)
+
+
+class MalformedLineException(Exception):
+ def __init__(self, message, line_number):
+ super(MalformedLineException, self).__init__(message)
+ self.line_number = line_number
+
+ def __str__(self):
+ return self.message + ' at line {}'.format(self.line_number)
+
+
+class MalformedProguardMappingException(MalformedLineException):
+ pass
+
+
+class MalformedProfileException(MalformedLineException):
+ pass
+
+
+def _RunDexDump(dexdump_path, dex_file_path):
+ return subprocess.check_output([dexdump_path, dex_file_path]).splitlines()
+
+
+def _ReadFile(file_path):
+ with open(file_path, 'r') as f:
+ return f.readlines()
+
+
+def _ToTypeDescriptor(dot_notation):
+  """Parses a dot notation type and returns it in type descriptor format.
+
+  e.g.:
+ org.chromium.browser.ChromeActivity -> Lorg/chromium/browser/ChromeActivity;
+ boolean -> Z
+ int[] -> [I
+
+ Args:
+ dot_notation: trimmed string with a single type in dot notation format
+
+ Returns:
+ A string with the type in type descriptor format
+ """
+ dot_notation = dot_notation.strip()
+ prefix = ''
+ while dot_notation.endswith('[]'):
+ prefix += '['
+ dot_notation = dot_notation[:-2]
+ if dot_notation in DOT_NOTATION_MAP:
+ return prefix + DOT_NOTATION_MAP[dot_notation]
+ return prefix + 'L' + dot_notation.replace('.', '/') + ';'
+
+
+def _DotNotationListToTypeDescriptorList(dot_notation_list_string):
+  """Parses a param list in dot notation format and returns it in type
+  descriptor format.
+
+  e.g.:
+ org.chromium.browser.ChromeActivity,boolean,int[] ->
+ Lorg/chromium/browser/ChromeActivity;Z[I
+
+ Args:
+ dot_notation_list_string: single string with multiple comma separated types
+ in dot notation format
+
+ Returns:
+ A string with the param list in type descriptor format
+ """
+ return ''.join(_ToTypeDescriptor(param) for param in
+ dot_notation_list_string.split(','))
+
+
+def ProcessDex(dex_dump):
+ """Parses dexdump output returning a dict of class names to Class objects
+
+ Parses output of the dexdump command on a dex file and extracts information
+ about classes and their respective methods and which line numbers a method is
+ mapped to.
+
+ Methods that are not mapped to any line number are ignored and not listed
+ inside their respective Class objects.
+
+ Args:
+ dex_dump: An array of lines of dexdump output
+
+ Returns:
+ A dict that maps from class names in type descriptor format (but without the
+ surrounding 'L' and ';') to Class objects.
+ """
+ # class_name: Class
+ classes_by_name = {}
+ current_class = None
+ current_method = None
+ reading_positions = False
+ reading_methods = False
+ method_line_numbers = []
+ for line in dex_dump:
+ line = line.strip()
+ if line.startswith('Class descriptor'):
+ # New class started, no longer reading methods.
+ reading_methods = False
+ current_class = Class(DEX_CLASS_NAME_RE.search(line).group('class_name'))
+ classes_by_name[current_class.name] = current_class
+ elif (line.startswith('Direct methods')
+ or line.startswith('Virtual methods')):
+ reading_methods = True
+ elif reading_methods and line.startswith('name'):
+ assert current_class is not None
+ current_method = Method(
+ DEX_METHOD_NAME_RE.search(line).group('method_name'),
+ "L" + current_class.name + ";")
+ elif reading_methods and line.startswith('type'):
+ assert current_method is not None
+ match = DEX_METHOD_TYPE_RE.search(line)
+ current_method.param_types = match.group('method_params')
+ current_method.return_type = match.group('method_return_type')
+ elif line.startswith('positions'):
+ assert reading_methods
+ reading_positions = True
+ method_line_numbers = []
+ elif reading_positions and line.startswith('0x'):
+ line_number = DEX_METHOD_LINE_NR_RE.search(line).group('line_number')
+ method_line_numbers.append(int(line_number))
+ elif reading_positions and line.startswith('locals'):
+ if len(method_line_numbers) > 0:
+ current_class.AddMethod(current_method, method_line_numbers)
+ # finished reading method line numbers
+ reading_positions = False
+ return classes_by_name
+
+
+def ProcessProguardMapping(proguard_mapping_lines, dex):
+ """Parses a proguard mapping file
+
+ This takes proguard mapping file lines and then uses the obfuscated dex to
+ create a mapping of unobfuscated methods to obfuscated ones and vice versa.
+
+ The dex is used because the proguard mapping file only has the name of the
+ obfuscated methods but not their signature, thus the dex is read to look up
+ which method with a specific name was mapped to the lines mentioned in the
+ proguard mapping file.
+
+ Args:
+ proguard_mapping_lines: Array of strings, each is a line from the proguard
+ mapping file (in order).
+ dex: a dict of class name (in type descriptor format but without the
+ enclosing 'L' and ';') to a Class object.
+  Returns:
+    Two ProguardMapping objects: the first maps from obfuscated methods and
+    class names to the original (non-obfuscated) ones; the second maps in the
+    reverse direction. Class names are in type descriptor format (with the
+    enclosing 'L' and ';').
+ """
+ mapping = ProguardMapping()
+ reverse_mapping = ProguardMapping()
+ to_be_obfuscated = []
+ current_class_orig = None
+ current_class_obfs = None
+ for index, line in enumerate(proguard_mapping_lines):
+ if line.strip() == '':
+ continue
+ if not line.startswith(' '):
+ match = PROGUARD_CLASS_MAPPING_RE.search(line)
+ if match is None:
+ raise MalformedProguardMappingException(
+ 'Malformed class mapping', index)
+ current_class_orig = match.group('original_name')
+ current_class_obfs = match.group('obfuscated_name')
+ mapping.AddClassMapping(_ToTypeDescriptor(current_class_obfs),
+ _ToTypeDescriptor(current_class_orig))
+ reverse_mapping.AddClassMapping(_ToTypeDescriptor(current_class_orig),
+ _ToTypeDescriptor(current_class_obfs))
+ continue
+
+ assert current_class_orig is not None
+ assert current_class_obfs is not None
+ line = line.strip()
+ match = PROGUARD_METHOD_MAPPING_RE.search(line)
+    # Check whether this is a method mapping (field mappings are ignored).
+ if match is not None:
+ # check if this line is an inlining by reading ahead 1 line.
+ if index + 1 < len(proguard_mapping_lines):
+ next_match = PROGUARD_METHOD_MAPPING_RE.search(
+ proguard_mapping_lines[index+1].strip())
+ if (next_match and match.group('line_start') is not None
+ and next_match.group('line_start') == match.group('line_start')
+ and next_match.group('line_end') == match.group('line_end')):
+ continue # This is an inlining, skip
+
+ original_method = Method(
+ match.group('original_method_name'),
+ _ToTypeDescriptor(
+ match.group('original_method_class') or current_class_orig),
+ _DotNotationListToTypeDescriptorList(match.group('params')),
+ _ToTypeDescriptor(match.group('return_type')))
+
+ if match.group('line_start') is not None:
+ obfs_methods = (dex[current_class_obfs.replace('.', '/')]
+ .FindMethodsAtLine(
+ match.group('obfuscated_name'),
+ int(match.group('line_start')),
+ int(match.group('line_end'))))
+
+ if obfs_methods is None:
+ continue
+
+ for obfs_method in obfs_methods:
+ mapping.AddMethodMapping(obfs_method, original_method)
+ reverse_mapping.AddMethodMapping(original_method, obfs_method)
+ else:
+ to_be_obfuscated.append(
+ (original_method, match.group('obfuscated_name')))
+
+ for original_method, obfuscated_name in to_be_obfuscated:
+ obfuscated_method = Method(
+ obfuscated_name,
+ reverse_mapping.GetClassMapping(original_method.class_name),
+ reverse_mapping.MapTypeDescriptorList(original_method.param_types),
+ reverse_mapping.MapTypeDescriptor(original_method.return_type))
+ mapping.AddMethodMapping(obfuscated_method, original_method)
+ reverse_mapping.AddMethodMapping(original_method, obfuscated_method)
+ return mapping, reverse_mapping
+
+
+def ProcessProfile(input_profile, proguard_mapping):
+  """Parses an Android profile and uses the proguard mapping to (de)obfuscate it.
+
+  This takes the Android profile lines and, for each method or class in the
+  profile, uses the mapping to either obfuscate or deobfuscate it (based on the
+  provided mapping), returning a Profile object that stores this information.
+
+ Args:
+ input_profile: array of lines of the input profile
+ proguard_mapping: a proguard mapping that would map from the classes and
+ methods in the input profile to the classes and methods
+ that should be in the output profile.
+
+ Returns:
+    A Profile object that stores the information (i.e. the list of mapped
+    classes and methods plus their tags).
+ """
+ profile = Profile()
+ for index, line in enumerate(input_profile):
+ line = line.strip()
+ if line.startswith('L'):
+ profile.AddClass(proguard_mapping.GetClassMapping(line))
+ continue
+ match = PROFILE_METHOD_RE.search(line)
+ if not match:
+ raise MalformedProfileException("Malformed line", index)
+
+ method = Method(
+ match.group('method_name'),
+ match.group('class_name'),
+ match.group('method_params'),
+ match.group('method_return_type'))
+
+ mapped_methods = proguard_mapping.GetMethodMapping(method)
+ if mapped_methods is None:
+ logging.warning('No method matching "%s" has been found in the proguard '
+ 'mapping file', method)
+ continue
+
+ for original_method in mapped_methods:
+ profile.AddMethod(original_method, match.group('tags'))
+
+ return profile
+
+
+def ObfuscateProfile(nonobfuscated_profile, dex_file, proguard_mapping,
+ dexdump_path, output_filename):
+ """Helper method for obfuscating a profile.
+
+ Args:
+ nonobfuscated_profile: a profile with nonobfuscated symbols.
+ dex_file: path to the dex file matching the mapping.
+ proguard_mapping: a mapping from nonobfuscated to obfuscated symbols used
+ in the dex file.
+ dexdump_path: path to the dexdump utility.
+ output_filename: output filename in which to write the obfuscated profile.
+ """
+ dexinfo = ProcessDex(_RunDexDump(dexdump_path, dex_file))
+ _, reverse_mapping = ProcessProguardMapping(
+ _ReadFile(proguard_mapping), dexinfo)
+ obfuscated_profile = ProcessProfile(
+ _ReadFile(nonobfuscated_profile), reverse_mapping)
+ obfuscated_profile.WriteToFile(output_filename)
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--dexdump-path',
+ required=True,
+ help='Path to dexdump binary.')
+ parser.add_argument(
+ '--dex-path',
+ required=True,
+ help='Path to dex file corresponding to the proguard mapping file.')
+ parser.add_argument(
+ '--proguard-mapping-path',
+ required=True,
+ help='Path to input proguard mapping file corresponding to the dex file.')
+ parser.add_argument(
+ '--output-profile-path',
+ required=True,
+ help='Path to output profile.')
+ parser.add_argument(
+ '--input-profile-path',
+ required=True,
+      help='Path to input profile.')
+ parser.add_argument(
+ '--verbose',
+ action='store_true',
+ default=False,
+ help='Print verbose output.')
+ obfuscation = parser.add_mutually_exclusive_group(required=True)
+ obfuscation.add_argument('--obfuscate', action='store_true',
+ help='Indicates to output an obfuscated profile given a deobfuscated '
+ 'one.')
+ obfuscation.add_argument('--deobfuscate', dest='obfuscate',
+ action='store_false', help='Indicates to output a deobfuscated profile '
+ 'given an obfuscated one.')
+ options = parser.parse_args(args)
+
+ if options.verbose:
+ log_level = logging.WARNING
+ else:
+ log_level = logging.ERROR
+ logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
+
+ dex = ProcessDex(_RunDexDump(options.dexdump_path, options.dex_path))
+ proguard_mapping, reverse_proguard_mapping = ProcessProguardMapping(
+ _ReadFile(options.proguard_mapping_path), dex)
+ if options.obfuscate:
+ profile = ProcessProfile(
+ _ReadFile(options.input_profile_path),
+ reverse_proguard_mapping)
+ else:
+ profile = ProcessProfile(
+ _ReadFile(options.input_profile_path),
+ proguard_mapping)
+ profile.WriteToFile(options.output_profile_path)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/convert_dex_profile_tests.py b/deps/v8/build/android/convert_dex_profile_tests.py
new file mode 100644
index 0000000000..0ddc5ce4a1
--- /dev/null
+++ b/deps/v8/build/android/convert_dex_profile_tests.py
@@ -0,0 +1,276 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for convert_dex_profile.
+
+Can be run from build/android/:
+ $ cd build/android
+ $ python convert_dex_profile_tests.py
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+import convert_dex_profile as cp
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import build_utils
+
+cp.logging.disable(cp.logging.CRITICAL)
+
+# There are two obfuscations used in the tests below, each with the same
+# unobfuscated profile. The first, corresponding to DEX_DUMP, PROGUARD_MAPPING,
+# and OBFUSCATED_PROFILE, has an ambiguous method a() which is mapped to both
+# getInstance and initialize. The second, corresponding to DEX_DUMP_2,
+# PROGUARD_MAPPING_2 and OBFUSCATED_PROFILE_2, removes the ambiguity.
+
+DEX_DUMP = """
+
+Class descriptor : 'La;'
+ Direct methods -
+ #0 : (in La;)
+ name : '<clinit>'
+ type : '(Ljava/lang/String;)V'
+ code -
+ catches : 1
+ 0x000f - 0x001e
+ <any> -> 0x0093
+ positions :
+ 0x0001 line=310
+ 0x0057 line=313
+ locals :
+ #1 : (in La;)
+ name : '<init>'
+ type : '()V'
+ positions :
+ locals :
+ Virtual methods -
+ #0 : (in La;)
+ name : 'a'
+ type : '(Ljava/lang/String;)I'
+ positions :
+ 0x0000 line=2
+ 0x0003 line=3
+ 0x001b line=8
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #1 : (in La;)
+ name : 'a'
+ type : '(Ljava/lang/Object;)I'
+ positions :
+ 0x0000 line=8
+ 0x0003 line=9
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #2 : (in La;)
+ name : 'b'
+ type : '()La;'
+ positions :
+ 0x0000 line=1
+ locals :
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING = \
+"""org.chromium.Original -> a:
+ org.chromium.Original sDisplayAndroidManager -> e
+ org.chromium.Original another() -> b
+ 4:4:void inlined():237:237 -> a
+ 4:4:org.chromium.Original getInstance():203 -> a
+ 5:5:void org.chromium.Original$Subclass.<init>(org.chromium.Original,byte):130:130 -> a
+ 5:5:void initialize():237 -> a
+ 5:5:org.chromium.Original getInstance():203 -> a
+ 6:6:void initialize():237:237 -> a
+ 9:9:android.content.Context org.chromium.base.ContextUtils.getApplicationContext():49:49 -> a
+ 9:9:android.content.Context getContext():219 -> a
+ 9:9:void initialize():245 -> a
+ 9:9:org.chromium.Original getInstance():203 -> a"""
+
+OBFUSCATED_PROFILE = \
+"""La;
+PLa;->b()La;
+SLa;->a(Ljava/lang/Object;)I
+HPLa;->a(Ljava/lang/String;)I"""
+
+DEX_DUMP_2 = """
+
+Class descriptor : 'La;'
+ Direct methods -
+ #0 : (in La;)
+ name : '<clinit>'
+ type : '(Ljava/lang/String;)V'
+ code -
+ catches : 1
+ 0x000f - 0x001e
+ <any> -> 0x0093
+ positions :
+ 0x0001 line=310
+ 0x0057 line=313
+ locals :
+ #1 : (in La;)
+ name : '<init>'
+ type : '()V'
+ positions :
+ locals :
+ Virtual methods -
+ #0 : (in La;)
+ name : 'a'
+ type : '(Ljava/lang/String;)I'
+ positions :
+ 0x0000 line=2
+ 0x0003 line=3
+ 0x001b line=8
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #1 : (in La;)
+ name : 'c'
+ type : '(Ljava/lang/Object;)I'
+ positions :
+ 0x0000 line=8
+ 0x0003 line=9
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #2 : (in La;)
+ name : 'b'
+ type : '()La;'
+ positions :
+ 0x0000 line=1
+ locals :
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING_2 = \
+"""org.chromium.Original -> a:
+ org.chromium.Original sDisplayAndroidManager -> e
+ org.chromium.Original another() -> b
+ void initialize() -> c
+ org.chromium.Original getInstance():203 -> a
+ 4:4:void inlined():237:237 -> a"""
+
+OBFUSCATED_PROFILE_2 = \
+"""La;
+PLa;->b()La;
+HPSLa;->a()La;
+HPLa;->c()V"""
+
+UNOBFUSCATED_PROFILE = \
+"""Lorg/chromium/Original;
+PLorg/chromium/Original;->another()Lorg/chromium/Original;
+HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;
+HPLorg/chromium/Original;->initialize()V"""
+
+class GenerateProfileTests(unittest.TestCase):
+ def testProcessDex(self):
+ dex = cp.ProcessDex(DEX_DUMP.splitlines())
+ self.assertIsNotNone(dex['a'])
+
+ self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 311, 313)), 1)
+ self.assertEquals(len(dex['a'].FindMethodsAtLine('<clinit>', 309, 315)), 1)
+ clinit = dex['a'].FindMethodsAtLine('<clinit>', 311, 313)[0]
+ self.assertEquals(clinit.name, '<clinit>')
+ self.assertEquals(clinit.return_type, 'V')
+ self.assertEquals(clinit.param_types, 'Ljava/lang/String;')
+
+ self.assertEquals(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2)
+ self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None))
+
+# pylint: disable=protected-access
+ def testProcessProguardMapping(self):
+ dex = cp.ProcessDex(DEX_DUMP.splitlines())
+ mapping, reverse = cp.ProcessProguardMapping(
+ PROGUARD_MAPPING.splitlines(), dex)
+
+ self.assertEquals('La;', reverse.GetClassMapping('Lorg/chromium/Original;'))
+
+ getInstance = cp.Method(
+ 'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+ initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+ another = cp.Method(
+ 'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+ subclassInit = cp.Method(
+ '<init>', 'Lorg/chromium/Original$Subclass;',
+ 'Lorg/chromium/Original;B', 'V')
+
+ mapped = mapping.GetMethodMapping(
+ cp.Method('a', 'La;', 'Ljava/lang/String;', 'I'))
+ self.assertEquals(len(mapped), 2)
+ self.assertIn(getInstance, mapped)
+ self.assertNotIn(subclassInit, mapped)
+ self.assertNotIn(
+ cp.Method('inlined', 'Lorg/chromium/Original;', '', 'V'), mapped)
+ self.assertIn(initialize, mapped)
+
+ mapped = mapping.GetMethodMapping(
+ cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I'))
+ self.assertEquals(len(mapped), 1)
+ self.assertIn(getInstance, mapped)
+
+ mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;'))
+ self.assertEquals(len(mapped), 1)
+ self.assertIn(another, mapped)
+
+ for from_method, to_methods in mapping._method_mapping.iteritems():
+ for to_method in to_methods:
+ self.assertIn(from_method, reverse.GetMethodMapping(to_method))
+ for from_class, to_class in mapping._class_mapping.iteritems():
+ self.assertEquals(from_class, reverse.GetClassMapping(to_class))
+
+ def testProcessProfile(self):
+ dex = cp.ProcessDex(DEX_DUMP.splitlines())
+ mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+ profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+
+ getInstance = cp.Method(
+ 'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+ initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+ another = cp.Method(
+ 'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+
+ self.assertIn('Lorg/chromium/Original;', profile._classes)
+ self.assertIn(getInstance, profile._methods)
+ self.assertIn(initialize, profile._methods)
+ self.assertIn(another, profile._methods)
+
+ self.assertEquals(profile._methods[getInstance], set(['H', 'S', 'P']))
+ self.assertEquals(profile._methods[initialize], set(['H', 'P']))
+ self.assertEquals(profile._methods[another], set(['P']))
+
+ def testEndToEnd(self):
+ dex = cp.ProcessDex(DEX_DUMP.splitlines())
+ mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+
+ profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+ with tempfile.NamedTemporaryFile() as temp:
+ profile.WriteToFile(temp.name)
+ with open(temp.name, 'r') as f:
+ for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())):
+ self.assertEquals(a.strip(), b.strip())
+
+ def testObfuscateProfile(self):
+ with build_utils.TempDir() as temp_dir:
+ # The dex dump is used as the dexfile, by passing /bin/cat as the dexdump
+ # program.
+ dex_path = os.path.join(temp_dir, 'dexdump')
+ with open(dex_path, 'w') as dex_file:
+ dex_file.write(DEX_DUMP_2)
+ mapping_path = os.path.join(temp_dir, 'mapping')
+ with open(mapping_path, 'w') as mapping_file:
+ mapping_file.write(PROGUARD_MAPPING_2)
+ unobfuscated_path = os.path.join(temp_dir, 'unobfuscated')
+ with open(unobfuscated_path, 'w') as unobfuscated_file:
+ unobfuscated_file.write(UNOBFUSCATED_PROFILE)
+ obfuscated_path = os.path.join(temp_dir, 'obfuscated')
+ cp.ObfuscateProfile(unobfuscated_path, dex_path, mapping_path, '/bin/cat',
+ obfuscated_path)
+ with open(obfuscated_path) as obfuscated_file:
+ obfuscated_profile = sorted(obfuscated_file.readlines())
+ for a, b in zip(
+ sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile):
+ self.assertEquals(a.strip(), b.strip())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/devil_chromium.json b/deps/v8/build/android/devil_chromium.json
new file mode 100644
index 0000000000..6cb7608c9e
--- /dev/null
+++ b/deps/v8/build/android/devil_chromium.json
@@ -0,0 +1,130 @@
+{
+ "config_type": "BaseConfig",
+ "dependencies": {
+ "aapt": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/aapt"
+ ]
+ }
+ }
+ },
+ "adb": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/platform-tools/adb"
+ ]
+ }
+ }
+ },
+ "android_build_tools_libc++": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/lib64/libc++.so"
+ ]
+ }
+ }
+ },
+ "android_sdk": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public"
+ ]
+ }
+ }
+ },
+ "dexdump": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/dexdump"
+ ]
+ }
+ }
+ },
+ "split-select": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/split-select"
+ ]
+ }
+ }
+ },
+ "pymock": {
+ "file_info": {
+ "darwin_x86_64": {
+ "local_paths": [
+ "../../third_party/pymock"
+ ]
+ },
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/pymock"
+ ]
+ },
+ "win32_AMD64": {
+ "local_paths": [
+ "../../third_party/pymock"
+ ]
+ }
+ }
+ },
+ "simpleperf": {
+ "file_info": {
+ "android_armeabi-v7a": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/arm/simpleperf"
+ ]
+ },
+ "android_arm64-v8a": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/arm64/simpleperf"
+ ]
+ },
+ "android_x86": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/x86/simpleperf"
+ ]
+ },
+ "android_x86_64": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/x86_64/simpleperf"
+ ]
+ },
+ "linux_x86": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/linux/x86/simpleperf"
+ ]
+ },
+ "linux_x86_64": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/linux/x86_64/simpleperf"
+ ]
+ }
+ }
+ },
+ "simpleperf_scripts": {
+ "file_info": {
+ "default": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf"
+ ]
+ }
+ }
+ },
+ "llvm-symbolizer": {
+ "file_info": {
+ "default": {
+ "local_paths": [
+ "../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer"
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/deps/v8/build/android/devil_chromium.py b/deps/v8/build/android/devil_chromium.py
new file mode 100644
index 0000000000..d42402e40a
--- /dev/null
+++ b/deps/v8/build/android/devil_chromium.py
@@ -0,0 +1,170 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configures devil for use in chromium."""
+
+import os
+import sys
+
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
+from devil import devil_env
+from devil.android.ndk import abis
+
+_DEVIL_CONFIG = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), 'devil_chromium.json'))
+
+_DEVIL_BUILD_PRODUCT_DEPS = {
+ 'chromium_commands': [
+ {
+ 'platform': 'linux2',
+ 'arch': 'x86_64',
+ 'path_components': ['lib.java', 'chromium_commands.dex.jar'],
+ }
+ ],
+ 'forwarder_device': [
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM,
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM_64,
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips',
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips64',
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86,
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86_64,
+ 'path_components': ['forwarder_dist'],
+ },
+ ],
+ 'forwarder_host': [
+ {
+ 'platform': 'linux2',
+ 'arch': 'x86_64',
+ 'path_components': ['host_forwarder'],
+ },
+ ],
+ 'md5sum_device': [
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM,
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM_64,
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips',
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips64',
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86,
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86_64,
+ 'path_components': ['md5sum_dist'],
+ },
+ ],
+ 'md5sum_host': [
+ {
+ 'platform': 'linux2',
+ 'arch': 'x86_64',
+ 'path_components': ['md5sum_bin_host'],
+ },
+ ],
+}
+
+
+def Initialize(output_directory=None, custom_deps=None, adb_path=None):
+ """Initializes devil with chromium's binaries and third-party libraries.
+
+ This includes:
+ - Libraries:
+ - the android SDK ("android_sdk")
+ - pymock ("pymock")
+ - Build products:
+ - host & device forwarder binaries
+ ("forwarder_device" and "forwarder_host")
+ - host & device md5sum binaries ("md5sum_device" and "md5sum_host")
+
+ Args:
+ output_directory: An optional path to the output directory. If not set,
+ no built dependencies are configured.
+ custom_deps: An optional dictionary specifying custom dependencies.
+ This should be of the form:
+
+ {
+ 'dependency_name': {
+ 'platform': 'path',
+ ...
+ },
+ ...
+ }
+ """
+
+ devil_dynamic_config = {
+ 'config_type': 'BaseConfig',
+ 'dependencies': {},
+ }
+ if output_directory:
+ output_directory = os.path.abspath(output_directory)
+ devil_dynamic_config['dependencies'] = {
+ dep_name: {
+ 'file_info': {
+ '%s_%s' % (dep_config['platform'], dep_config['arch']): {
+ 'local_paths': [
+ os.path.join(output_directory, *dep_config['path_components']),
+ ],
+ }
+ for dep_config in dep_configs
+ }
+ }
+ for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems()
+ }
+ if custom_deps:
+ devil_dynamic_config['dependencies'].update(custom_deps)
+ if adb_path:
+ devil_dynamic_config['dependencies'].update({
+ 'adb': {
+ 'file_info': {
+ devil_env.GetPlatform(): {
+ 'local_paths': [adb_path]
+ }
+ }
+ }
+ })
+
+ devil_env.config.Initialize(
+ configs=[devil_dynamic_config], config_files=[_DEVIL_CONFIG])
diff --git a/deps/v8/build/android/devil_chromium.pydeps b/deps/v8/build/android/devil_chromium.pydeps
new file mode 100644
index 0000000000..ea8f0c2f8a
--- /dev/null
+++ b/deps/v8/build/android/devil_chromium.pydeps
@@ -0,0 +1,38 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/devil_chromium.pydeps build/android/devil_chromium.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+devil_chromium.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
diff --git a/deps/v8/build/android/diff_resource_sizes.py b/deps/v8/build/android/diff_resource_sizes.py
new file mode 100755
index 0000000000..8066844fdd
--- /dev/null
+++ b/deps/v8/build/android/diff_resource_sizes.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs resource_sizes.py on two apks and outputs the diff."""
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from pylib.constants import host_paths
+from pylib.utils import shared_preference_utils
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+ from tracing.value import convert_chart_json # pylint: disable=import-error
+
+_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__))
+with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp')):
+ from util import build_utils # pylint: disable=import-error
+
+
+_BASE_CHART = {
+ 'format_version': '0.1',
+ 'benchmark_name': 'resource_sizes_diff',
+ 'benchmark_description': 'APK resource size diff information',
+ 'trace_rerun_options': [],
+ 'charts': {},
+}
+
+_CHARTJSON_FILENAME = 'results-chart.json'
+_HISTOGRAMS_FILENAME = 'perf_results.json'
+
+
+def DiffResults(chartjson, base_results, diff_results):
+ """Reports the diff between the two given results.
+
+ Args:
+ chartjson: A dictionary that chartjson results will be placed in, or None
+ to only print results.
+ base_results: The chartjson-formatted size results of the base APK.
+ diff_results: The chartjson-formatted size results of the diff APK.
+ """
+ for graph_title, graph in base_results['charts'].iteritems():
+ for trace_title, trace in graph.iteritems():
+ perf_tests_results_helper.ReportPerfResult(
+ chartjson, graph_title, trace_title,
+ diff_results['charts'][graph_title][trace_title]['value']
+ - trace['value'],
+ trace['units'], trace['improvement_direction'],
+ trace['important'])
+
+
+def AddIntermediateResults(chartjson, base_results, diff_results):
+ """Copies the intermediate size results into the output chartjson.
+
+ Args:
+ chartjson: A dictionary that chartjson results will be placed in.
+ base_results: The chartjson-formatted size results of the base APK.
+ diff_results: The chartjson-formatted size results of the diff APK.
+ """
+ for graph_title, graph in base_results['charts'].iteritems():
+ for trace_title, trace in graph.iteritems():
+ perf_tests_results_helper.ReportPerfResult(
+ chartjson, graph_title + '_base_apk', trace_title,
+ trace['value'], trace['units'], trace['improvement_direction'],
+ trace['important'])
+
+ # Both base_results and diff_results should have the same charts/traces, but
+ # loop over them separately in case they don't
+ for graph_title, graph in diff_results['charts'].iteritems():
+ for trace_title, trace in graph.iteritems():
+ perf_tests_results_helper.ReportPerfResult(
+ chartjson, graph_title + '_diff_apk', trace_title,
+ trace['value'], trace['units'], trace['improvement_direction'],
+ trace['important'])
+
+
+def _CreateArgparser():
+ def chromium_path(arg):
+ if arg.startswith('//'):
+ return os.path.join(host_paths.DIR_SOURCE_ROOT, arg[2:])
+ return arg
+
+ argparser = argparse.ArgumentParser(
+ description='Diff resource sizes of two APKs. Arguments not listed here '
+ 'will be passed on to both invocations of resource_sizes.py.')
+ argparser.add_argument('--chromium-output-directory-base',
+ dest='out_dir_base',
+ type=chromium_path,
+ help='Location of the build artifacts for the base '
+ 'APK, i.e. what the size increase/decrease will '
+ 'be measured from.')
+ argparser.add_argument('--chromium-output-directory-diff',
+ dest='out_dir_diff',
+ type=chromium_path,
+ help='Location of the build artifacts for the diff '
+ 'APK.')
+ argparser.add_argument('--chartjson',
+ action='store_true',
+ help='DEPRECATED. Use --output-format=chartjson '
+ 'instead.')
+ argparser.add_argument('--output-format',
+ choices=['chartjson', 'histograms'],
+ help='Output the results to a file in the given '
+ 'format instead of printing the results.')
+ argparser.add_argument('--include-intermediate-results',
+ action='store_true',
+ help='Include the results from the resource_sizes.py '
+ 'runs in the chartjson output.')
+ argparser.add_argument('--output-dir',
+ default='.',
+ type=chromium_path,
+ help='Directory to save chartjson to.')
+ argparser.add_argument('--base-apk',
+ required=True,
+ type=chromium_path,
+ help='Path to the base APK, i.e. what the size '
+ 'increase/decrease will be measured from.')
+ argparser.add_argument('--diff-apk',
+ required=True,
+ type=chromium_path,
+ help='Path to the diff APK, i.e. the APK whose size '
+ 'increase/decrease will be measured against the '
+ 'base APK.')
+ return argparser
+
+
+def main():
+ args, unknown_args = _CreateArgparser().parse_known_args()
+ # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
+ if args.chartjson:
+ args.output_format = 'chartjson'
+
+ chartjson = _BASE_CHART.copy() if args.output_format else None
+
+ with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
+ # Run resource_sizes.py on the two APKs
+ resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
+ shared_args = (['python', resource_sizes_path, '--output-format=chartjson']
+ + unknown_args)
+
+ base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
+ if args.out_dir_base:
+ base_args += ['--chromium-output-directory', args.out_dir_base]
+ try:
+ subprocess.check_output(base_args, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ print e.output
+ raise
+
+ diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk]
+ if args.out_dir_diff:
+ diff_args += ['--chromium-output-directory', args.out_dir_diff]
+ try:
+ subprocess.check_output(diff_args, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ print e.output
+ raise
+
+ # Combine the separate results
+ base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
+ diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
+ base_results = shared_preference_utils.ExtractSettingsFromJson(base_file)
+ diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file)
+ DiffResults(chartjson, base_results, diff_results)
+ if args.include_intermediate_results:
+ AddIntermediateResults(chartjson, base_results, diff_results)
+
+ if args.output_format:
+ chartjson_path = os.path.join(os.path.abspath(args.output_dir),
+ _CHARTJSON_FILENAME)
+ logging.critical('Dumping diff chartjson to %s', chartjson_path)
+ with open(chartjson_path, 'w') as outfile:
+ json.dump(chartjson, outfile)
+
+ if args.output_format == 'histograms':
+ histogram_result = convert_chart_json.ConvertChartJson(chartjson_path)
+ if histogram_result.returncode != 0:
+ logging.error('chartjson conversion failed with error: %s',
+ histogram_result.stdout)
+ return 1
+
+ histogram_path = os.path.join(os.path.abspath(args.output_dir),
+ 'perf_results.json')
+ logging.critical('Dumping diff histograms to %s', histogram_path)
+ with open(histogram_path, 'w') as json_file:
+ json_file.write(histogram_result.stdout)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/docs/README.md b/deps/v8/build/android/docs/README.md
new file mode 100644
index 0000000000..b6f0a6e9c2
--- /dev/null
+++ b/deps/v8/build/android/docs/README.md
@@ -0,0 +1,11 @@
+# Android Build Docs
+
+* [android_app_bundles.md](android_app_bundles.md)
+* [build_config.md](build_config.md)
+* [coverage.md](coverage.md)
+* [lint.md](lint.md)
+* [life_of_a_resource.md](life_of_a_resource.md)
+* [../incremental_install/README.md](../incremental_install/README.md)
+
+See also:
+* [//build/README.md](../../README.md)
diff --git a/deps/v8/build/android/docs/android_app_bundles.md b/deps/v8/build/android/docs/android_app_bundles.md
new file mode 100644
index 0000000000..8934477195
--- /dev/null
+++ b/deps/v8/build/android/docs/android_app_bundles.md
@@ -0,0 +1,210 @@
+# Introduction
+
+This document describes how the Chromium build system supports Android app
+bundles.
+
+[TOC]
+
+# Overview of app bundles
+
+An Android app bundle is an alternative distribution format for Android
+applications on the Google Play Store that reduces the size of the binaries
+sent for installation to individual devices running Android L and beyond. For
+more information, see the official Android
+[documentation](https://developer.android.com/guide/app-bundle/).
+
+For the context of this document, the most important points are:
+
+ - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot
+ be installed directly on a device.
+
+ - Instead, it must be processed into a set of installable split APKs, which
+ are stored inside a special zip archive (e.g. `foo.apks`).
+
+ - The splitting can be based on various criteria: e.g. language or screen
+   density for resources, or CPU ABI for native code.
+
+ - The bundle also uses the notion of dynamic features modules (DFMs) to
+ separate several application features. Each module has its own code, assets
+ and resources, and can be installed separately from the rest of the
+ application if needed.
+
+ - The main application itself is stored in the '`base`' module (this name
+ cannot be changed).
+
+
+# Declaring app bundles with GN templates
+
+Here's an example that shows how to declare a simple bundle that contains a
+single base module, which enables language-based splits:
+
+```gn
+
+  # First, declare the base bundle module. The base module is the one
+ # that contains the main application's code, resources and assets.
+ android_app_bundle_module("foo_base_module") {
+    # Declarations are similar to android_apk here.
+ ...
+ }
+
+ # Second, declare the bundle itself.
+ android_app_bundle("foo_bundle") {
+ # Indicate the base module to use for this bundle
+ base_module_target = ":foo_base_module"
+
+ # The name of our bundle file (without any suffix). Default would
+ # be 'foo_bundle' otherwise.
+ bundle_name = "FooBundle"
+
+ # Signing your bundle is required to upload it to the Play Store
+ # but since signing is very slow, avoid doing it for non official
+ # builds. Signing the bundle is not required for local testing.
+ sign_bundle = is_official_build
+
+ # Enable language-based splits for this bundle. Which means that
+ # resources and assets specific to a given language will be placed
+ # into their own split APK in the final .apks archive.
+ enable_language_splits = true
+
+ # Proguard settings must be passed at the bundle, not module, target.
+ proguard_enabled = !is_java_debug
+ }
+```
+
+When generating the `foo_bundle` target with Ninja, you will end up with
+the following:
+
+ - The bundle file under `out/Release/apks/FooBundle.aab`
+
+ - A helper script called `out/Release/bin/foo_bundle`, which can be used
+ to install / launch / uninstall the bundle on local devices.
+
+ This works like an APK wrapper script (e.g. `foo_apk`). Use `--help`
+ to see all possible commands supported by the script.
+
+
+# Declaring dynamic feature modules with GN templates
+
+Please see
+[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for
+more details. In short, if you need more modules besides the base one, you
+will need to list all the extra ones using the `extra_modules` variable, which
+takes a list of GN scopes, as in:
+
+```gn
+
+ android_app_bundle_module("foo_base_module") {
+ ...
+ }
+
+ android_app_bundle_module("foo_extra_module") {
+ ...
+ }
+
+ android_app_bundle("foo_bundle") {
+ base_module_target = ":foo_base_module"
+
+ extra_modules = [
+ { # NOTE: Scopes require one field per line, and no comma separators.
+ name = "my_module"
+ module_target = ":foo_extra_module"
+ }
+ ]
+
+ ...
+ }
+```
+
+Note that each extra module is identified by a unique name, which cannot
+be '`base`'.
+
+
+# Bundle signature issues
+
+Signing an app bundle is not necessary, unless you want to upload it to the
+Play Store. Since this process is very slow (it uses `jarsigner` instead of
+the much faster `apkbuilder`), you can control it with the `sign_bundle`
+variable, as described in the example above.
+
+The `.apks` archive however always contains signed split APKs. The keystore
+path/password/alias being used are the default ones, unless you use custom
+values when declaring the bundle itself, as in:
+
+```gn
+ android_app_bundle("foo_bundle") {
+ ...
+ keystore_path = "//path/to/keystore"
+ keystore_password = "K3y$t0Re-Pa$$w0rd"
+ keystore_name = "my-signing-key-name"
+ }
+```
+
+These values are not stored in the bundle itself, but in the wrapper script,
+which will use them to generate the `.apks` archive for you. This allows you
+to properly install updates on top of existing applications on any device.
+
+
+# Proguard and bundles
+
+When using an app bundle that is made of several modules, it is crucial to
+ensure that proguard, if enabled:
+
+- Keeps the obfuscated class names used by each module consistent.
+- Does not remove classes that are not used in one module, but referenced
+ by others.
+
+To achieve this, a special scheme called *synchronized proguarding* is
+performed, which consists of the following steps:
+
+- The list of unoptimized .jar files from all modules are sent to a single
+ proguard command. This generates a new temporary optimized *group* .jar file.
+
+- Each module extracts the optimized class files from the optimized *group*
+ .jar file, to generate its own, module-specific, optimized .jar.
+
+- Each module-specific optimized .jar is then sent to dex generation.
+
+This synchronized proguarding step is added by the `android_app_bundle()` GN
+template. In practice this means the following:
+
+  - `proguard_enabled` and `proguard_jar_path` must be passed to
+    `android_app_bundle` targets, but not to `android_app_bundle_module` ones.
+
+  - `proguard_configs` can still be passed to individual modules, just
+ like regular APKs. All proguard configs will be merged during the
+ synchronized proguard step.
+
+
+# Manual generation and installation of .apks archives
+
+Note that the `foo_bundle` script knows how to generate the .apks archive
+from the bundle file, and install it to local devices for you. For example,
+to install and launch a bundle, use:
+
+```sh
+ out/Release/bin/foo_bundle run
+```
+
+If you want to manually inspect or use the `.apks` archive, use the following
+command to generate it:
+
+```sh
+ out/Release/bin/foo_bundle build-bundle-apks \
+ --output-apks=/tmp/BundleFoo.apks
+```
+
+All split APKs within the archive will be properly signed, and you will be
+able to look at its content (with `unzip -l`) or install it manually with:
+
+```sh
+ build/android/gyp/bundletool.py install-apks \
+ --apks=/tmp/BundleFoo.apks \
+ --adb=$(which adb)
+```
+
+The task of examining the manifest is simplified by running the following,
+which dumps the application manifest as XML to stdout:
+
+```sh
+ build/android/gyp/bundletool.py dump-manifest
+```
diff --git a/deps/v8/build/android/docs/build_config.md b/deps/v8/build/android/docs/build_config.md
new file mode 100644
index 0000000000..74af651af7
--- /dev/null
+++ b/deps/v8/build/android/docs/build_config.md
@@ -0,0 +1,170 @@
+# Introduction
+
+This document describes the `.build_config` files that are used by the
+Chromium build system for Android-specific targets like APKs, resources,
+and more.
+
+[TOC]
+
+# I. Overview of .build_config files:
+
+The Android build requires performing computations about dependencies in
+various targets, which are not possible with the GN build language. To address
+this, `.build_config` files are written during the build to store the needed
+per-target information as JSON files.
+
+They are always written to `$target_gen_dir/${target_name}.build_config`.
+
+Many scripts under [`build/android/gyp/`](build/android/gyp/), which are used
+during the build, can also accept parameter arguments using
+`@FileArg()` references, which look like:
+
+ --some-param=@FileArg(<filename>:<key1>:<key2>:..<keyN>)
+
+This placeholder will ensure that `<filename>` is read as a JSON file, then
+return the value at `[key1][key2]...[keyN]` for the `--some-param` option.
+
+Apart from that, the scripts do not need to know anything about the structure
+of `.build_config` files (but the GN rules that invoke them do and select
+which `@FileArg()` references to use).
+
+For a concrete example, consider the following GN fragment:
+
+```gn
+# From //ui/android/BUILD.gn:
+android_resources("ui_java_resources") {
+ custom_package = "org.chromium.ui"
+ resource_dirs = [ "java/res" ]
+ deps = [
+ ":ui_strings_grd",
+ ]
+}
+```
+
+This will end up generating the following JSON file under
+`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config`:
+
+```json
+{
+ "deps_info": {
+ "deps_configs": [
+ "gen/ui/android/ui_strings_grd.build_config"
+ ],
+ "name": "ui_java_resources.build_config",
+ "package_name": "org.chromium.ui",
+ "path": "gen/ui/android/ui_java_resources.build_config",
+ "r_text": "gen/ui/android/ui_java_resources_R.txt",
+ "resources_dirs": [
+ "../../ui/android/java/res"
+ ],
+ "resources_zip": "resource_zips/ui/android/ui_java_resources.resources.zip",
+ "srcjar": "gen/ui/android/ui_java_resources.srcjar",
+ "type": "android_resources"
+ },
+ "gradle": {},
+ "resources": {
+ "dependency_zips": [
+ "resource_zips/ui/android/ui_strings_grd.resources.zip"
+ ],
+ "extra_package_names": [],
+ "extra_r_text_files": []
+ }
+}
+```
+
+NOTE: All path values in `.build_config` files are relative to your
+`$CHROMIUM_OUTPUT_DIR`.
+
+# II. Generation of .build_config files:
+
+They are generated by the internal GN
+[`write_build_config()`][gn_write_build_config] template, which ends up
+invoking [`write_build_config.py`][write_build_config_py]. For our example
+above, it is invoked with the following parameters:
+
+```
+python ../../build/android/gyp/write_build_config.py \
+ --type=android_resources \
+ --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \
+ --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config\"\] \
+ --build-config gen/ui/android/ui_java_resources.build_config \
+ --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \
+ --package-name org.chromium.ui \
+ --r-text gen/ui/android/ui_java_resources_R.txt \
+ --resource-dirs=\[\"../../ui/android/java/res\"\] \
+ --srcjar gen/ui/android/ui_java_resources.srcjar
+```
+
+Note that *most* of the content of the JSON file comes from command-line
+parameters, but not all of it.
+
+In particular, the `resources['dependency_zips']` entry was computed by
+inspecting the content of all dependencies (here, only
+`ui_strings_grd.build_config`) and collecting their
+`deps_info['resources_zip']` values.
+
+Because a target's `.build_config` file will always be generated after
+that of all of its dependencies,
+[`write_build_config.py`][write_build_config_py] can traverse the
+whole (transitive) set of direct *and* indirect dependencies for a given target
+and extract useful information out of it.
+
+This is the kind of processing that cannot be done at the GN language level,
+and is very powerful for Android builds.
+
+
+# III. Usage of .build_config files:
+
+In addition to being parsed by `write_build_config.py` when they are listed
+in the `--deps-configs` of a given target, the `.build_config` files are used
+by other scripts under [build/android/gyp/] during the build.
+
+For example, the GN `android_resources` template uses it to invoke the
+[`process_resources.py`] script with the following command, in order to
+generate various related files (e.g. `ui_java_resources_R.txt`):
+
+```sh
+python ../../build/android/gyp/process_resources.py \
+ --depfile gen/ui/android/ui_java_resources_1.d \
+ --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-28/android.jar \
+ --aapt-path ../../third_party/android_sdk/public/build-tools/27.0.3/aapt \
+ --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:dependency_zips\) \
+ --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_package_names\) \
+ --extra-r-text-files=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_r_text_files\) \
+ --resource-dirs=\[\"../../ui/android/java/res\"\] \
+ --debuggable \
+ --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \
+ --r-text-out gen/ui/android/ui_java_resources_R.txt \
+ --srcjar-out gen/ui/android/ui_java_resources.srcjar \
+ --non-constant-id \
+ --custom-package org.chromium.ui \
+ --shared-resources
+```
+
+Note the use of `@FileArg()` references here, to tell the script where to find
+the information it needs.
+
+
+# IV. Format of .build_config files:
+
+Thanks to `@FileArg()` references, Python build scripts under
+[`build/android/gyp/`](build/android/gyp/) do not need to know anything
+about the internal format of `.build_config` files.
+
+This format is decided between internal GN build rules and
+[`write_build_config.py`][write_build_config_py]. Since these changes rather
+often, the format documentation is kept inside the Python script itself, but
+can be extracted as a Markdown file and visualized with the following commands:
+
+```sh
+# Extract .build_config format documentation
+build/android/gyp/write_build_config.py \
+ --generate-markdown-format-doc > /tmp/format.md
+
+# Launch a browser to visualize the format documentation.
+python tools/md_browser/md_browser.py -d /tmp /tmp/format.md
+```
+
+[build/android/gyp/]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/
+[gn_write_build_config]: https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?q=write_build_config&sq=package:chromium
+[write_build_config_py]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/write_build_config.py
diff --git a/deps/v8/build/android/docs/coverage.md b/deps/v8/build/android/docs/coverage.md
new file mode 100644
index 0000000000..14dbef6ac9
--- /dev/null
+++ b/deps/v8/build/android/docs/coverage.md
@@ -0,0 +1,56 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage data for android
+instrumentation and junit tests.
+
+[TOC]
+
+## How EMMA coverage works
+
+In order to use EMMA code coverage, we need to create build time **.em** files
+and runtime **.ec** files. Then we need to process them using the
+`build/android/generate_emma_html.py` script.
+
+## How to collect EMMA coverage data
+
+1. Use the following GN build arguments:
+
+```gn
+target_os = "android"
+emma_coverage = true
+emma_filter = "org.chromium.chrome.browser.ntp.*,-*Test*,-*Fake*,-*Mock*"
+```
+
+The filter syntax is as documented for the [EMMA coverage
+filters](http://emma.sourceforge.net/reference/ch02s06s02.html).
+
+Now when building, **.em** files will be created in the build directory.
+
+2. Run tests, with option `--coverage-dir <directory>`, to specify where to save
+ the .ec file. For example, you can run chrome junit tests:
+ `out/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+
+3. Turn off strict mode when running instrumentation tests by adding
+ `--strict-mode=off` because the EMMA code causes strict mode violations by
+ accessing disk.
+
+4. Use a pre-L Android OS (running Dalvik) because code coverage is not
+ supported in ART.
+
+5. The coverage results of junit and instrumentation tests will be merged
+ automatically if they are in the same directory.
+
+6. Now we have both .em and .ec files. We can create a html report using
+ `generate_emma_html.py`, for example:
+
+ ```shell
+ build/android/generate_emma_html.py \
+ --coverage-dir /tmp/coverage/ \
+ --metadata-dir out/Debug/ \
+ --output example.html
+ ```
+ Then an example.html containing coverage info will be created:
+
+ ```
+ EMMA: writing [html] report to [<your_current_directory>/example.html] ...
+ ```
diff --git a/deps/v8/build/android/docs/life_of_a_resource.md b/deps/v8/build/android/docs/life_of_a_resource.md
new file mode 100644
index 0000000000..bd1ffcd994
--- /dev/null
+++ b/deps/v8/build/android/docs/life_of_a_resource.md
@@ -0,0 +1,233 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* AndroidManifest.xml
+ * Including AndroidManifest.xml files from libraries, which get merged
+ together
+* res/ directories
+
+The steps produce the following intermediate files:
+* R.srcjar (contains R.java files)
+* R.txt
+* .resources.zip
+
+The steps produce the following files within an .apk:
+* AndroidManifest.xml (a binary xml file)
+* resources.arsc (contains all values and configuration metadata)
+* res/** (drawables and layouts)
+* classes.dex (just a small portion of classes from generated R.java files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through the
+following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other .build_config files
+
+Outputs:
+* Target-specific .build_config file
+
+write_build_config.py is run to record target metadata needed by future steps.
+For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific build\_config file
+* Target-specific Resource dirs (res/ directories)
+* resources.zip files from dependencies (used to generate the R.txt/java files)
+
+Outputs:
+* Target-specific resources.zip (containing only resources in the
+  target-specific resource dirs; no dependent resources here).
+* Target-specific R.txt
+  * Contains a list of resources and their ids (including those of
+    dependencies).
+* Target-specific R.java .srcjar
+ * See [What are R.java files and how are they generated](
+ #how-r_java-files-are-generated)
+
+prepare\_resources.py zips up the target-specific resource dirs and generates
+R.txt and R.java .srcjars. No optimizations, crunching, etc. are done on the
+resources.
+
+**The following steps apply only to apk targets (not library targets).**
+
+### 3. Finalizes apk resources:
+
+Inputs:
+* Target-specific build\_config file
+* Dependencies' resources.zip files
+
+Output:
+* Packaged resources zip (named foo.ap_) containing:
+ * AndroidManifest.xml (as binary xml)
+ * resources.arsc
+ * res/**
+* Final R.txt
+  * Contains a list of resources and their ids (including those of
+    dependencies).
+* Final R.java .srcjar
+ * See [What are R.java files and how are they generated](
+ #how-r_java-files-are-generated)
+
+
+#### 3(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following happens:
+* Use a regex (defined in the apk target) to remove select resources (optional).
+* Convert png images to webp to reduce binary size (optional).
+* Move drawables in mdpi to non-mdpi directory ([why?](http://crbug.com/289843))
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+ other resources will now use the id rather than the name for faster lookup at
+ runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts of
+ the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+ dependency).
+
+
+#### 3(b). Links resources:
+
+After each dependency is compiled into an intermediate .zip, all those zips are
+linked by the aapt2 link command which does the following:
+* Use the supplied order of dependencies so that overlapping resources clobber
+  each other in a predictable way.
+* Compile the AndroidManifest.xml to binary xml (references to resources now
+  use ids rather than string names).
+* Create a resources.arsc file that has the names and values of string
+  resources as well as the names and paths of non-string resources (i.e.
+  layouts and drawables).
+* Combine the compiled resources into one packaged resources apk (a zip file
+ with an .ap\_ extension) that has all the resources related files.
+
+
+#### 3(c). Optimizes resources:
+
+This step obfuscates / strips resource names from the resources.arsc so that
+they can be looked up only by their numeric ids (assigned in the compile
+resources step). Access to resources via `Resources.getIdentifier()` no longer
+works unless resources are [whitelisted](#adding-resources-to-the-whitelist).
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each module
+has its resources compiled and linked separately (i.e., each module goes
+through the entire process). The modules are then combined to form a
+bundle. Moreover, during the "Finalizes apk resources" step, bundle modules
+produce a `resources.proto` file instead of a `resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked resources
+of the base module are passed in. However, linking against resources currently
+works only with `resources.arsc` format. Thus, when building the base module,
+resources are compiled as both `resources.arsc` and `resources.proto`.
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stack trace looks something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their names obfuscated/stripped during the optimize resources step. To help
+with debugging, the `R.txt` file is archived. The `R.txt` file contains a
+mapping from resource ids to resource names and can be used to recover the
+original resource name from the id. In the above message the id is `0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to archived
+apks.
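+
+For illustration, here is a minimal Python sketch of that lookup; the R.txt
+path and the example id are hypothetical:
+
+```python
+# Build an {id -> name} map from R.txt lines, which look like:
+#   int string AllowedDomainsForAppsTitle 0x7f130001
+id_to_name = {}
+with open('out/Debug/apks/Foo.apk.R.txt') as f:  # Hypothetical path.
+  for line in f:
+    parts = line.split()
+    if len(parts) == 4 and parts[0] == 'int':
+      _, res_type, res_name, res_id = parts
+      id_to_name[int(res_id, 16)] = '%s/%s' % (res_type, res_name)
+
+print(id_to_name.get(0x7f0a02ad))  # e.g. 'id/some_anchor_view'
+```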
+
+### Adding resources to the whitelist
+
+If a resource is accessed via `getIdentifier()` it needs to be whitelisted in an
+aapt2 resources config file. The config file looks like this:
+
+```
+<resource type>/<resource name>#no_obfuscate
+```
+e.g.:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_path` variable. To add a resource to the whitelist, find
+the config file used by your target and add a new line for your resource. If
+none exists, create a new config file and set its path in your target.
+
+### Webview resource ids
+
+The first byte of a resource id is the package id. For regular apks, this
+is `0x7f`. However, WebView is a shared library which gets loaded into other
+apks. The package id for WebView resources is assigned dynamically at runtime.
+When WebView is loaded it [rewrites all resources][ResourceRewriter.java] to
+have the correct package id. When deobfuscating WebView resource ids, disregard
+the package byte at the front of the id when looking it up in the `R.txt` file.
+
+Monochrome, when loaded as WebView, rewrites the package ids of resources used
+by the WebView portion to the correct value at runtime; otherwise, its
+resources have package id `0x7f` when run as a regular apk.
+
+[ResourceRewriter.java]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/glue/glue/generated_java/com/android/webview/chromium/ResourceRewriter.java
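+
+A minimal Python sketch of that masking, reusing the hypothetical `id_to_name`
+map from the sketch above:
+
+```python
+res_id = 0x2e0a02ad  # Hypothetical id whose package byte was assigned at runtime.
+
+# Ignore the package byte and match on the type/entry portion only.
+entry = res_id & 0x00FFFFFF
+names = [name for known_id, name in id_to_name.items()
+         if (known_id & 0x00FFFFFF) == entry]
+```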
+
+## How R.java files are generated
+
+This is what a sample R.java file looks like:
+
+```java
+package org.chromium.ui;
+
+public final class R {
+ public static final class attr {
+ public static final int buttonAlignment = 0x7f030038;
+ public static final int buttonColor = 0x7f03003e;
+ public static final int layout = 0x7f030094;
+ public static final int roundedfillColor = 0x7f0300bf;
+ public static final int secondaryButtonText = 0x7f0300c4;
+ public static final int stackedMargin = 0x7f0300d4;
+ }
+ public static final class id {
+ public static final int apart = 0x7f080021;
+ public static final int dropdown_body_footer_divider = 0x7f08003d;
+ public static final int dropdown_body_list = 0x7f08003e;
+ public static final int dropdown_footer = 0x7f08003f;
+ }
+ public static final class layout {
+ public static final int dropdown_item = 0x7f0a0022;
+ public static final int dropdown_window = 0x7f0a0023;
+ }
+}
+```
+
+R.java is a list of static classes, each with multiple static fields containing
+ids. These ids are used in java code to reference resources in the apk. The
+R.java file generated via the prepare resources step above has temporary ids
+which are not marked `final`. That R.java file is only used so that javac can
+compile the java code that references R.*.
+
+The R.java generated during the finalize apk resources step has
+permanent ids. These ids are marked as `final` (except webview resources that
+need to be [rewritten at runtime](#webview-resource-ids)).
diff --git a/deps/v8/build/android/docs/lint.md b/deps/v8/build/android/docs/lint.md
new file mode 100644
index 0000000000..37f35502e5
--- /dev/null
+++ b/deps/v8/build/android/docs/lint.md
@@ -0,0 +1,91 @@
+# Lint
+
+Android's [**lint**](http://developer.android.com/tools/help/lint.html) is a static
+analysis tool that Chromium uses to catch possible issues in Java code.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium runs lint on a per-target basis for all targets using any of the
+following templates if they are marked as Chromium code (i.e.,
+`chromium_code = true`):
+
+ - `android_apk`
+ - `android_library`
+ - `instrumentation_test_apk`
+ - `unittest_apk`
+
+Chromium also runs lint on a per-target basis for all targets using any of the
+following templates if they are marked as Chromium code and they support
+Android (i.e., `supports_android = true`):
+
+ - `java_library`
+
+This is implemented in the
+[`android_lint`](https://code.google.com/p/chromium/codesearch#chromium/src/build/config/android/internal_rules.gni&q=android_lint%20file:internal_rules%5C.gni)
+GN template.
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it in code
+
+Android provides an annotation,
+[`@SuppressLint`](http://developer.android.com/reference/android/annotation/SuppressLint.html),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage
+in Chromium is typically limited to the first three.
+
+Like many suppression annotations, `@SuppressLint` takes a value that tells **lint**
+what to ignore. It can be a single `String`:
+
+```java
+@SuppressLint("NewApi")
+public void foo() {
+ a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressLint({
+ "NewApi",
+ "UseSparseArrays"
+ })
+public Map<Integer, FakeObject> bar() {
+ Map<Integer, FakeObject> shouldBeASparseArray = new HashMap<Integer, FakeObject>();
+ another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+ return shouldBeASparseArray;
+}
+```
+
+This is the preferred way of suppressing warnings in a limited scope.
+
+### Suppress it in the suppressions XML file
+
+**lint** can be given an XML configuration containing warnings or errors that
+should be ignored. Chromium's lint suppression XML file can be found in
+[`build/android/lint/suppressions.xml`](https://chromium.googlesource.com/chromium/src/+/master/build/android/lint/suppressions.xml).
+It can be updated to suppress current warnings by running:
+
+```bash
+$ python build/android/lint/suppress.py <result.xml file>
+```
+
+e.g., to suppress lint errors found in `media_java`:
+
+```bash
+$ python build/android/lint/suppress.py out/Debug/gen/media/base/android/media_java__lint/result.xml
+```
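+
+Entries in the suppressions file use lint's standard XML configuration format;
+a representative (illustrative) entry looks like:
+
+```xml
+<lint>
+  <!-- Illustrative entry; see suppressions.xml for the real ones. -->
+  <issue id="NewApi">
+    <ignore regexp="FooBar\.java"/>
+  </issue>
+</lint>
+```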
+
+**This mechanism should only be used for disabling warnings across the entire code base; class-specific lint warnings should be disabled inline.**
+
diff --git a/deps/v8/build/android/download_doclava.py b/deps/v8/build/android/download_doclava.py
new file mode 100755
index 0000000000..f9b3af635d
--- /dev/null
+++ b/deps/v8/build/android/download_doclava.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+def main():
+ # Some Windows bots inadvertently have third_party/android_sdk installed,
+ # but are unable to run download_from_google_storage because depot_tools
+ # is not in their path, so avoid failure and bail.
+ if sys.platform == 'win32':
+ return 0
+ subprocess.check_call([
+ 'download_from_google_storage',
+ '--no_resume',
+ '--no_auth',
+ '--bucket', 'chromium-doclava',
+ '--extract',
+ '-s',
+ os.path.join('src', 'buildtools', 'android', 'doclava.tar.gz.sha1')])
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/dump_apk_resource_strings.py b/deps/v8/build/android/dump_apk_resource_strings.py
new file mode 100755
index 0000000000..51e01f39f0
--- /dev/null
+++ b/deps/v8/build/android/dump_apk_resource_strings.py
@@ -0,0 +1,662 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A script to parse and dump localized strings in resource.arsc files."""
+
+import argparse
+import collections
+import contextlib
+import cProfile
+import os
+import re
+import subprocess
+import sys
+import zipfile
+
+# pylint: disable=bare-except
+
+# Assuming this script is located under build/android, try to import
+# build/android/gyp/bundletool.py to get the default path to the bundletool
+# jar file. If this fails, using --bundletool-path will be required to parse
+# bundles, allowing this script to be relocated or reused somewhere else.
+try:
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gyp'))
+ import bundletool
+
+ _DEFAULT_BUNDLETOOL_PATH = bundletool.BUNDLETOOL_JAR_PATH
+except:
+ _DEFAULT_BUNDLETOOL_PATH = None
+
+# Try to get the path of the aapt build tool from catapult/devil.
+try:
+ import devil_chromium # pylint: disable=unused-import
+ from devil.android.sdk import build_tools
+ _AAPT_DEFAULT_PATH = build_tools.GetPath('aapt')
+except:
+ _AAPT_DEFAULT_PATH = None
+
+
+def AutoIndentStringList(lines, indentation=2):
+ """Auto-indents a input list of text lines, based on open/closed braces.
+
+ For example, the following input text:
+
+ 'Foo {',
+ 'Bar {',
+ 'Zoo',
+ '}',
+ '}',
+
+ Will return the following:
+
+ 'Foo {',
+ ' Bar {',
+ ' Zoo',
+ ' }',
+ '}',
+
+ The rules are pretty simple:
+ - A line that ends with an open brace ({) increments indentation.
+ - A line that starts with a closing brace (}) decrements it.
+
+ The main idea is to make outputting structured text data trivial,
+ since it can be assumed that the final output will be passed through
+ this function to make it human-readable.
+
+ Args:
+    lines: an iterator over input text lines. They should not contain
+      line terminators (e.g. '\n').
+ Returns:
+ A new list of text lines, properly auto-indented.
+ """
+ margin = ''
+ result = []
+ # NOTE: Intentional but significant speed optimizations in this function:
+ # - |line and line[0] == <char>| instead of |line.startswith(<char>)|.
+ # - |line and line[-1] == <char>| instead of |line.endswith(<char>)|.
+ for line in lines:
+ if line and line[0] == '}':
+ margin = margin[:-indentation]
+ result.append(margin + line)
+ if line and line[-1] == '{':
+ margin += ' ' * indentation
+
+ return result
+
+
+# pylint: disable=line-too-long
+
+# NOTE: aapt dump will quote the following characters only: \n, \ and "
+# see https://android.googlesource.com/platform/frameworks/base/+/master/libs/androidfw/ResourceTypes.cpp#7270
+
+# pylint: enable=line-too-long
+
+
+def UnquoteString(s):
+ """Unquote a given string from aapt dump.
+
+ Args:
+    s: A UTF-8 encoded string that contains backslashes for quotes, as found
+      in the output of 'aapt dump resources --values'.
+ Returns:
+ The unquoted version of the input string.
+ """
+ if not '\\' in s:
+ return s
+
+ result = ''
+ start = 0
+ size = len(s)
+ while start < size:
+ pos = s.find('\\', start)
+ if pos < 0:
+ break
+
+ result += s[start:pos]
+ count = 1
+ while pos + count < size and s[pos + count] == '\\':
+ count += 1
+
+ result += '\\' * (count / 2)
+ start = pos + count
+ if count & 1:
+ if start < size:
+ ch = s[start]
+ if ch == 'n': # \n is the only non-printable character supported.
+ ch = '\n'
+ result += ch
+ start += 1
+ else:
+ result += '\\'
+
+ result += s[start:]
+ return result
+
+
+assert UnquoteString(r'foo bar') == 'foo bar'
+assert UnquoteString(r'foo\nbar') == 'foo\nbar'
+assert UnquoteString(r'foo\\nbar') == 'foo\\nbar'
+assert UnquoteString(r'foo\\\nbar') == 'foo\\\nbar'
+assert UnquoteString(r'foo\n\nbar') == 'foo\n\nbar'
+assert UnquoteString(r'foo\\bar') == r'foo\bar'
+
+
+def QuoteString(s):
+ """Quote a given string for external output.
+
+ Args:
+ s: An input UTF-8 encoded string.
+ Returns:
+ A quoted version of the string, using the same rules as 'aapt dump'.
+ """
+ # NOTE: Using repr() would escape all non-ASCII bytes in the string, which
+ # is undesirable.
+ return s.replace('\\', r'\\').replace('"', '\\"').replace('\n', '\\n')
+
+
+assert QuoteString(r'foo "bar"') == 'foo \\"bar\\"'
+assert QuoteString('foo\nbar') == 'foo\\nbar'
+
+
+def ReadStringMapFromRTxt(r_txt_path):
+ """Read all string resource IDs and names from an R.txt file.
+
+ Args:
+ r_txt_path: Input file path.
+ Returns:
+ A {res_id -> res_name} dictionary corresponding to the string resources
+ from the input R.txt file.
+ """
+ # NOTE: Typical line of interest looks like:
+ # int string AllowedDomainsForAppsTitle 0x7f130001
+ result = {}
+ prefix = 'int string '
+ with open(r_txt_path) as f:
+ for line in f:
+ line = line.rstrip()
+ if line.startswith(prefix):
+ res_name, res_id = line[len(prefix):].split(' ')
+ result[int(res_id, 0)] = res_name
+ return result
+
+
+class ResourceStringValues(object):
+ """Models all possible values for a named string."""
+
+ def __init__(self):
+ self.res_name = None
+ self.res_values = {}
+
+ def AddValue(self, res_name, res_config, res_value):
+ """Add a new value to this entry.
+
+ Args:
+ res_name: Resource name. If this is not the first time this method
+ is called with the same resource name, then |res_name| should match
+ previous parameters for sanity checking.
+ res_config: Config associated with this value. This can actually be
+ anything that can be converted to a string.
+ res_value: UTF-8 encoded string value.
+ """
+ if res_name is not self.res_name and res_name != self.res_name:
+ if self.res_name is None:
+ self.res_name = res_name
+ else:
+ # Sanity check: the resource name should be the same for all chunks.
+ # Resource ID is redefined with a different name!!
+ print 'WARNING: Resource key ignored (%s, should be %s)' % (
+ res_name, self.res_name)
+
+ if self.res_values.setdefault(res_config, res_value) is not res_value:
+ print 'WARNING: Duplicate value definition for [config %s]: %s ' \
+ '(already has %s)' % (
+ res_config, res_value, self.res_values[res_config])
+
+ def ToStringList(self, res_id):
+ """Convert entry to string list for human-friendly output."""
+ values = sorted(
+ [(str(config), value) for config, value in self.res_values.iteritems()])
+ if res_id is None:
+ # res_id will be None when the resource ID should not be part
+ # of the output.
+ result = ['name=%s count=%d {' % (self.res_name, len(values))]
+ else:
+ result = [
+ 'res_id=0x%08x name=%s count=%d {' % (res_id, self.res_name,
+ len(values))
+ ]
+ for config, value in values:
+ result.append('%-16s "%s"' % (config, QuoteString(value)))
+ result.append('}')
+ return result
+
+
+class ResourceStringMap(object):
+ """Convenience class to hold the set of all localized strings in a table.
+
+ Usage is the following:
+ 1) Create new (empty) instance.
+ 2) Call AddValue() repeatedly to add new values.
+ 3) Eventually call RemapResourceNames() to remap resource names.
+ 4) Call ToStringList() to convert the instance to a human-readable
+ list of strings that can later be used with AutoIndentStringList()
+ for example.
+ """
+
+ def __init__(self):
+ self._res_map = collections.defaultdict(ResourceStringValues)
+
+ def AddValue(self, res_id, res_name, res_config, res_value):
+ self._res_map[res_id].AddValue(res_name, res_config, res_value)
+
+ def RemapResourceNames(self, id_name_map):
+ """Rename all entries according to a given {res_id -> res_name} map."""
+ for res_id, res_name in id_name_map.iteritems():
+ if res_id in self._res_map:
+ self._res_map[res_id].res_name = res_name
+
+ def ToStringList(self, omit_ids=False):
+ """Dump content to a human-readable string list.
+
+ Note that the strings are ordered by their resource name first, and
+ resource id second.
+
+ Args:
+ omit_ids: If True, do not put resource IDs in the result. This might
+ be useful when comparing the outputs of two different builds of the
+ same APK, or two related APKs (e.g. ChromePublic.apk vs Chrome.apk)
+ where the resource IDs might be slightly different, but not the
+ string contents.
+    Returns:
+ A list of strings that can later be sent to AutoIndentStringList().
+ """
+ result = ['Resource strings (count=%d) {' % len(self._res_map)]
+ res_map = self._res_map
+
+ # A small function to compare two (res_id, values) tuples
+ # by resource name first, then resource ID.
+ def cmp_id_name(a, b):
+ result = cmp(a[1].res_name, b[1].res_name)
+ if result == 0:
+ result = cmp(a[0], b[0])
+ return result
+
+ for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name):
+ result += res_map[res_id].ToStringList(None if omit_ids else res_id)
+ result.append('} # Resource strings')
+ return result
+
+
+@contextlib.contextmanager
+def ManagedOutput(output_file):
+ """Create an output File object that will be closed on exit if necessary.
+
+ Args:
+ output_file: Optional output file path.
+ Yields:
+ If |output_file| is empty, this simply yields sys.stdout. Otherwise, this
+ opens the file path for writing text, and yields its File object. The
+ context will ensure that the object is always closed on scope exit.
+ """
+ close_output = False
+ if output_file:
+ output = open(output_file, 'wt')
+ close_output = True
+ else:
+ output = sys.stdout
+ try:
+ yield output
+ finally:
+ if close_output:
+ output.close()
+
+
+@contextlib.contextmanager
+def ManagedPythonProfiling(enable_profiling, sort_key='tottime'):
+ """Enable Python profiling if needed.
+
+ Args:
+ enable_profiling: Boolean flag. True to enable python profiling.
+ sort_key: Sorting key for the final stats dump.
+ Yields:
+ If |enable_profiling| is False, this yields False. Otherwise, this
+ yields a new Profile instance just after enabling it. The manager
+ ensures that profiling stops and prints statistics on scope exit.
+ """
+ pr = None
+ if enable_profiling:
+ pr = cProfile.Profile()
+ pr.enable()
+ try:
+ yield pr
+ finally:
+ if pr:
+ pr.disable()
+ pr.print_stats(sort=sort_key)
+
+
+def IsFilePathABundle(input_file):
+ """Return True iff |input_file| holds an Android app bundle."""
+ try:
+ with zipfile.ZipFile(input_file) as input_zip:
+ _ = input_zip.getinfo('BundleConfig.pb')
+ return True
+ except:
+ return False
+
+
+# Example output from 'bundletool dump resources --values' corresponding
+# to strings:
+#
+# 0x7F1200A0 - string/abc_action_menu_overflow_description
+# (default) - [STR] "More options"
+# locale: "ca" - [STR] "Més opcions"
+# locale: "da" - [STR] "Flere muligheder"
+# locale: "fa" - [STR] " گزینه<U+200C>های بیشتر"
+# locale: "ja" - [STR] "その他のオプション"
+# locale: "ta" - [STR] "மேலும் விருப்பங்கள்"
+# locale: "nb" - [STR] "Flere alternativer"
+# ...
+#
+# Fun fact #1: Bundletool uses <lang>-<REGION> instead of <lang>-r<REGION>
+# for locales!
+#
+# Fun fact #2: The <U+200C> is terminal output for \u200c, the output is
+# really UTF-8 encoded when it is read by this script.
+#
+# Fun fact #3: Bundletool quotes \n, \\ and \" just like aapt since 0.8.0.
+#
+_RE_BUNDLE_STRING_RESOURCE_HEADER = re.compile(
+ r'^0x([0-9A-F]+)\s\-\sstring/(\w+)$')
+assert _RE_BUNDLE_STRING_RESOURCE_HEADER.match(
+ '0x7F1200A0 - string/abc_action_menu_overflow_description')
+
+_RE_BUNDLE_STRING_DEFAULT_VALUE = re.compile(
+ r'^\s+\(default\) - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+ ' (default) - [STR] "More options"')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+ ' (default) - [STR] "More options"').group(1) == "More options"
+
+_RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile(
+ r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(
+ u' locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8'))
+
+
+def ParseBundleResources(bundle_tool_jar_path, bundle_path):
+ """Use bundletool to extract the localized strings of a given bundle.
+
+ Args:
+ bundle_tool_jar_path: Path to bundletool .jar executable.
+ bundle_path: Path to input bundle.
+ Returns:
+ A new ResourceStringMap instance populated with the bundle's content.
+ """
+ cmd_args = [
+ 'java', '-jar', bundle_tool_jar_path, 'dump', 'resources', '--bundle',
+ bundle_path, '--values'
+ ]
+ p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+ res_map = ResourceStringMap()
+ current_resource_id = None
+ current_resource_name = None
+ keep_parsing = True
+ need_value = False
+ while keep_parsing:
+ line = p.stdout.readline()
+ if not line:
+ break
+    # Do not use rstrip(), since this should only remove trailing newlines
+    # but not trailing whitespace that happens to be embedded in the string
+    # value for some reason.
+ line = line.rstrip('\n\r')
+ m = _RE_BUNDLE_STRING_RESOURCE_HEADER.match(line)
+ if m:
+ current_resource_id = int(m.group(1), 16)
+ current_resource_name = m.group(2)
+ need_value = True
+ continue
+
+ if not need_value:
+ continue
+
+ resource_config = None
+ m = _RE_BUNDLE_STRING_DEFAULT_VALUE.match(line)
+ if m:
+ resource_config = 'config (default)'
+ resource_value = m.group(1)
+ else:
+ m = _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(line)
+ if m:
+ resource_config = 'config %s' % m.group(1)
+ resource_value = m.group(2)
+
+ if resource_config is None:
+ need_value = False
+ continue
+
+ res_map.AddValue(current_resource_id, current_resource_name,
+ resource_config, UnquoteString(resource_value))
+ return res_map
+
+
+# Name of the binary resources table file inside an APK.
+RESOURCES_FILENAME = 'resources.arsc'
+
+
+def IsFilePathAnApk(input_file):
+ """Returns True iff a ZipFile instance is for a regular APK."""
+ try:
+ with zipfile.ZipFile(input_file) as input_zip:
+ _ = input_zip.getinfo(RESOURCES_FILENAME)
+ return True
+ except:
+ return False
+
+
+# pylint: disable=line-too-long
+
+# Example output from 'aapt dump resources --values' corresponding
+# to strings:
+#
+# config zh-rHK
+# resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)
+# (string8) "瀏覽首頁"
+# resource 0x7f12009d org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000c8e0 (s=0x0008 r=0x00)
+# (string8) "向上瀏覽"
+#
+
+# The following are compiled regular expressions used to recognize each
+# kind of line and extract the relevant information.
+#
+_RE_AAPT_CONFIG = re.compile(r'^\s+config (.+):$')
+assert _RE_AAPT_CONFIG.match(' config (default):')
+assert _RE_AAPT_CONFIG.match(' config zh-rTW:')
+
+# Match an ISO 639-1 or ISO 639-2 locale.
+_RE_AAPT_ISO_639_LOCALE = re.compile(r'^[a-z]{2,3}(-r[A-Z]{2,3})?$')
+assert _RE_AAPT_ISO_639_LOCALE.match('de')
+assert _RE_AAPT_ISO_639_LOCALE.match('zh-rTW')
+assert _RE_AAPT_ISO_639_LOCALE.match('fil')
+assert not _RE_AAPT_ISO_639_LOCALE.match('land')
+
+_RE_AAPT_BCP47_LOCALE = re.compile(r'^b\+[a-z][a-zA-Z0-9\+]+$')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr+Latn')
+assert _RE_AAPT_BCP47_LOCALE.match('b+en+US')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+1234')
+
+_RE_AAPT_STRING_RESOURCE_HEADER = re.compile(
+ r'^\s+resource 0x([0-9a-f]+) [a-zA-Z][a-zA-Z0-9.]+:string/(\w+):.*$')
+assert _RE_AAPT_STRING_RESOURCE_HEADER.match(
+ r' resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)'
+)
+
+_RE_AAPT_STRING_RESOURCE_VALUE = re.compile(r'^\s+\(string8\) "(.*)"$')
+assert _RE_AAPT_STRING_RESOURCE_VALUE.match(r' (string8) "瀏覽首頁"')
+
+# pylint: enable=line-too-long
+
+
+def _ConvertAaptLocaleToBcp47(locale):
+ """Convert a locale name from 'aapt dump' to its BCP-47 form."""
+ if locale.startswith('b+'):
+ return '-'.join(locale[2:].split('+'))
+ lang, _, region = locale.partition('-r')
+ if region:
+ return '%s-%s' % (lang, region)
+ return lang
+
+
+assert _ConvertAaptLocaleToBcp47('(default)') == '(default)'
+assert _ConvertAaptLocaleToBcp47('en') == 'en'
+assert _ConvertAaptLocaleToBcp47('en-rUS') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('en-US') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('fil') == 'fil'
+assert _ConvertAaptLocaleToBcp47('b+sr+Latn') == 'sr-Latn'
+
+
+def ParseApkResources(aapt_path, apk_path):
+  """Use aapt to extract the localized strings of a given APK.
+
+  Args:
+    aapt_path: Path to the aapt executable.
+    apk_path: Path to the input APK.
+  Returns:
+    A new ResourceStringMap instance populated with the APK's content.
+  """
+ cmd_args = [aapt_path, 'dump', '--values', 'resources', apk_path]
+ p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+
+ res_map = ResourceStringMap()
+ current_locale = None
+ current_resource_id = None
+ current_resource_name = None
+ need_value = False
+ while True:
+ line = p.stdout.readline().rstrip()
+ if not line:
+ break
+ m = _RE_AAPT_CONFIG.match(line)
+ if m:
+ locale = None
+ aapt_locale = m.group(1)
+ if aapt_locale == '(default)':
+ locale = aapt_locale
+ elif _RE_AAPT_ISO_639_LOCALE.match(aapt_locale):
+ locale = aapt_locale
+ elif _RE_AAPT_BCP47_LOCALE.match(aapt_locale):
+ locale = aapt_locale
+ if locale is not None:
+ current_locale = _ConvertAaptLocaleToBcp47(locale)
+ continue
+
+ if current_locale is None:
+ continue
+
+ if need_value:
+ m = _RE_AAPT_STRING_RESOURCE_VALUE.match(line)
+ if not m:
+ # Should not happen
+        sys.stderr.write('WARNING: Missing value for string ID 0x%08x "%s"\n' %
+                         (current_resource_id, current_resource_name))
+ resource_value = '<MISSING_STRING_%08x>' % current_resource_id
+ else:
+ resource_value = UnquoteString(m.group(1))
+
+ res_map.AddValue(current_resource_id, current_resource_name,
+ 'config %s' % current_locale, resource_value)
+ need_value = False
+ else:
+ m = _RE_AAPT_STRING_RESOURCE_HEADER.match(line)
+ if m:
+ current_resource_id = int(m.group(1), 16)
+ current_resource_name = m.group(2)
+ need_value = True
+
+ return res_map
+
+
+def main(args):
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument(
+ 'input_file',
+ help='Input file path. This can be either an APK, or an app bundle.')
+ parser.add_argument('--output', help='Optional output file path.')
+ parser.add_argument(
+ '--omit-ids',
+ action='store_true',
+ help='Omit resource IDs in the output. This is useful '
+ 'to compare the contents of two distinct builds of the '
+ 'same APK.')
+ parser.add_argument(
+ '--aapt-path',
+ default=_AAPT_DEFAULT_PATH,
+ help='Path to aapt executable. Optional for APKs.')
+ parser.add_argument(
+ '--r-txt-path',
+    help='Path to an optional input R.txt file used to translate resource '
+    'IDs to string names. Useful when resource names in the input files '
+    'were obfuscated. NOTE: If ${INPUT_FILE}.R.txt exists, it will be used '
+    'automatically by this script.')
+ parser.add_argument(
+ '--bundletool-path',
+ default=_DEFAULT_BUNDLETOOL_PATH,
+ help='Path to alternate bundletool .jar file. Only used for bundles.')
+ parser.add_argument(
+ '--profile', action='store_true', help='Enable Python profiling.')
+
+ options = parser.parse_args(args)
+
+ # Create a {res_id -> res_name} map for unobfuscation, if needed.
+ res_id_name_map = {}
+ r_txt_path = options.r_txt_path
+ if not r_txt_path:
+ candidate_r_txt_path = options.input_file + '.R.txt'
+ if os.path.exists(candidate_r_txt_path):
+ r_txt_path = candidate_r_txt_path
+
+ if r_txt_path:
+ res_id_name_map = ReadStringMapFromRTxt(r_txt_path)
+
+  # Create a helper function that creates a new ResourceStringMap instance
+  # based on the input file's type.
+ if IsFilePathABundle(options.input_file):
+ if not options.bundletool_path:
+ parser.error(
+ '--bundletool-path <BUNDLETOOL_JAR> is required to parse bundles.')
+
+    # Use bundletool to parse the bundle resources.
+ def create_string_map():
+ return ParseBundleResources(options.bundletool_path, options.input_file)
+
+ elif IsFilePathAnApk(options.input_file):
+ if not options.aapt_path:
+ parser.error('--aapt-path <AAPT> is required to parse APKs.')
+
+ # Use aapt dump to parse the APK resources.
+ def create_string_map():
+ return ParseApkResources(options.aapt_path, options.input_file)
+
+ else:
+ parser.error('Unknown file format: %s' % options.input_file)
+
+ # Print everything now.
+ with ManagedOutput(options.output) as output:
+ with ManagedPythonProfiling(options.profile):
+ res_map = create_string_map()
+ res_map.RemapResourceNames(res_id_name_map)
+ lines = AutoIndentStringList(res_map.ToStringList(options.omit_ids))
+ for line in lines:
+ output.write(line)
+ output.write('\n')
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/emma_coverage_stats.py b/deps/v8/build/android/emma_coverage_stats.py
new file mode 100755
index 0000000000..fe1775a8a2
--- /dev/null
+++ b/deps/v8/build/android/emma_coverage_stats.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates incremental code coverage reports for Java code in Chromium.
+
+Usage:
+
+ build/android/emma_coverage_stats.py -v --out <output file path> --emma-dir
+ <EMMA file directory> --lines-for-coverage-file
+ <path to file containing lines for coverage>
+
+ Creates a JSON representation of the overall and file coverage stats and saves
+ this information to the specified output file.
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import re
+import sys
+from xml.etree import ElementTree
+
+import devil_chromium
+from devil.utils import run_tests_helper
+
+NOT_EXECUTABLE = -1
+NOT_COVERED = 0
+COVERED = 1
+PARTIALLY_COVERED = 2
+
+# Coverage information about a single line of code.
+LineCoverage = collections.namedtuple(
+ 'LineCoverage',
+ ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
+
+
+class _EmmaHtmlParser(object):
+ """Encapsulates HTML file parsing operations.
+
+ This class contains all operations related to parsing HTML files that were
+ produced using the EMMA code coverage tool.
+
+ Example HTML:
+
+ Package links:
+ <a href="_files/1.html">org.chromium.chrome</a>
+ This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.
+
+ Class links:
+ <a href="1e.html">DoActivity.java</a>
+ This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.
+
+ Line coverage data:
+ <tr class="p">
+ <td class="l" title="78% line coverage (7 out of 9)">108</td>
+ <td title="78% line coverage (7 out of 9 instructions)">
+ if (index < 0 || index = mSelectors.size()) index = 0;</td>
+ </tr>
+ <tr>
+ <td class="l">109</td>
+ <td> </td>
+ </tr>
+ <tr class="c">
+ <td class="l">110</td>
+ <td> if (mSelectors.get(index) != null) {</td>
+ </tr>
+ <tr class="z">
+ <td class="l">111</td>
+ <td> for (int i = 0; i < mSelectors.size(); i++) {</td>
+ </tr>
+ Each <tr> element is returned by the selector |XPATH_SELECT_LOC|.
+
+ We can parse this to get:
+ 1. Line number
+ 2. Line of source code
+ 3. Coverage status (c, z, or p)
+ 4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
+ """
+ # Selector to match all <a> elements within the rows that are in the table
+ # that displays all of the different packages.
+ _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'
+
+ # Selector to match all <a> elements within the rows that are in the table
+ # that displays all of the different classes within a package.
+ _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'
+
+ # Selector to match all <tr> elements within the table containing Java source
+ # code in an EMMA HTML file.
+ _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'
+
+ # Children of HTML elements are represented as a list in ElementTree. These
+ # constants represent list indices corresponding to relevant child elements.
+
+ # Child 1 contains percentage covered for a line.
+ _ELEMENT_PERCENT_COVERED = 1
+
+ # Child 1 contains the original line of source code.
+ _ELEMENT_CONTAINING_SOURCE_CODE = 1
+
+ # Child 0 contains the line number.
+ _ELEMENT_CONTAINING_LINENO = 0
+
+ # Maps CSS class names to corresponding coverage constants.
+ _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED}
+
+ # UTF-8 no break space.
+ _NO_BREAK_SPACE = '\xc2\xa0'
+
+ def __init__(self, emma_file_base_dir):
+ """Initializes _EmmaHtmlParser.
+
+ Args:
+ emma_file_base_dir: Path to the location where EMMA report files are
+ stored. Should be where index.html is stored.
+ """
+ self._base_dir = emma_file_base_dir
+ self._emma_files_path = os.path.join(self._base_dir, '_files')
+ self._index_path = os.path.join(self._base_dir, 'index.html')
+
+ def GetLineCoverage(self, emma_file_path):
+ """Returns a list of LineCoverage objects for the given EMMA HTML file.
+
+ Args:
+ emma_file_path: String representing the path to the EMMA HTML file.
+
+ Returns:
+ A list of LineCoverage objects.
+ """
+ line_tr_elements = self._FindElements(
+ emma_file_path, self._XPATH_SELECT_LOC)
+ line_coverage = []
+ for tr in line_tr_elements:
+ # Get the coverage status.
+ coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE)
+ # Get the fractional coverage value.
+ if coverage_status == PARTIALLY_COVERED:
+ title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE'))
+ # Parse string that contains percent covered: "83% line coverage ...".
+ percent_covered = title_attribute.split('%')[0]
+ fractional_coverage = int(percent_covered) / 100.0
+ else:
+ fractional_coverage = 1.0
+
+ # Get the line number.
+ lineno_element = tr[self._ELEMENT_CONTAINING_LINENO]
+ # Handles oddly formatted HTML (where there is an extra <a> tag).
+ lineno = int(lineno_element.text or
+ lineno_element[self._ELEMENT_CONTAINING_LINENO].text)
+ # Get the original line of Java source code.
+ raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text
+ utf8_source = raw_source.encode('UTF-8')
+ source = utf8_source.replace(self._NO_BREAK_SPACE, ' ')
+
+ line = LineCoverage(lineno, source, coverage_status, fractional_coverage)
+ line_coverage.append(line)
+
+ return line_coverage
+
+ def GetPackageNameToEmmaFileDict(self):
+ """Returns a dict mapping Java packages to EMMA HTML coverage files.
+
+ Parses the EMMA index.html file to get a list of packages, then parses each
+ package HTML file to get a list of classes for that package, and creates
+ a dict with this info.
+
+ Returns:
+ A dict mapping string representation of Java packages (with class
+ names appended) to the corresponding file paths of EMMA HTML files.
+ """
+ # These <a> elements contain each package name and the path of the file
+ # where all classes within said package are listed.
+ package_link_elements = self._FindElements(
+ self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS)
+ # Maps file path of package directory (EMMA generated) to package name.
+ # Example: emma_dir/f.html: org.chromium.chrome.
+ package_links = {
+ os.path.join(self._base_dir, link.attrib['HREF']): link.text
+ for link in package_link_elements if 'HREF' in link.attrib
+ }
+
+ package_to_emma = {}
+ for package_emma_file_path, package_name in package_links.iteritems():
+ # These <a> elements contain each class name in the current package and
+ # the path of the file where the coverage info is stored for each class.
+ coverage_file_link_elements = self._FindElements(
+ package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS)
+
+ for class_name_element in coverage_file_link_elements:
+ emma_coverage_file_path = os.path.join(
+ self._emma_files_path, class_name_element.attrib['HREF'])
+ full_package_name = '%s.%s' % (package_name, class_name_element.text)
+ package_to_emma[full_package_name] = emma_coverage_file_path
+
+ return package_to_emma
+
+ # pylint: disable=no-self-use
+ def _FindElements(self, file_path, xpath_selector):
+ """Reads a HTML file and performs an XPath match.
+
+ Args:
+ file_path: String representing the path to the HTML file.
+ xpath_selector: String representing xpath search pattern.
+
+ Returns:
+ A list of ElementTree.Elements matching the given XPath selector.
+ Returns an empty list if there is no match.
+ """
+ with open(file_path) as f:
+ file_contents = f.read().decode('ISO-8859-1').encode('UTF-8')
+ root = ElementTree.fromstring(file_contents)
+ return root.findall(xpath_selector)
+
+
+class _EmmaCoverageStats(object):
+ """Computes code coverage stats for Java code using the coverage tool EMMA.
+
+ This class provides an API that allows users to capture absolute code coverage
+ and code coverage on a subset of lines for each Java source file. Coverage
+ reports are generated in JSON format.
+ """
+ # Regular expression to get package name from Java package statement.
+ RE_PACKAGE_MATCH_GROUP = 'package'
+ RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP)
+
+ def __init__(self, emma_file_base_dir, files_for_coverage):
+ """Initialize _EmmaCoverageStats.
+
+ Args:
+ emma_file_base_dir: String representing the path to the base directory
+ where EMMA HTML coverage files are stored, i.e. parent of index.html.
+ files_for_coverage: A list of Java source code file paths to get EMMA
+ coverage for.
+ """
+ self._emma_parser = _EmmaHtmlParser(emma_file_base_dir)
+ self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage)
+
+ def GetCoverageDict(self, lines_for_coverage):
+ """Returns a dict containing detailed coverage information.
+
+ Gets detailed coverage stats for each file specified in the
+ |lines_for_coverage| dict and the total incremental number of lines covered
+ and executable for all files in |lines_for_coverage|.
+
+ Args:
+ lines_for_coverage: A dict mapping Java source file paths to lists of line
+ numbers.
+
+ Returns:
+ A dict containing coverage stats for the given dict of files and lines.
+ Contains absolute coverage stats for each file, coverage stats for each
+ file's lines specified in |lines_for_coverage|, line by line coverage
+ for each file, and overall coverage stats for the lines specified in
+ |lines_for_coverage|.
+ """
+ file_coverage = {}
+ for file_path, line_numbers in lines_for_coverage.iteritems():
+ file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers)
+ if file_coverage_dict:
+ file_coverage[file_path] = file_coverage_dict
+ else:
+ logging.warning(
+ 'No code coverage data for %s, skipping.', file_path)
+
+ covered_statuses = [s['incremental'] for s in file_coverage.itervalues()]
+ num_covered_lines = sum(s['covered'] for s in covered_statuses)
+ num_total_lines = sum(s['total'] for s in covered_statuses)
+ return {
+ 'files': file_coverage,
+ 'patch': {
+ 'incremental': {
+ 'covered': num_covered_lines,
+ 'total': num_total_lines
+ }
+ }
+ }
+
+ def GetCoverageDictForFile(self, file_path, line_numbers):
+ """Returns a dict containing detailed coverage info for the given file.
+
+ Args:
+ file_path: The path to the Java source file that we want to create the
+ coverage dict for.
+ line_numbers: A list of integer line numbers to retrieve additional stats
+ for.
+
+ Returns:
+ A dict containing absolute, incremental, and line by line coverage for
+ a file.
+ """
+ if file_path not in self._source_to_emma:
+ return None
+ emma_file = self._source_to_emma[file_path]
+ total_line_coverage = self._emma_parser.GetLineCoverage(emma_file)
+ incremental_line_coverage = [line for line in total_line_coverage
+ if line.lineno in line_numbers]
+ line_by_line_coverage = [
+ {
+ 'line': line.source,
+ 'coverage': line.covered_status,
+ 'changed': line.lineno in line_numbers,
+ 'fractional_coverage': line.fractional_line_coverage,
+ }
+ for line in total_line_coverage
+ ]
+ total_covered_lines, total_lines = (
+ self.GetSummaryStatsForLines(total_line_coverage))
+ incremental_covered_lines, incremental_total_lines = (
+ self.GetSummaryStatsForLines(incremental_line_coverage))
+
+ file_coverage_stats = {
+ 'absolute': {
+ 'covered': total_covered_lines,
+ 'total': total_lines
+ },
+ 'incremental': {
+ 'covered': incremental_covered_lines,
+ 'total': incremental_total_lines
+ },
+ 'source': line_by_line_coverage,
+ }
+ return file_coverage_stats
+
+ # pylint: disable=no-self-use
+ def GetSummaryStatsForLines(self, line_coverage):
+ """Gets summary stats for a given list of LineCoverage objects.
+
+ Args:
+ line_coverage: A list of LineCoverage objects.
+
+ Returns:
+      A tuple containing the number of lines that are covered and the total
+      number of lines that are executable, respectively.
+ """
+ partially_covered_sum = 0
+ covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0}
+ for line in line_coverage:
+ status = line.covered_status
+ if status == NOT_EXECUTABLE:
+ continue
+ covered_status_totals[status] += 1
+ if status == PARTIALLY_COVERED:
+ partially_covered_sum += line.fractional_line_coverage
+
+ total_covered = covered_status_totals[COVERED] + partially_covered_sum
+ total_lines = sum(covered_status_totals.values())
+ return total_covered, total_lines
+
+ def _GetSourceFileToEmmaFileDict(self, files):
+ """Gets a dict used to correlate Java source files with EMMA HTML files.
+
+ This method gathers the information needed to correlate EMMA HTML
+ files with Java source files. EMMA XML and plain text reports do not provide
+ line by line coverage data, so HTML reports must be used instead.
+    Unfortunately, the HTML files that are created are given garbage names
+    (e.g. 1.html), so we need to manually correlate EMMA HTML files
+ with the original Java source files.
+
+ Args:
+ files: A list of file names for which coverage information is desired.
+
+ Returns:
+ A dict mapping Java source file paths to EMMA HTML file paths.
+ """
+ # Maps Java source file paths to package names.
+ # Example: /usr/code/file.java -> org.chromium.file.java.
+ source_to_package = {}
+ for file_path in files:
+ package = self.GetPackageNameFromFile(file_path)
+ if package:
+ source_to_package[file_path] = package
+ else:
+ logging.warning("Skipping %s because it doesn\'t have a package "
+ "statement.", file_path)
+
+ # Maps package names to EMMA report HTML files.
+ # Example: org.chromium.file.java -> out/coverage/1a.html.
+ package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict()
+ # Finally, we have a dict mapping Java file paths to EMMA report files.
+ # Example: /usr/code/file.java -> out/coverage/1a.html.
+ source_to_emma = {source: package_to_emma[package]
+ for source, package in source_to_package.iteritems()
+ if package in package_to_emma}
+ return source_to_emma
+
+ @staticmethod
+ def NeedsCoverage(file_path):
+ """Checks to see if the file needs to be analyzed for code coverage.
+
+ Args:
+ file_path: A string representing path to the file.
+
+ Returns:
+ True for Java files that exist, False for all others.
+ """
+ if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path):
+ return True
+ else:
+ logging.info('Skipping file %s, cannot compute code coverage.', file_path)
+ return False
+
+ @staticmethod
+ def GetPackageNameFromFile(file_path):
+ """Gets the full package name including the file name for a given file path.
+
+ Args:
+ file_path: String representing the path to the Java source file.
+
+ Returns:
+ A string representing the full package name with file name appended or
+ None if there is no package statement in the file.
+ """
+ with open(file_path) as f:
+ file_content = f.read()
+ package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content)
+ if package_match:
+ package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP)
+ file_name = os.path.basename(file_path)
+ return '%s.%s' % (package, file_name)
+ else:
+ return None
+
+
+def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir):
+ """Generates a coverage report for a given set of lines.
+
+ Writes the results of the coverage analysis to the file specified by
+ |out_file_path|.
+
+ Args:
+ line_coverage_file: The path to a file which contains a dict mapping file
+ names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means
+ that we should compute coverage information on lines 1 - 3 for file1.
+ out_file_path: A string representing the location to write the JSON report.
+ coverage_dir: A string representing the file path where the EMMA
+ HTML coverage files are located (i.e. folder where index.html is located).
+ """
+ with open(line_coverage_file) as f:
+ potential_files_for_coverage = json.load(f)
+
+ files_for_coverage = {f: lines
+ for f, lines in potential_files_for_coverage.iteritems()
+ if _EmmaCoverageStats.NeedsCoverage(f)}
+
+ coverage_results = {}
+ if files_for_coverage:
+ code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys())
+ coverage_results = code_coverage.GetCoverageDict(files_for_coverage)
+ else:
+ logging.info('No Java files requiring coverage were included in %s.',
+ line_coverage_file)
+
+ with open(out_file_path, 'w+') as out_status_file:
+ json.dump(coverage_results, out_status_file)
+
+
+def main():
+ argparser = argparse.ArgumentParser()
+ argparser.add_argument('--out', required=True, type=str,
+ help='Report output file path.')
+ argparser.add_argument('--emma-dir', required=True, type=str,
+ help='EMMA HTML report directory.')
+ argparser.add_argument('--lines-for-coverage-file', required=True, type=str,
+ help='File containing a JSON object. Should contain a '
+ 'dict mapping file names to lists of line numbers of '
+ 'code for which coverage information is desired.')
+ argparser.add_argument('-v', '--verbose', action='count',
+ help='Print verbose log information.')
+ args = argparser.parse_args()
+ run_tests_helper.SetLogLevel(args.verbose)
+ devil_chromium.Initialize()
+ GenerateCoverageReport(args.lines_for_coverage_file, args.out, args.emma_dir)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/emma_coverage_stats_test.py b/deps/v8/build/android/emma_coverage_stats_test.py
new file mode 100755
index 0000000000..44f6dc3586
--- /dev/null
+++ b/deps/v8/build/android/emma_coverage_stats_test.py
@@ -0,0 +1,563 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+from xml.etree import ElementTree
+
+import emma_coverage_stats
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+EMPTY_COVERAGE_STATS_DICT = {
+ 'files': {},
+ 'patch': {
+ 'incremental': {
+ 'covered': 0, 'total': 0
+ }
+ }
+}
+
+
+class _EmmaHtmlParserTest(unittest.TestCase):
+ """Tests for _EmmaHtmlParser.
+
+ Uses modified EMMA report HTML that contains only the subset of tags needed
+ for test verification.
+ """
+
+ def setUp(self):
+ self.emma_dir = 'fake/dir/'
+ self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir)
+ self.simple_html = '<TR><TD CLASS="p">Test HTML</TD></TR>'
+ self.index_html = (
+ '<HTML>'
+ '<BODY>'
+ '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '<TABLE CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '<TABLE CLASS="it" CELLSPACING="0">'
+ '</TABLE>'
+ '<TABLE CELLSPACING="0" WIDTH="100%">'
+ '<TR>'
+ '<TH CLASS="f">name</TH>'
+ '<TH>class, %</TH>'
+ '<TH>method, %</TH>'
+ '<TH>block, %</TH>'
+ '<TH>line, %</TH>'
+ '</TR>'
+ '<TR CLASS="o">'
+ '<TD><A HREF="_files/0.html"'
+ '>org.chromium.chrome.browser</A></TD>'
+ '<TD CLASS="h">0% (0/3)</TD>'
+ '</TR>'
+ '<TR>'
+ '<TD><A HREF="_files/1.html"'
+ '>org.chromium.chrome.browser.tabmodel</A></TD>'
+ '<TD CLASS="h">0% (0/8)</TD>'
+ '</TR>'
+ '</TABLE>'
+ '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '</BODY>'
+ '</HTML>'
+ )
+ self.package_1_class_list_html = (
+ '<HTML>'
+ '<BODY>'
+ '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '<TABLE CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '<TABLE CELLSPACING="0" WIDTH="100%">'
+ '<TR>'
+ '<TH CLASS="f">name</TH>'
+ '<TH>class, %</TH>'
+ '<TH>method, %</TH>'
+ '<TH>block, %</TH>'
+ '<TH>line, %</TH>'
+ '</TR>'
+ '<TR CLASS="o">'
+ '<TD><A HREF="1e.html">IntentHelper.java</A></TD>'
+ '<TD CLASS="h">0% (0/3)</TD>'
+ '<TD CLASS="h">0% (0/9)</TD>'
+ '<TD CLASS="h">0% (0/97)</TD>'
+ '<TD CLASS="h">0% (0/26)</TD>'
+ '</TR>'
+ '</TABLE>'
+ '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '</BODY>'
+ '</HTML>'
+ )
+ self.package_2_class_list_html = (
+ '<HTML>'
+ '<BODY>'
+ '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '<TABLE CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '<TABLE CELLSPACING="0" WIDTH="100%">'
+ '<TR>'
+ '<TH CLASS="f">name</TH>'
+ '<TH>class, %</TH>'
+ '<TH>method, %</TH>'
+ '<TH>block, %</TH>'
+ '<TH>line, %</TH>'
+ '</TR>'
+ '<TR CLASS="o">'
+ '<TD><A HREF="1f.html">ContentSetting.java</A></TD>'
+ '<TD CLASS="h">0% (0/1)</TD>'
+ '</TR>'
+ '<TR>'
+ '<TD><A HREF="20.html">DevToolsServer.java</A></TD>'
+ '</TR>'
+ '<TR CLASS="o">'
+ '<TD><A HREF="21.html">FileProviderHelper.java</A></TD>'
+ '</TR>'
+ '<TR>'
+ '<TD><A HREF="22.html">ContextualMenuBar.java</A></TD>'
+ '</TR>'
+ '<TR CLASS="o">'
+ '<TD><A HREF="23.html">AccessibilityUtil.java</A></TD>'
+ '</TR>'
+ '<TR>'
+ '<TD><A HREF="24.html">NavigationPopup.java</A></TD>'
+ '</TR>'
+ '</TABLE>'
+ '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
+ '</TABLE>'
+ '</BODY>'
+ '</HTML>'
+ )
+ self.partially_covered_tr_html = (
+ '<TR CLASS="p">'
+ '<TD CLASS="l" TITLE="78% line coverage (7 out of 9)">108</TD>'
+ '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
+ 'if (index &lt; 0 || index = mSelectors.size()) index = 0;</TD>'
+ '</TR>'
+ )
+ self.covered_tr_html = (
+ '<TR CLASS="c">'
+ '<TD CLASS="l">110</TD>'
+ '<TD> if (mSelectors.get(index) != null) {</TD>'
+ '</TR>'
+ )
+ self.not_executable_tr_html = (
+ '<TR>'
+ '<TD CLASS="l">109</TD>'
+ '<TD> </TD>'
+ '</TR>'
+ )
+ self.tr_with_extra_a_tag = (
+ '<TR CLASS="z">'
+ '<TD CLASS="l">'
+ '<A name="1f">54</A>'
+ '</TD>'
+ '<TD> }</TD>'
+ '</TR>'
+ )
+
+ def testInit(self):
+ emma_dir = self.emma_dir
+ parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
+ self.assertEqual(parser._base_dir, emma_dir)
+ self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
+ self.assertEqual(parser._index_path, 'fake/dir/index.html')
+
+ def testFindElements_basic(self):
+ read_values = [self.simple_html]
+ found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+ file_path='fake', xpath_selector='.//TD')
+ self.assertIs(type(found), list)
+ self.assertIs(type(found[0]), ElementTree.Element)
+ self.assertEqual(found[0].text, 'Test HTML')
+
+ def testFindElements_multipleElements(self):
+ multiple_trs = self.not_executable_tr_html + self.covered_tr_html
+ read_values = ['<div>' + multiple_trs + '</div>']
+ found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+ file_path='fake', xpath_selector='.//TR')
+    self.assertEqual(2, len(found))
+
+ def testFindElements_noMatch(self):
+ read_values = [self.simple_html]
+ found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
+ file_path='fake', xpath_selector='.//TR')
+ self.assertEqual(found, [])
+
+ def testFindElements_badFilePath(self):
+ with self.assertRaises(IOError):
+ with mock.patch('os.path.exists', return_value=False):
+ self.parser._FindElements('fake', xpath_selector='//tr')
+
+ def testGetPackageNameToEmmaFileDict_basic(self):
+ expected_dict = {
+ 'org.chromium.chrome.browser.AccessibilityUtil.java':
+ 'fake/dir/_files/23.html',
+ 'org.chromium.chrome.browser.ContextualMenuBar.java':
+ 'fake/dir/_files/22.html',
+ 'org.chromium.chrome.browser.tabmodel.IntentHelper.java':
+ 'fake/dir/_files/1e.html',
+ 'org.chromium.chrome.browser.ContentSetting.java':
+ 'fake/dir/_files/1f.html',
+ 'org.chromium.chrome.browser.DevToolsServer.java':
+ 'fake/dir/_files/20.html',
+ 'org.chromium.chrome.browser.NavigationPopup.java':
+ 'fake/dir/_files/24.html',
+ 'org.chromium.chrome.browser.FileProviderHelper.java':
+ 'fake/dir/_files/21.html'}
+
+ read_values = [self.index_html, self.package_1_class_list_html,
+ self.package_2_class_list_html]
+ return_dict, mock_open = MockOpenForFunction(
+ self.parser.GetPackageNameToEmmaFileDict, read_values)
+
+ self.assertDictEqual(return_dict, expected_dict)
+ self.assertEqual(mock_open.call_count, 3)
+ calls = [mock.call('fake/dir/index.html'),
+ mock.call('fake/dir/_files/1.html'),
+ mock.call('fake/dir/_files/0.html')]
+ mock_open.assert_has_calls(calls)
+
+ def testGetPackageNameToEmmaFileDict_noPackageElements(self):
+ self.parser._FindElements = mock.Mock(return_value=[])
+ return_dict = self.parser.GetPackageNameToEmmaFileDict()
+ self.assertDictEqual({}, return_dict)
+
+ def testGetLineCoverage_status_basic(self):
+ line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+ self.assertEqual(line_coverage[0].covered_status,
+ emma_coverage_stats.COVERED)
+
+ def testGetLineCoverage_status_statusMissing(self):
+ line_coverage = self.GetLineCoverageWithFakeElements(
+ [self.not_executable_tr_html])
+ self.assertEqual(line_coverage[0].covered_status,
+ emma_coverage_stats.NOT_EXECUTABLE)
+
+ def testGetLineCoverage_fractionalCoverage_basic(self):
+ line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+ self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0)
+
+ def testGetLineCoverage_fractionalCoverage_partial(self):
+ line_coverage = self.GetLineCoverageWithFakeElements(
+ [self.partially_covered_tr_html])
+ self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78)
+
+ def testGetLineCoverage_lineno_basic(self):
+ line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
+ self.assertEqual(line_coverage[0].lineno, 110)
+
+ def testGetLineCoverage_lineno_withAlternativeHtml(self):
+ line_coverage = self.GetLineCoverageWithFakeElements(
+ [self.tr_with_extra_a_tag])
+ self.assertEqual(line_coverage[0].lineno, 54)
+
+ def testGetLineCoverage_source(self):
+ self.parser._FindElements = mock.Mock(
+ return_value=[ElementTree.fromstring(self.covered_tr_html)])
+ line_coverage = self.parser.GetLineCoverage('fake_path')
+ self.assertEqual(line_coverage[0].source,
+ ' if (mSelectors.get(index) != null) {')
+
+ def testGetLineCoverage_multipleElements(self):
+ line_coverage = self.GetLineCoverageWithFakeElements(
+ [self.covered_tr_html, self.partially_covered_tr_html,
+ self.tr_with_extra_a_tag])
+ self.assertEqual(len(line_coverage), 3)
+
+ def GetLineCoverageWithFakeElements(self, html_elements):
+ """Wraps GetLineCoverage so mock HTML can easily be used.
+
+ Args:
+ html_elements: List of strings each representing an HTML element.
+
+ Returns:
+ A list of LineCoverage objects.
+ """
+ elements = [ElementTree.fromstring(string) for string in html_elements]
+ with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+ return_value=elements):
+ return self.parser.GetLineCoverage('fake_path')
+
+
+class _EmmaCoverageStatsTest(unittest.TestCase):
+ """Tests for _EmmaCoverageStats."""
+
+ def setUp(self):
+ self.good_source_to_emma = {
+ '/path/to/1/File1.java': '/emma/1.html',
+ '/path/2/File2.java': '/emma/2.html',
+ '/path/2/File3.java': '/emma/3.html'
+ }
+ self.line_coverage = [
+ emma_coverage_stats.LineCoverage(
+ 1, '', emma_coverage_stats.COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 2, '', emma_coverage_stats.COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 4, '', emma_coverage_stats.NOT_COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85),
+ emma_coverage_stats.LineCoverage(
+ 6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20)
+ ]
+ self.lines_for_coverage = [1, 3, 5, 6]
+ with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+ return_value=[]):
+ self.simple_coverage = emma_coverage_stats._EmmaCoverageStats(
+ 'fake_dir', {})
+
+ def testInit(self):
+ coverage_stats = self.simple_coverage
+ self.assertIsInstance(coverage_stats._emma_parser,
+ emma_coverage_stats._EmmaHtmlParser)
+ self.assertIsInstance(coverage_stats._source_to_emma, dict)
+
+ def testNeedsCoverage_withExistingJavaFile(self):
+ test_file = '/path/to/file/File.java'
+ with mock.patch('os.path.exists', return_value=True):
+ self.assertTrue(
+ emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+ def testNeedsCoverage_withNonJavaFile(self):
+ test_file = '/path/to/file/File.c'
+ with mock.patch('os.path.exists', return_value=True):
+ self.assertFalse(
+ emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+ def testNeedsCoverage_fileDoesNotExist(self):
+ test_file = '/path/to/file/File.java'
+ with mock.patch('os.path.exists', return_value=False):
+ self.assertFalse(
+ emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+ def testGetPackageNameFromFile_basic(self):
+ test_file_text = """// Test Copyright
+ package org.chromium.chrome.browser;
+ import android.graphics.RectF;"""
+ result_package, _ = MockOpenForFunction(
+ emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+ [test_file_text], file_path='/path/to/file/File.java')
+ self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java')
+
+ def testGetPackageNameFromFile_noPackageStatement(self):
+ result_package, _ = MockOpenForFunction(
+ emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+ ['not a package statement'], file_path='/path/to/file/File.java')
+ self.assertIsNone(result_package)
+
+ def testGetSummaryStatsForLines_basic(self):
+ covered, total = self.simple_coverage.GetSummaryStatsForLines(
+ self.line_coverage)
+ self.assertEqual(covered, 3.05)
+ self.assertEqual(total, 5)
+
+ def testGetSourceFileToEmmaFileDict(self):
+ package_names = {
+ '/path/to/1/File1.java': 'org.fake.one.File1.java',
+ '/path/2/File2.java': 'org.fake.File2.java',
+ '/path/2/File3.java': 'org.fake.File3.java'
+ }
+ package_to_emma = {
+ 'org.fake.one.File1.java': '/emma/1.html',
+ 'org.fake.File2.java': '/emma/2.html',
+ 'org.fake.File3.java': '/emma/3.html'
+ }
+ with mock.patch('os.path.exists', return_value=True):
+ coverage_stats = self.simple_coverage
+ coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock(
+ return_value=package_to_emma)
+ coverage_stats.GetPackageNameFromFile = lambda x: package_names[x]
+ result_dict = coverage_stats._GetSourceFileToEmmaFileDict(
+ package_names.keys())
+ self.assertDictEqual(result_dict, self.good_source_to_emma)
+
+ def testGetCoverageDictForFile(self):
+ line_coverage = self.line_coverage
+ self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage
+ self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'}
+ lines = self.lines_for_coverage
+ expected_dict = {
+ 'absolute': {
+ 'covered': 3.05,
+ 'total': 5
+ },
+ 'incremental': {
+ 'covered': 2.05,
+ 'total': 3
+ },
+ 'source': [
+ {
+ 'line': line_coverage[0].source,
+ 'coverage': line_coverage[0].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[0].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[1].source,
+ 'coverage': line_coverage[1].covered_status,
+ 'changed': False,
+ 'fractional_coverage': line_coverage[1].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[2].source,
+ 'coverage': line_coverage[2].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[2].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[3].source,
+ 'coverage': line_coverage[3].covered_status,
+ 'changed': False,
+ 'fractional_coverage': line_coverage[3].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[4].source,
+ 'coverage': line_coverage[4].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[4].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[5].source,
+ 'coverage': line_coverage[5].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[5].fractional_line_coverage,
+ }
+ ]
+ }
+ result_dict = self.simple_coverage.GetCoverageDictForFile(
+ '/fake/src', lines)
+ self.assertDictEqual(result_dict, expected_dict)
+
+ def testGetCoverageDictForFile_emptyCoverage(self):
+ expected_dict = {
+ 'absolute': {'covered': 0, 'total': 0},
+ 'incremental': {'covered': 0, 'total': 0},
+ 'source': []
+ }
+ self.simple_coverage._emma_parser.GetLineCoverage = lambda x: []
+ self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'}
+ result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {})
+ self.assertDictEqual(result_dict, expected_dict)
+
+ def testGetCoverageDictForFile_missingCoverage(self):
+ self.simple_coverage._source_to_emma = {}
+ result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {})
+ self.assertIsNone(result_dict)
+
+ def testGetCoverageDict_basic(self):
+ files_for_coverage = {
+ '/path/to/1/File1.java': [1, 3, 4],
+ '/path/2/File2.java': [1, 2]
+ }
+ self.simple_coverage._source_to_emma = {
+ '/path/to/1/File1.java': 'emma_1',
+ '/path/2/File2.java': 'emma_2'
+ }
+ coverage_info = {
+ 'emma_1': [
+ emma_coverage_stats.LineCoverage(
+ 1, '', emma_coverage_stats.COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5),
+ emma_coverage_stats.LineCoverage(
+ 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 4, '', emma_coverage_stats.COVERED, 1.0)
+ ],
+ 'emma_2': [
+ emma_coverage_stats.LineCoverage(
+ 1, '', emma_coverage_stats.NOT_COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 2, '', emma_coverage_stats.COVERED, 1.0)
+ ]
+ }
+ expected_dict = {
+ 'files': {
+ '/path/2/File2.java': {
+ 'absolute': {'covered': 1, 'total': 2},
+ 'incremental': {'covered': 1, 'total': 2},
+ 'source': [{'changed': True, 'coverage': 0,
+ 'line': '', 'fractional_coverage': 1.0},
+ {'changed': True, 'coverage': 1,
+ 'line': '', 'fractional_coverage': 1.0}]
+ },
+ '/path/to/1/File1.java': {
+ 'absolute': {'covered': 2.5, 'total': 3},
+ 'incremental': {'covered': 2, 'total': 2},
+ 'source': [{'changed': True, 'coverage': 1,
+ 'line': '', 'fractional_coverage': 1.0},
+ {'changed': False, 'coverage': 2,
+ 'line': '', 'fractional_coverage': 0.5},
+ {'changed': True, 'coverage': -1,
+ 'line': '', 'fractional_coverage': 1.0},
+ {'changed': True, 'coverage': 1,
+ 'line': '', 'fractional_coverage': 1.0}]
+ }
+ },
+ 'patch': {'incremental': {'covered': 3, 'total': 4}}
+ }
+ # Return the relevant coverage info for each file.
+ self.simple_coverage._emma_parser.GetLineCoverage = (
+ lambda x: coverage_info[x])
+ result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage)
+ self.assertDictEqual(result_dict, expected_dict)
+
+ def testGetCoverageDict_noCoverage(self):
+ result_dict = self.simple_coverage.GetCoverageDict({})
+ self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT)
+
+
+class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase):
+ """Tests for GenerateCoverageReport."""
+
+ def testGenerateCoverageReport_missingJsonFile(self):
+ with self.assertRaises(IOError):
+ with mock.patch('os.path.exists', return_value=False):
+ emma_coverage_stats.GenerateCoverageReport('', '', '')
+
+ def testGenerateCoverageReport_invalidJsonFile(self):
+ with self.assertRaises(ValueError):
+ with mock.patch('os.path.exists', return_value=True):
+ MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''],
+ line_coverage_file='', out_file_path='',
+ coverage_dir='')
+
+
+def MockOpenForFunction(func, side_effects, **kwargs):
+ """Allows easy mock open and read for callables that open multiple files.
+
+ Will mock the python open function in a way such that each time read() is
+ called on an open file, the next element in |side_effects| is returned. This
+ makes it easier to test functions that call open() multiple times.
+
+ Args:
+    func: The callable to invoke once the mock files are set up.
+ side_effects: A list of return values for each file to return once read.
+      Length of list should be equal to the number of calls to open in |func|.
+ **kwargs: Keyword arguments to be passed to |func|.
+
+ Returns:
+ A tuple containing the return value of |func| and the MagicMock object used
+ to mock all calls to open respectively.
+ """
+ mock_open = mock.mock_open()
+ mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value
+ for side_effect in side_effects]
+ with mock.patch('__builtin__.open', mock_open):
+ return func(**kwargs), mock_open
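+
+# Illustrative usage sketch for MockOpenForFunction (the helper below and the
+# file names are hypothetical, not part of this test suite): each mocked call
+# to open() yields a handle whose read() returns the next side effect.
+#
+#   def _ReadTwoFiles(path_a, path_b):
+#     with open(path_a) as f_a, open(path_b) as f_b:
+#       return f_a.read(), f_b.read()
+#
+#   result, mocked_open = MockOpenForFunction(
+#       _ReadTwoFiles, ['first contents', 'second contents'],
+#       path_a='a.txt', path_b='b.txt')
+#   # result == ('first contents', 'second contents')
+#   # mocked_open.call_count == 2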
+
+
+if __name__ == '__main__':
+ # Suppress logging messages.
+ unittest.main(buffer=True)
diff --git a/deps/v8/build/android/empty/.keep b/deps/v8/build/android/empty/.keep
new file mode 100644
index 0000000000..1fd038b8cf
--- /dev/null
+++ b/deps/v8/build/android/empty/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/deps/v8/build/android/empty_proguard.flags b/deps/v8/build/android/empty_proguard.flags
new file mode 100644
index 0000000000..53484fe815
--- /dev/null
+++ b/deps/v8/build/android/empty_proguard.flags
@@ -0,0 +1 @@
+# Used for apk targets that do not need proguard. See build/java_apk.gypi.
diff --git a/deps/v8/build/android/envsetup.sh b/deps/v8/build/android/envsetup.sh
new file mode 100755
index 0000000000..49041a4e70
--- /dev/null
+++ b/deps/v8/build/android/envsetup.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Adds Android SDK tools and related helpers to PATH, useful for development.
+# Not used on bots, nor required for any commands to succeed.
+# Use like: source build/android/envsetup.sh
+
+# Make sure we're being sourced.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+ echo "ERROR: envsetup must be sourced."
+ exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+ local SCRIPT_PATH="$1"
+ local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+ local CHROME_SRC="$(readlink -f "${SCRIPT_DIR}/../../")"
+ local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_sdk/public"
+
+ export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+ export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/
+ export PATH=$PATH:${CHROME_SRC}/build/android
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
diff --git a/deps/v8/build/android/generate_emma_html.py b/deps/v8/build/android/generate_emma_html.py
new file mode 100755
index 0000000000..dab3992c45
--- /dev/null
+++ b/deps/v8/build/android/generate_emma_html.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates EMMA coverage files to produce html output."""
+
+import fnmatch
+import json
+import optparse
+import os
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+
+def _GetFilesWithExt(root_dir, ext):
+ """Gets all files with a given extension.
+
+ Args:
+ root_dir: Directory in which to search for files.
+ ext: Extension to look for (including dot)
+
+ Returns:
+ A list of absolute paths to files that match.
+ """
+ files = []
+ for root, _, filenames in os.walk(root_dir):
+ basenames = fnmatch.filter(filenames, '*.' + ext)
+ files.extend([os.path.join(root, basename)
+ for basename in basenames])
+
+ return files
+
+
+def main():
+ option_parser = optparse.OptionParser()
+ option_parser.add_option('--output', help='HTML output filename.')
+ option_parser.add_option('--coverage-dir', default=None,
+ help=('Root of the directory in which to search for '
+ 'coverage data (.ec) files.'))
+ option_parser.add_option('--metadata-dir', default=None,
+ help=('Root of the directory in which to search for '
+ 'coverage metadata (.em) files.'))
+ option_parser.add_option('--cleanup', action='store_true',
+ help=('If set, removes coverage files generated at '
+ 'runtime.'))
+ options, _ = option_parser.parse_args()
+
+ devil_chromium.Initialize()
+
+ if not (options.coverage_dir and options.metadata_dir and options.output):
+ option_parser.error('One or more mandatory options are missing.')
+
+ coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
+ metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
+ # Filter out zero-length files. These are created by emma_instr.py when a
+ # target has no classes matching the coverage filter.
+ metadata_files = [f for f in metadata_files if os.path.getsize(f)]
+ print 'Found coverage files: %s' % str(coverage_files)
+ print 'Found metadata files: %s' % str(metadata_files)
+
+ sources = []
+ for f in metadata_files:
+ sources_file = os.path.splitext(f)[0] + '_sources.txt'
+ with open(sources_file, 'r') as sf:
+ sources.extend(json.load(sf))
+
+ # Source paths should be passed to EMMA in a way that the relative file paths
+ # reflect the class package name.
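+  # For example (illustrative path), a source listed as
+  # 'base/android/java/src/org/chromium/base/Foo.java' is trimmed back to
+  # 'base/android/java/src', so the path under the source root mirrors the
+  # Java package name.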
+ PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium', 'com/chrome']
+ fixed_source_paths = set()
+
+ for path in sources:
+ for partial in PARTIAL_PACKAGE_NAMES:
+ if partial in path:
+ fixed_path = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, path[:path.index(partial)])
+ fixed_source_paths.add(fixed_path)
+ break
+
+ sources = list(fixed_source_paths)
+
+ input_args = []
+ for f in coverage_files + metadata_files:
+ input_args.append('-in')
+ input_args.append(f)
+
+ output_args = ['-Dreport.html.out.file', options.output,
+ '-Dreport.html.out.encoding', 'UTF-8']
+ source_args = ['-sp', ','.join(sources)]
+
+ exit_code = cmd_helper.RunCmd(
+ ['java', '-cp',
+ os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
+ 'emma', 'report', '-r', 'html']
+ + input_args + output_args + source_args)
+
+ if options.cleanup:
+ for f in coverage_files:
+ os.remove(f)
+
+  # Command tends to exit with status 0 even when it actually failed.
+ if not exit_code and not os.path.exists(options.output):
+ exit_code = 1
+
+ return exit_code
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gradle/AndroidManifest.xml b/deps/v8/build/android/gradle/AndroidManifest.xml
new file mode 100644
index 0000000000..f3e50e0c93
--- /dev/null
+++ b/deps/v8/build/android/gradle/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright 2018 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+
+<!--
+ This is a dummy manifest which is required by Android Studio's _all target.
+ No <uses-sdk> is allowed due to https://crbug.com/841529.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.dummy">
+</manifest>
diff --git a/deps/v8/build/android/gradle/OWNERS b/deps/v8/build/android/gradle/OWNERS
new file mode 100644
index 0000000000..d1f94845f4
--- /dev/null
+++ b/deps/v8/build/android/gradle/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/gradle/android.jinja b/deps/v8/build/android/gradle/android.jinja
new file mode 100644
index 0000000000..4a7da2961e
--- /dev/null
+++ b/deps/v8/build/android/gradle/android.jinja
@@ -0,0 +1,114 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_sourceset(variables, prefix) %}
+{% if variables is defined %}
+ {{ prefix }} {
+{% if variables.android_manifest is defined %}
+ manifest.srcFile "{{ variables.android_manifest }}"
+{% endif %}
+{% if variables.java_dirs is defined %}
+ java.srcDirs = [
+{% for path in variables.java_dirs %}
+ "{{ path }}",
+{% endfor %}
+ ]
+{% endif %}
+{% if variables.java_excludes is defined %}
+ java.filter.exclude(
+{% for path in variables.java_excludes %}
+ "{{ path }}",
+{% endfor %}
+ )
+{% endif %}
+{% if variables.jni_libs is defined %}
+ jniLibs.srcDirs = [
+{% for path in variables.jni_libs %}
+ "{{ path }}",
+{% endfor %}
+ ]
+{% endif %}
+{% if variables.res_dirs is defined %}
+ res.srcDirs = [
+{% for path in variables.res_dirs %}
+ "{{ path }}",
+{% endfor %}
+ ]
+{% endif %}
+ }
+{% endif %}
+{% endmacro %}
+// Generated by //build/android/generate_gradle.py
+
+{% if template_type in ('android_library', 'android_junit') %}
+apply plugin: "com.android.library"
+{% elif template_type == 'android_apk' %}
+apply plugin: "com.android.application"
+{% endif %}
+
+android {
+ compileSdkVersion "{{ compile_sdk_version }}"
+
+ defaultConfig {
+ vectorDrawables.useSupportLibrary = true
+ minSdkVersion 19
+ targetSdkVersion {{ target_sdk_version }}
+ }
+
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+
+{% if native is defined %}
+ externalNativeBuild {
+ cmake {
+ path "CMakeLists.txt"
+ }
+ }
+{% endif %}
+
+ sourceSets {
+{% for name in ['main', 'test', 'androidTest', 'debug', 'release'] %}
+ {{ name }} {
+ aidl.srcDirs = []
+ assets.srcDirs = []
+ java.srcDirs = []
+ jni.srcDirs = []
+ renderscript.srcDirs = []
+ res.srcDirs = []
+ resources.srcDirs = []
+ }
+{% endfor %}
+
+{{ expand_sourceset(main, 'main') }}
+{{ expand_sourceset(test, 'test') }}
+{% if android_test is defined %}
+{% for t in android_test %}
+{{ expand_sourceset(t, 'androidTest') }}
+{% endfor %}
+{% endif %}
+ }
+}
+
+{% include 'dependencies.jinja' %}
+
+afterEvaluate {
+ def tasksToDisable = tasks.findAll {
+ return (it.name.equals('generateDebugSources') // causes unwanted AndroidManifest.java
+ || it.name.equals('generateReleaseSources')
+ || it.name.endsWith('BuildConfig') // causes unwanted BuildConfig.java
+ || it.name.equals('preDebugAndroidTestBuild')
+{% if not use_gradle_process_resources %}
+ || it.name.endsWith('Assets')
+ || it.name.endsWith('Resources')
+ || it.name.endsWith('ResValues')
+{% endif %}
+ || it.name.endsWith('Aidl')
+ || it.name.endsWith('Renderscript')
+ || it.name.endsWith('Shaders'))
+ }
+ tasksToDisable.each { Task task ->
+ task.enabled = false
+ }
+}
diff --git a/deps/v8/build/android/gradle/cmake.jinja b/deps/v8/build/android/gradle/cmake.jinja
new file mode 100644
index 0000000000..996a5218f6
--- /dev/null
+++ b/deps/v8/build/android/gradle/cmake.jinja
@@ -0,0 +1,26 @@
+{# Copyright 2018 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+# Generated by //build/android/generate_gradle.py
+
+cmake_minimum_required(VERSION 3.4.1)
+
+project(chrome C CXX)
+
+{% if native.includes is defined %}
+include_directories(
+{% for path in native.includes %}
+ {{ path }}
+{% endfor %}
+)
+{% endif %}
+
+{% for name, target in native.targets.iteritems() %}
+{% if target.sources is defined %}
+add_library("{{ name }}"
+{% for path in target.sources %}
+ {{ path }}
+{% endfor %}
+)
+{% endif %}
+{% endfor %}
diff --git a/deps/v8/build/android/gradle/dependencies.jinja b/deps/v8/build/android/gradle/dependencies.jinja
new file mode 100644
index 0000000000..87bc312853
--- /dev/null
+++ b/deps/v8/build/android/gradle/dependencies.jinja
@@ -0,0 +1,28 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_deps(variables, prefix) %}
+{% if variables is defined %}
+{% if variables.prebuilts is defined %}
+{% for path in variables.prebuilts %}
+ {{ prefix }} files("{{ path }}")
+{% endfor %}
+{% endif %}
+{% if variables.java_project_deps is defined %}
+{% for proj in variables.java_project_deps %}
+ {{ prefix }} project(":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% if variables.android_project_deps is defined %}
+{% for proj in variables.android_project_deps %}
+ {{ prefix }} project(path: ":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+dependencies {
+{{ expand_deps(main, 'implementation') }}
+{{ expand_deps(test, 'testImplementation') }}
+{{ expand_deps(android_test, 'androidTestImplementation') }}
+}
diff --git a/deps/v8/build/android/gradle/generate_gradle.py b/deps/v8/build/android/gradle/generate_gradle.py
new file mode 100755
index 0000000000..f2bcec5e80
--- /dev/null
+++ b/deps/v8/build/android/gradle/generate_gradle.py
@@ -0,0 +1,974 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an Android Studio project from a GN target."""
+
+import argparse
+import codecs
+import collections
+import glob
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import zipfile
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+import devil_chromium
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+import jinja_template
+from util import build_utils
+
+_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
+ 'depot_tools')
+_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gradle',
+ 'AndroidManifest.xml')
+_FILE_DIR = os.path.dirname(__file__)
+_SRCJARS_SUBDIR = 'extracted-srcjars'
+_JNI_LIBS_SUBDIR = 'symlinked-libs'
+_ARMEABI_SUBDIR = 'armeabi'
+_RES_SUBDIR = 'extracted-res'
+_GRADLE_BUILD_FILE = 'build.gradle'
+_CMAKE_FILE = 'CMakeLists.txt'
+# This needs to come first alphabetically among all modules.
+_MODULE_ALL = '_all'
+_SRC_INTERNAL = os.path.join(
+ os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
+
+_DEFAULT_TARGETS = [
+ '//android_webview/test/embedded_test_server:aw_net_test_support_apk',
+ '//android_webview/test:webview_instrumentation_apk',
+ '//android_webview/test:webview_instrumentation_test_apk',
+ '//base:base_junit_tests',
+ '//chrome/android:chrome_junit_tests',
+ '//chrome/android:chrome_public_apk',
+ '//chrome/android:chrome_public_test_apk',
+ '//content/public/android:content_junit_tests',
+ '//content/shell/android:content_shell_apk',
+ # Below must be included even with --all since they are libraries.
+ '//base/android/jni_generator:jni_processor',
+ '//tools/android/errorprone_plugin:errorprone_plugin_java',
+]
+
+_EXCLUDED_PREBUILT_JARS = [
+ # Android Studio already provides Desugar runtime.
+ # Including it would cause linking error because of a duplicate class.
+ 'lib.java/third_party/bazel/desugar/Desugar-runtime.jar'
+]
+
+
+def _TemplatePath(name):
+ return os.path.join(_FILE_DIR, '{}.jinja'.format(name))
+
+
+def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
+ """Makes the given path(s) relative to new_cwd, or absolute if not specified.
+
+ If new_cwd is not specified, absolute paths are returned.
+ If old_cwd is not specified, constants.GetOutDirectory() is assumed.
+ """
+ if path_or_list is None:
+ return []
+ if not isinstance(path_or_list, basestring):
+ return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list]
+ if old_cwd is None:
+ old_cwd = constants.GetOutDirectory()
+ old_cwd = os.path.abspath(old_cwd)
+ if new_cwd:
+ new_cwd = os.path.abspath(new_cwd)
+ return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd)
+ return os.path.abspath(os.path.join(old_cwd, path_or_list))
+
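+# Behavior sketch for _RebasePath (hypothetical paths, assuming the output
+# directory is 'out/Debug'):
+#   _RebasePath('gen/foo.build_config')
+#       -> absolute path of 'out/Debug/gen/foo.build_config'
+#   _RebasePath('gen/foo.build_config', new_cwd='out/Debug/gradle')
+#       -> '../gen/foo.build_config'
+#   _RebasePath(None) -> []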
+
+def _IsSubpathOf(child, parent):
+ """Returns whether |child| is a subpath of |parent|."""
+ return not os.path.relpath(child, parent).startswith(os.pardir)
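+# e.g. _IsSubpathOf('out/gradle/app', 'out/gradle') -> True, while
+# _IsSubpathOf('out', 'out/gradle') -> False (the relpath starts with '..').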
+
+
+def _WriteFile(path, data):
+ """Writes |data| to |path|, constucting parent directories if necessary."""
+ logging.info('Writing %s', path)
+ dirname = os.path.dirname(path)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ with codecs.open(path, 'w', 'utf-8') as output_file:
+ output_file.write(data)
+
+
+def _ReadPropertiesFile(path):
+ with open(path) as f:
+ return dict(l.rstrip().split('=', 1) for l in f if '=' in l)
+
+
+def _RunGnGen(output_dir, args=None):
+ cmd = [
+ os.path.join(_DEPOT_TOOLS_PATH, 'gn'),
+ 'gen',
+ output_dir,
+ ]
+ if args:
+ cmd.extend(args)
+ logging.info('Running: %r', cmd)
+ subprocess.check_call(cmd)
+
+
+def _RunNinja(output_dir, args, j):
+ cmd = [
+ os.path.join(_DEPOT_TOOLS_PATH, 'ninja'),
+ '-C',
+ output_dir,
+ '-j{}'.format(j),
+ ]
+ cmd.extend(args)
+ logging.info('Running: %r', cmd)
+ subprocess.check_call(cmd)
+
+
+def _QueryForAllGnTargets(output_dir):
+ # Query ninja rather than GN since it's faster.
+ cmd = [
+ os.path.join(_DEPOT_TOOLS_PATH, 'ninja'),
+ '-C',
+ output_dir,
+ '-t',
+ 'targets',
+ ]
+ logging.info('Running: %r', cmd)
+ ninja_output = build_utils.CheckOutput(cmd)
+ ret = []
+ SUFFIX_LEN = len('__build_config_crbug_908819')
+ for line in ninja_output.splitlines():
+ ninja_target = line.rsplit(':', 1)[0]
+    # Ignore root aliases by ensuring a ':' exists.
+ if ':' in ninja_target and ninja_target.endswith(
+ '__build_config_crbug_908819'):
+ ret.append('//' + ninja_target[:-SUFFIX_LEN])
+ return ret
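+# Illustration (hypothetical ninja output): a targets line such as
+# 'base:base__build_config_crbug_908819: phony' yields the GN label
+# '//base:base'.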
+
+
+class _ProjectEntry(object):
+ """Helper class for project entries."""
+
+ _cached_entries = {}
+
+ def __init__(self, gn_target):
+ # Use _ProjectEntry.FromGnTarget instead for caching.
+ self._gn_target = gn_target
+ self._build_config = None
+ self._java_files = None
+ self._all_entries = None
+ self.android_test_entries = []
+
+ @classmethod
+ def FromGnTarget(cls, gn_target):
+ assert gn_target.startswith('//'), gn_target
+ if ':' not in gn_target:
+ gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target))
+ if gn_target not in cls._cached_entries:
+ cls._cached_entries[gn_target] = cls(gn_target)
+ return cls._cached_entries[gn_target]
+
+ @classmethod
+ def FromBuildConfigPath(cls, path):
+ prefix = 'gen/'
+ suffix = '.build_config'
+ assert path.startswith(prefix) and path.endswith(suffix), path
+ subdir = path[len(prefix):-len(suffix)]
+ gn_target = '//%s:%s' % (os.path.split(subdir))
+ return cls.FromGnTarget(gn_target)
+
+ def __hash__(self):
+ return hash(self._gn_target)
+
+ def __eq__(self, other):
+ return self._gn_target == other.GnTarget()
+
+ def GnTarget(self):
+ return self._gn_target
+
+ def NinjaTarget(self):
+ return self._gn_target[2:]
+
+ def GnBuildConfigTarget(self):
+ return '%s__build_config_crbug_908819' % self._gn_target
+
+ def NinjaBuildConfigTarget(self):
+ return '%s__build_config_crbug_908819' % self.NinjaTarget()
+
+ def GradleSubdir(self):
+ """Returns the output subdirectory."""
+ ninja_target = self.NinjaTarget()
+ # Support targets at the root level. e.g. //:foo
+ if ninja_target[0] == ':':
+ ninja_target = ninja_target[1:]
+ return ninja_target.replace(':', os.path.sep)
+
+ def ProjectName(self):
+ """Returns the Gradle project name."""
+ return self.GradleSubdir().replace(os.path.sep, '.')
+
+ def BuildConfig(self):
+ """Reads and returns the project's .build_config JSON."""
+ if not self._build_config:
+ path = os.path.join('gen', self.GradleSubdir() + '.build_config')
+ with open(_RebasePath(path)) as jsonfile:
+ self._build_config = json.load(jsonfile)
+ return self._build_config
+
+ def DepsInfo(self):
+ return self.BuildConfig()['deps_info']
+
+ def Gradle(self):
+ return self.BuildConfig()['gradle']
+
+ def Javac(self):
+ return self.BuildConfig()['javac']
+
+ def GetType(self):
+ """Returns the target type from its .build_config."""
+ return self.DepsInfo()['type']
+
+ def IsValid(self):
+ return self.GetType() in (
+ 'android_apk',
+ 'java_library',
+ "java_annotation_processor",
+ 'java_binary',
+ 'junit_binary',
+ )
+
+ def ResZips(self):
+ return self.DepsInfo().get('owned_resources_zips', [])
+
+ def ResDirs(self):
+ return self.DepsInfo().get('owned_resources_dirs', [])
+
+ def JavaFiles(self):
+ if self._java_files is None:
+ java_sources_file = self.DepsInfo().get('java_sources_file')
+ java_files = []
+ if java_sources_file:
+ java_sources_file = _RebasePath(java_sources_file)
+ java_files = build_utils.ReadSourcesList(java_sources_file)
+ self._java_files = java_files
+ return self._java_files
+
+ def GeneratedJavaFiles(self):
+ return [p for p in self.JavaFiles() if not p.startswith('..')]
+
+ def PrebuiltJars(self):
+ all_jars = self.Gradle().get('dependent_prebuilt_jars', [])
+ return [i for i in all_jars if i not in _EXCLUDED_PREBUILT_JARS]
+
+ def AllEntries(self):
+ """Returns a list of all entries that the current entry depends on.
+
+ This includes the entry itself to make iterating simpler."""
+ if self._all_entries is None:
+ logging.debug('Generating entries for %s', self.GnTarget())
+ deps = [_ProjectEntry.FromBuildConfigPath(p)
+ for p in self.Gradle()['dependent_android_projects']]
+ deps.extend(_ProjectEntry.FromBuildConfigPath(p)
+ for p in self.Gradle()['dependent_java_projects'])
+ all_entries = set()
+ for dep in deps:
+ all_entries.update(dep.AllEntries())
+ all_entries.add(self)
+ self._all_entries = list(all_entries)
+ return self._all_entries
+
+
+class _ProjectContextGenerator(object):
+ """Helper class to generate gradle build files"""
+ def __init__(self, project_dir, build_vars, use_gradle_process_resources,
+ jinja_processor, split_projects, channel):
+ self.project_dir = project_dir
+ self.build_vars = build_vars
+ self.use_gradle_process_resources = use_gradle_process_resources
+ self.jinja_processor = jinja_processor
+ self.split_projects = split_projects
+ self.channel = channel
+ self.processed_java_dirs = set()
+ self.processed_prebuilts = set()
+ self.processed_res_dirs = set()
+
+ def _GenJniLibs(self, root_entry):
+ libraries = []
+ for entry in self._GetEntries(root_entry):
+ libraries += entry.BuildConfig().get('native', {}).get('libraries', [])
+ if libraries:
+ return _CreateJniLibsDir(constants.GetOutDirectory(),
+ self.EntryOutputDir(root_entry), libraries)
+ return []
+
+ def _GenJavaDirs(self, root_entry):
+ java_files = []
+ for entry in self._GetEntries(root_entry):
+ java_files += entry.JavaFiles()
+ java_dirs, excludes = _ComputeJavaSourceDirsAndExcludes(
+ constants.GetOutDirectory(), java_files)
+ return java_dirs, excludes
+
+ def _GenCustomManifest(self, entry):
+ """Returns the path to the generated AndroidManifest.xml.
+
+ Gradle uses package id from manifest when generating R.class. So, we need
+ to generate a custom manifest if we let gradle process resources. We cannot
+ simply set android.defaultConfig.applicationId because it is not supported
+ for library targets."""
+ resource_packages = entry.Javac().get('resource_packages')
+ if not resource_packages:
+ logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+ 'unknown package. Unable to process with gradle.')
+ return _DEFAULT_ANDROID_MANIFEST_PATH
+ elif len(resource_packages) > 1:
+ logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+ 'multiple packages. Unable to process with gradle.')
+ return _DEFAULT_ANDROID_MANIFEST_PATH
+
+ variables = {'package': resource_packages[0]}
+ data = self.jinja_processor.Render(_TemplatePath('manifest'), variables)
+ output_file = os.path.join(
+ self.EntryOutputDir(entry), 'AndroidManifest.xml')
+ _WriteFile(output_file, data)
+
+ return output_file
+
+ def _Relativize(self, entry, paths):
+ return _RebasePath(paths, self.EntryOutputDir(entry))
+
+ def _Srcjars(self, entry):
+ srcjars = _RebasePath(entry.Gradle().get('bundled_srcjars', []))
+ if not self.use_gradle_process_resources:
+ srcjars += _RebasePath(entry.DepsInfo().get('owned_resource_srcjars', []))
+ return srcjars
+
+ def _GetEntries(self, entry):
+ if self.split_projects:
+ return [entry]
+ return entry.AllEntries()
+
+ def EntryOutputDir(self, entry):
+ return os.path.join(self.project_dir, entry.GradleSubdir())
+
+ def AllSrcjars(self, root_entry):
+ srcjars = []
+ for entry in self._GetEntries(root_entry):
+ srcjars += self._Srcjars(entry)
+ return set(srcjars)
+
+ def AllResZips(self, root_entry):
+ res_zips = []
+ for entry in self._GetEntries(root_entry):
+ res_zips += entry.ResZips()
+ return set(_RebasePath(res_zips))
+
+ def GeneratedInputs(self, root_entry, fast=None):
+ generated_inputs = set()
+ if not fast:
+ generated_inputs.update(self.AllResZips(root_entry))
+ generated_inputs.update(self.AllSrcjars(root_entry))
+ for entry in self._GetEntries(root_entry):
+ generated_inputs.update(entry.GeneratedJavaFiles())
+ generated_inputs.update(entry.PrebuiltJars())
+ return generated_inputs
+
+ def GeneratedZips(self, root_entry, fast=None):
+ entry_output_dir = self.EntryOutputDir(root_entry)
+ tuples = []
+ if not fast:
+ tuples.extend((s, os.path.join(entry_output_dir, _SRCJARS_SUBDIR))
+ for s in self.AllSrcjars(root_entry))
+ tuples.extend((s, os.path.join(entry_output_dir, _RES_SUBDIR))
+ for s in self.AllResZips(root_entry))
+ return tuples
+
+ def GenerateManifest(self, root_entry):
+ android_manifest = root_entry.DepsInfo().get('android_manifest')
+ if not android_manifest:
+ android_manifest = self._GenCustomManifest(root_entry)
+ return self._Relativize(root_entry, android_manifest)
+
+ def Generate(self, root_entry):
+ # TODO(agrieve): Add an option to use interface jars and see if that speeds
+ # things up at all.
+ variables = {}
+ java_dirs, excludes = self._GenJavaDirs(root_entry)
+ java_dirs.append(
+ os.path.join(self.EntryOutputDir(root_entry), _SRCJARS_SUBDIR))
+ self.processed_java_dirs.update(java_dirs)
+ java_dirs.sort()
+ variables['java_dirs'] = self._Relativize(root_entry, java_dirs)
+ variables['java_excludes'] = excludes
+ variables['jni_libs'] = self._Relativize(
+ root_entry, set(self._GenJniLibs(root_entry)))
+ prebuilts = set(
+ p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
+ self.processed_prebuilts.update(prebuilts)
+ variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
+ res_dirs = set(
+ p for e in self._GetEntries(root_entry) for p in e.ResDirs())
+ # Do not add generated resources for the all module since it creates many
+ # duplicates, and currently resources are only used for editing.
+ self.processed_res_dirs.update(res_dirs)
+ res_dirs.add(
+ os.path.join(self.EntryOutputDir(root_entry), _RES_SUBDIR))
+ variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
+ if self.split_projects:
+ deps = [_ProjectEntry.FromBuildConfigPath(p)
+ for p in root_entry.Gradle()['dependent_android_projects']]
+ variables['android_project_deps'] = [d.ProjectName() for d in deps]
+ deps = [_ProjectEntry.FromBuildConfigPath(p)
+ for p in root_entry.Gradle()['dependent_java_projects']]
+ variables['java_project_deps'] = [d.ProjectName() for d in deps]
+ return variables
+
+
+def _ComputeJavaSourceDirs(java_files):
+ """Returns a dictionary of source dirs with each given files in one."""
+ found_roots = {}
+ for path in java_files:
+ path_root = path
+ # Recognize these tokens as top-level.
+ while True:
+ path_root = os.path.dirname(path_root)
+ basename = os.path.basename(path_root)
+ assert basename, 'Failed to find source dir for ' + path
+ if basename in ('java', 'src'):
+ break
+ if basename in ('javax', 'org', 'com'):
+ path_root = os.path.dirname(path_root)
+ break
+ if path_root not in found_roots:
+ found_roots[path_root] = []
+ found_roots[path_root].append(path)
+ return found_roots
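+# Illustration (hypothetical path): for
+# 'chrome/android/java/src/org/chromium/chrome/Foo.java' the walk stops at the
+# 'org' component, so the computed source root is 'chrome/android/java/src'.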
+
+
+def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir):
+ """Returns exclude patters to exclude unwanted files but keep wanted files.
+
+ - Shortens exclude list by globbing if possible.
+ - Exclude patterns are relative paths from the parent directory.
+ """
+ excludes = []
+ files_to_include = set(wanted_files)
+ files_to_exclude = set(unwanted_files)
+ while files_to_exclude:
+ unwanted_file = files_to_exclude.pop()
+ target_exclude = os.path.join(
+ os.path.dirname(unwanted_file), '*.java')
+ found_files = set(glob.glob(target_exclude))
+ valid_files = found_files & files_to_include
+ if valid_files:
+ excludes.append(os.path.relpath(unwanted_file, parent_dir))
+ else:
+ excludes.append(os.path.relpath(target_exclude, parent_dir))
+ files_to_exclude -= found_files
+ return excludes
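+# Sketch of the resulting filters (hypothetical files): if 'foo/B.java' is
+# unwanted while 'foo/A.java' is wanted, the glob 'foo/*.java' would also hit
+# a wanted file, so 'B.java' is excluded individually; if nothing in 'foo/' is
+# wanted, the single pattern 'foo/*.java' is emitted instead.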
+
+
+def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files):
+ """Computes the list of java source directories and exclude patterns.
+
+ 1. Computes the root java source directories from the list of files.
+ 2. Compute exclude patterns that exclude all extra files only.
+ 3. Returns the list of java source directories and exclude patterns.
+ """
+ java_dirs = []
+ excludes = []
+ if java_files:
+ java_files = _RebasePath(java_files)
+ computed_dirs = _ComputeJavaSourceDirs(java_files)
+ java_dirs = computed_dirs.keys()
+ all_found_java_files = set()
+
+ for directory, files in computed_dirs.iteritems():
+ found_java_files = build_utils.FindInDirectory(directory, '*.java')
+ all_found_java_files.update(found_java_files)
+ unwanted_java_files = set(found_java_files) - set(files)
+ if unwanted_java_files:
+ logging.debug('Directory requires excludes: %s', directory)
+ excludes.extend(
+ _ComputeExcludeFilters(files, unwanted_java_files, directory))
+
+ missing_java_files = set(java_files) - all_found_java_files
+ # Warn only about non-generated files that are missing.
+ missing_java_files = [p for p in missing_java_files
+ if not p.startswith(output_dir)]
+ if missing_java_files:
+ logging.warning(
+ 'Some java files were not found: %s', missing_java_files)
+
+ return java_dirs, excludes
+
+
+def _CreateRelativeSymlink(target_path, link_path):
+ link_dir = os.path.dirname(link_path)
+ relpath = os.path.relpath(target_path, link_dir)
+ logging.debug('Creating symlink %s -> %s', link_path, relpath)
+ os.symlink(relpath, link_path)
+
+
+def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
+ """Creates directory with symlinked .so files if necessary.
+
+ Returns list of JNI libs directories."""
+
+ if so_files:
+ symlink_dir = os.path.join(entry_output_dir, _JNI_LIBS_SUBDIR)
+ shutil.rmtree(symlink_dir, True)
+ abi_dir = os.path.join(symlink_dir, _ARMEABI_SUBDIR)
+ if not os.path.exists(abi_dir):
+ os.makedirs(abi_dir)
+ for so_file in so_files:
+ target_path = os.path.join(output_dir, so_file)
+ symlinked_path = os.path.join(abi_dir, so_file)
+ _CreateRelativeSymlink(target_path, symlinked_path)
+
+ return [symlink_dir]
+
+ return []
+
+
+def _GenerateLocalProperties(sdk_dir):
+ """Returns the data for project.properties as a string."""
+ return '\n'.join([
+ '# Generated by //build/android/gradle/generate_gradle.py',
+ 'sdk.dir=%s' % sdk_dir,
+ ''])
+
+
+def _GenerateBaseVars(generator, build_vars):
+ variables = {}
+ variables['compile_sdk_version'] = (
+ 'android-%s' % build_vars['compile_sdk_version'])
+ target_sdk_version = build_vars['android_sdk_version']
+ if target_sdk_version.isalpha():
+ target_sdk_version = '"{}"'.format(target_sdk_version)
+ variables['target_sdk_version'] = target_sdk_version
+ variables['use_gradle_process_resources'] = (
+ generator.use_gradle_process_resources)
+ variables['channel'] = generator.channel
+ return variables
+
+
+def _GenerateGradleFile(entry, generator, build_vars, jinja_processor):
+ """Returns the data for a project's build.gradle."""
+ deps_info = entry.DepsInfo()
+ variables = _GenerateBaseVars(generator, build_vars)
+ sourceSetName = 'main'
+
+ if deps_info['type'] == 'android_apk':
+ target_type = 'android_apk'
+ elif deps_info['type'] in ('java_library', 'java_annotation_processor'):
+ is_prebuilt = deps_info.get('is_prebuilt', False)
+ gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False)
+ if is_prebuilt or gradle_treat_as_prebuilt:
+ return None
+ elif deps_info['requires_android']:
+ target_type = 'android_library'
+ else:
+ target_type = 'java_library'
+ elif deps_info['type'] == 'java_binary':
+ target_type = 'java_binary'
+ variables['main_class'] = deps_info.get('main_class')
+ elif deps_info['type'] == 'junit_binary':
+ target_type = 'android_junit'
+ sourceSetName = 'test'
+ else:
+ return None
+
+ variables['target_name'] = os.path.splitext(deps_info['name'])[0]
+ variables['template_type'] = target_type
+ variables['main'] = {}
+ variables[sourceSetName] = generator.Generate(entry)
+ variables['main']['android_manifest'] = generator.GenerateManifest(entry)
+
+ if entry.android_test_entries:
+ variables['android_test'] = []
+ for e in entry.android_test_entries:
+ test_entry = generator.Generate(e)
+ test_entry['android_manifest'] = generator.GenerateManifest(e)
+ variables['android_test'].append(test_entry)
+ for key, value in test_entry.iteritems():
+ if isinstance(value, list):
+ test_entry[key] = sorted(set(value) - set(variables['main'][key]))
+
+ return jinja_processor.Render(
+ _TemplatePath(target_type.split('_')[0]), variables)
+
+
+def _IsTestDir(path):
+ return ('javatests/' in path or
+ 'junit/' in path or
+ 'test/' in path or
+ 'testing/' in path)
+
+
+# Example: //chrome/android:monochrome
+def _GetNative(relative_func, target_names):
+ out_dir = constants.GetOutDirectory()
+ with open(os.path.join(out_dir, 'project.json'), 'r') as project_file:
+ projects = json.load(project_file)
+ project_targets = projects['targets']
+ root_dir = projects['build_settings']['root_path']
+ targets = {}
+ includes = set()
+ def process_paths(paths):
+ # Ignores leading //
+ return relative_func(
+ sorted(os.path.join(root_dir, path[2:]) for path in paths))
+ for target_name in target_names:
+ target = project_targets[target_name]
+ includes.update(target.get('include_dirs', []))
+ sources = [f for f in target.get('sources', []) if f.endswith('.cc')]
+ if sources:
+ # CMake does not like forward slashes or colons for the target name.
+ filtered_name = target_name.replace('/', '.').replace(':', '-')
+ targets[filtered_name] = {
+ 'sources': process_paths(sources),
+ }
+ return {
+ 'targets': targets,
+ 'includes': process_paths(includes),
+ }
+
+
+def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
+ jinja_processor, native_targets):
+ """Returns the data for a pseudo build.gradle of all dirs.
+
+ See //docs/android_studio.md for more details."""
+ variables = _GenerateBaseVars(generator, build_vars)
+ target_type = 'android_apk'
+ variables['target_name'] = _MODULE_ALL
+ variables['template_type'] = target_type
+ java_dirs = sorted(generator.processed_java_dirs)
+ prebuilts = sorted(generator.processed_prebuilts)
+ res_dirs = sorted(generator.processed_res_dirs)
+ def Relativize(paths):
+ return _RebasePath(paths, os.path.join(gradle_output_dir, _MODULE_ALL))
+ main_java_dirs = [d for d in java_dirs if not _IsTestDir(d)]
+ test_java_dirs = [d for d in java_dirs if _IsTestDir(d)]
+ variables['main'] = {
+ 'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH),
+ 'java_dirs': Relativize(main_java_dirs),
+ 'prebuilts': Relativize(prebuilts),
+ 'java_excludes': ['**/*.java'],
+ 'res_dirs': Relativize(res_dirs),
+ }
+ variables['android_test'] = [{
+ 'java_dirs': Relativize(test_java_dirs),
+ 'java_excludes': ['**/*.java'],
+ }]
+ if native_targets:
+ variables['native'] = _GetNative(
+ relative_func=Relativize, target_names=native_targets)
+ data = jinja_processor.Render(
+ _TemplatePath(target_type.split('_')[0]), variables)
+ _WriteFile(
+ os.path.join(gradle_output_dir, _MODULE_ALL, _GRADLE_BUILD_FILE), data)
+ if native_targets:
+ cmake_data = jinja_processor.Render(_TemplatePath('cmake'), variables)
+ _WriteFile(
+ os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data)
+
+
+def _GenerateRootGradle(jinja_processor, channel):
+ """Returns the data for the root project's build.gradle."""
+ return jinja_processor.Render(_TemplatePath('root'), {'channel': channel})
+
+
+def _GenerateSettingsGradle(project_entries):
+ """Returns the data for settings.gradle."""
+ project_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT))
+ lines = []
+ lines.append('// Generated by //build/android/gradle/generate_gradle.py')
+ lines.append('rootProject.name = "%s"' % project_name)
+ lines.append('rootProject.projectDir = settingsDir')
+ lines.append('')
+ for name, subdir in project_entries:
+ # Example target:
+ # android_webview:android_webview_java__build_config_crbug_908819
+ lines.append('include ":%s"' % name)
+ lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' %
+ (name, subdir))
+ return '\n'.join(lines)
+
+
+def _ExtractFile(zip_path, extracted_path):
+ logging.info('Extracting %s to %s', zip_path, extracted_path)
+ with zipfile.ZipFile(zip_path) as z:
+ z.extractall(extracted_path)
+
+
+def _ExtractZips(entry_output_dir, zip_tuples):
+ """Extracts all srcjars to the directory given by the tuples."""
+ extracted_paths = set(s[1] for s in zip_tuples)
+ for extracted_path in extracted_paths:
+ assert _IsSubpathOf(extracted_path, entry_output_dir)
+ shutil.rmtree(extracted_path, True)
+
+ for zip_path, extracted_path in zip_tuples:
+ _ExtractFile(zip_path, extracted_path)
+
+
+def _FindAllProjectEntries(main_entries):
+ """Returns the list of all _ProjectEntry instances given the root project."""
+ found = set()
+ to_scan = list(main_entries)
+ while to_scan:
+ cur_entry = to_scan.pop()
+ if cur_entry in found:
+ continue
+ found.add(cur_entry)
+ sub_config_paths = cur_entry.DepsInfo()['deps_configs']
+ to_scan.extend(
+ _ProjectEntry.FromBuildConfigPath(p) for p in sub_config_paths)
+ return list(found)
+
+
+def _CombineTestEntries(entries):
+ """Combines test apks into the androidTest source set of their target.
+
+ - Speeds up android studio
+ - Adds proper dependency between test and apk_under_test
+ - Doesn't work for junit yet due to resulting circular dependencies
+ - e.g. base_junit_tests > base_junit_test_support > base_java
+ """
+ combined_entries = []
+ android_test_entries = collections.defaultdict(list)
+ for entry in entries:
+ target_name = entry.GnTarget()
+ if (target_name.endswith('_test_apk__apk') and
+ 'apk_under_test' in entry.Gradle()):
+ apk_name = entry.Gradle()['apk_under_test']
+ android_test_entries[apk_name].append(entry)
+ else:
+ combined_entries.append(entry)
+ for entry in combined_entries:
+ target_name = entry.DepsInfo()['name']
+ if target_name in android_test_entries:
+ entry.android_test_entries = android_test_entries[target_name]
+ del android_test_entries[target_name]
+ # Add unmatched test entries as individual targets.
+ combined_entries.extend(e for l in android_test_entries.values() for e in l)
+ return combined_entries
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output-directory',
+ help='Path to the root build directory.')
+ parser.add_argument('-v',
+ '--verbose',
+ dest='verbose_count',
+ default=0,
+ action='count',
+ help='Verbose level')
+ parser.add_argument('--target',
+ dest='targets',
+ action='append',
+ help='GN target to generate project for. Replaces set of '
+ 'default targets. May be repeated.')
+ parser.add_argument('--extra-target',
+ dest='extra_targets',
+ action='append',
+ help='GN target to generate project for, in addition to '
+ 'the default ones. May be repeated.')
+ parser.add_argument('--project-dir',
+ help='Root of the output project.',
+ default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
+ parser.add_argument('--all',
+ action='store_true',
+ help='Include all .java files reachable from any '
+ 'apk/test/binary target. On by default unless '
+ '--split-projects is used (--split-projects can '
+ 'slow down Studio given too many targets).')
+ parser.add_argument('--use-gradle-process-resources',
+ action='store_true',
+ help='Have gradle generate R.java rather than ninja')
+ parser.add_argument('--split-projects',
+ action='store_true',
+ help='Split projects by their gn deps rather than '
+ 'combining all the dependencies of each target')
+ parser.add_argument('--fast',
+ action='store_true',
+ help='Skip generating R.java and other generated files.')
+ parser.add_argument('-j',
+ default=1000 if os.path.exists(_SRC_INTERNAL) else 50,
+ help='Value for number of parallel jobs for ninja')
+ parser.add_argument('--native-target',
+ dest='native_targets',
+ action='append',
+ help='GN native targets to generate for. May be '
+ 'repeated.')
+ parser.add_argument('--compile-sdk-version',
+ type=int,
+ default=0,
+ help='Override compileSdkVersion for android sdk docs. '
+ 'Useful when sources for android_sdk_version is '
+ 'not available in Android Studio.')
+ parser.add_argument(
+ '--sdk-path',
+ default=os.path.expanduser('~/Android/Sdk'),
+ help='The path to use as the SDK root, overrides the '
+ 'default at ~/Android/Sdk.')
+ version_group = parser.add_mutually_exclusive_group()
+ version_group.add_argument('--beta',
+ action='store_true',
+ help='Generate a project that is compatible with '
+ 'Android Studio Beta.')
+ version_group.add_argument('--canary',
+ action='store_true',
+ help='Generate a project that is compatible with '
+ 'Android Studio Canary.')
+ args = parser.parse_args()
+ if args.output_directory:
+ constants.SetOutputDirectory(args.output_directory)
+ constants.CheckOutputDirectory()
+ output_dir = constants.GetOutDirectory()
+ devil_chromium.Initialize(output_directory=output_dir)
+ run_tests_helper.SetLogLevel(args.verbose_count)
+
+ if args.use_gradle_process_resources:
+ assert args.split_projects, (
+        'Gradle resource processing does not work without --split-projects.')
+
+ _gradle_output_dir = os.path.abspath(
+ args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir))
+ logging.warning('Creating project at: %s', _gradle_output_dir)
+
+ # Generate for "all targets" by default when not using --split-projects (too
+ # slow), and when no --target has been explicitly set. "all targets" means all
+ # java targets that are depended on by an apk or java_binary (leaf
+ # java_library targets will not be included).
+ args.all = args.all or (not args.split_projects and not args.targets)
+
+ targets_from_args = set(args.targets or _DEFAULT_TARGETS)
+ if args.extra_targets:
+ targets_from_args.update(args.extra_targets)
+
+ if args.all:
+ if args.native_targets:
+ _RunGnGen(output_dir, ['--ide=json'])
+ elif not os.path.exists(os.path.join(output_dir, 'build.ninja')):
+ _RunGnGen(output_dir)
+ else:
+ # Faster than running "gn gen" in the no-op case.
+ _RunNinja(output_dir, ['build.ninja'], args.j)
+ # Query ninja for all __build_config_crbug_908819 targets.
+ targets = _QueryForAllGnTargets(output_dir)
+ else:
+ assert not args.native_targets, 'Native editing requires --all.'
+ targets = [re.sub(r'_test_apk$', '_test_apk__apk', t)
+ for t in targets_from_args]
+ # Necessary after "gn clean"
+ if not os.path.exists(os.path.join(output_dir, 'build_vars.txt')):
+ _RunGnGen(output_dir)
+
+ build_vars = _ReadPropertiesFile(os.path.join(output_dir, 'build_vars.txt'))
+ jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
+ if args.beta:
+ channel = 'beta'
+ elif args.canary:
+ channel = 'canary'
+ else:
+ channel = 'stable'
+ if args.compile_sdk_version:
+ build_vars['compile_sdk_version'] = args.compile_sdk_version
+ else:
+ build_vars['compile_sdk_version'] = build_vars['android_sdk_version']
+ generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
+ args.use_gradle_process_resources, jinja_processor, args.split_projects,
+ channel)
+
+ main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]
+
+ logging.warning('Building .build_config files...')
+ _RunNinja(
+ output_dir, [e.NinjaBuildConfigTarget() for e in main_entries], args.j)
+
+ if args.all:
+ # There are many unused libraries, so restrict to those that are actually
+ # used by apks/binaries/tests or that are explicitly mentioned in --targets.
+ main_entries = [e for e in main_entries if (
+ e.GetType() in ('android_apk', 'java_binary', 'junit_binary') or
+ e.GnTarget() in targets_from_args or
+ e.GnTarget().endswith('_test_apk__apk'))]
+
+ if args.split_projects:
+ main_entries = _FindAllProjectEntries(main_entries)
+
+ logging.info('Generating for %d targets.', len(main_entries))
+
+ entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()]
+ logging.info('Creating %d projects for targets.', len(entries))
+
+ logging.warning('Writing .gradle files...')
+ project_entries = []
+ # When only one entry will be generated we want it to have a valid
+ # build.gradle file with its own AndroidManifest.
+ for entry in entries:
+ data = _GenerateGradleFile(entry, generator, build_vars, jinja_processor)
+ if data and not args.all:
+ project_entries.append((entry.ProjectName(), entry.GradleSubdir()))
+ _WriteFile(
+ os.path.join(generator.EntryOutputDir(entry), _GRADLE_BUILD_FILE),
+ data)
+ if args.all:
+ project_entries.append((_MODULE_ALL, _MODULE_ALL))
+ _GenerateModuleAll(_gradle_output_dir, generator, build_vars,
+ jinja_processor, args.native_targets)
+
+ _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
+ _GenerateRootGradle(jinja_processor, channel))
+
+ _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
+ _GenerateSettingsGradle(project_entries))
+
+ # Ensure the Android Studio sdk is correctly initialized.
+ if not os.path.exists(args.sdk_path):
+ # Help first-time users avoid Android Studio forcibly changing back to
+ # the previous default due to not finding a valid sdk under this dir.
+ shutil.copytree(_RebasePath(build_vars['android_sdk_root']), args.sdk_path)
+ _WriteFile(
+ os.path.join(generator.project_dir, 'local.properties'),
+ _GenerateLocalProperties(args.sdk_path))
+
+ zip_tuples = []
+ generated_inputs = set()
+ for entry in entries:
+ entries_to_gen = [entry]
+ entries_to_gen.extend(entry.android_test_entries)
+ for entry_to_gen in entries_to_gen:
+ # Build all paths referenced by .gradle files that exist within output_dir.
+ generated_inputs.update(
+ generator.GeneratedInputs(entry_to_gen, args.fast))
+ zip_tuples.extend(generator.GeneratedZips(entry_to_gen, args.fast))
+ if generated_inputs:
+ logging.warning('Building generated source files...')
+ targets = _RebasePath(generated_inputs, output_dir)
+ _RunNinja(output_dir, targets, args.j)
+ if zip_tuples:
+ _ExtractZips(generator.project_dir, zip_tuples)
+
+ logging.warning('Generated projects for Android Studio %s', channel)
+ logging.warning('For more tips: https://chromium.googlesource.com/chromium'
+ '/src.git/+/master/docs/android_studio.md')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gradle/gn_to_cmake.py b/deps/v8/build/android/gradle/gn_to_cmake.py
new file mode 100755
index 0000000000..dd6c1323c0
--- /dev/null
+++ b/deps/v8/build/android/gradle/gn_to_cmake.py
@@ -0,0 +1,687 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Usage: gn_to_cmake.py <json_file_name>
+
+gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+
+or
+
+gn gen out/config --ide=json
+python gn/gn_to_cmake.py out/config/project.json
+
+The first is recommended, as it will auto-update.
+"""
+
+import functools
+import json
+import posixpath
+import string
+import sys
+
+
+def CMakeStringEscape(a):
+ """Escapes the string 'a' for use inside a CMake string.
+
+ This means escaping
+ '\' otherwise it may be seen as modifying the next character
+ '"' otherwise it will end the string
+ ';' otherwise the string becomes a list
+
+ The following do not need to be escaped
+ '#' when the lexer is in string state, this does not start a comment
+ """
+ return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
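+
+# Illustrative example (derived from the escaping rules above):
+#   CMakeStringEscape('res;x"y\\z') -> r'res\;x\"y\\z'
+# i.e. the escaped form that CMake parses back as the literal string res;x"y\z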
+
+
+def CMakeTargetEscape(a):
+ """Escapes the string 'a' for use as a CMake target name.
+
+ CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
+ The ':' is only allowed for imported targets.
+ """
+ def Escape(c):
+ if c in string.ascii_letters or c in string.digits or c in '_.+-':
+ return c
+ else:
+ return '__'
+ return ''.join([Escape(c) for c in a])
+
+
+def SetVariable(out, variable_name, value):
+ """Sets a CMake variable."""
+ out.write('set("')
+ out.write(CMakeStringEscape(variable_name))
+ out.write('" "')
+ out.write(CMakeStringEscape(value))
+ out.write('")\n')
+
+
+def SetVariableList(out, variable_name, values):
+ """Sets a CMake variable to a list."""
+ if not values:
+ return SetVariable(out, variable_name, "")
+ if len(values) == 1:
+ return SetVariable(out, variable_name, values[0])
+ out.write('list(APPEND "')
+ out.write(CMakeStringEscape(variable_name))
+ out.write('"\n "')
+ out.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
+ out.write('")\n')
+
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+ """Given a set of source files, sets the given property on them."""
+ output.write('set_source_files_properties(')
+ WriteVariable(output, variable)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetCurrentTargetProperty(out, property_name, values, sep=''):
+ """Given a target, sets the given property."""
+ out.write('set_target_properties("${target}" PROPERTIES ')
+ out.write(property_name)
+ out.write(' "')
+ for value in values:
+ out.write(CMakeStringEscape(value))
+ out.write(sep)
+ out.write('")\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+ if prepend:
+ output.write(prepend)
+ output.write('${')
+ output.write(variable_name)
+ output.write('}')
+
+
+# See GetSourceFileType in gn
+source_file_types = {
+ '.cc': 'cxx',
+ '.cpp': 'cxx',
+ '.cxx': 'cxx',
+ '.c': 'c',
+ '.s': 'asm',
+ '.S': 'asm',
+ '.asm': 'asm',
+ '.o': 'obj',
+ '.obj': 'obj',
+}
+
+
+class CMakeTargetType(object):
+ def __init__(self, command, modifier, property_modifier, is_linkable):
+ self.command = command
+ self.modifier = modifier
+ self.property_modifier = property_modifier
+ self.is_linkable = is_linkable
+CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
+ None, False)
+
+# See GetStringForOutputType in gn
+cmake_target_types = {
+ 'unknown': CMakeTargetType.custom,
+ 'group': CMakeTargetType.custom,
+ 'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
+ 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
+ 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
+ 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
+ 'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
+ 'copy': CMakeTargetType.custom,
+ 'action': CMakeTargetType.custom,
+ 'action_foreach': CMakeTargetType.custom,
+ 'bundle_data': CMakeTargetType.custom,
+ 'create_bundle': CMakeTargetType.custom,
+}
+
+
+def FindFirstOf(s, a):
+ return min(s.find(i) for i in a if i in s)
+
+
+def GetCMakeTargetName(gn_target_name):
+ # See <chromium>/src/tools/gn/label.cc#Resolve
+ # //base/test:test_support(//build/toolchain/win:msvc)
+ path_separator = FindFirstOf(gn_target_name, (':', '('))
+ location = None
+ name = None
+ toolchain = None
+ if not path_separator:
+ location = gn_target_name[2:]
+ else:
+ location = gn_target_name[2:path_separator]
+ toolchain_separator = gn_target_name.find('(', path_separator)
+ if toolchain_separator == -1:
+ name = gn_target_name[path_separator + 1:]
+ else:
+ if toolchain_separator > path_separator:
+ name = gn_target_name[path_separator + 1:toolchain_separator]
+ assert gn_target_name.endswith(')')
+ toolchain = gn_target_name[toolchain_separator + 1:-1]
+ assert location or name
+
+ cmake_target_name = None
+ if location.endswith('/' + name):
+ cmake_target_name = location
+ elif location:
+ cmake_target_name = location + '_' + name
+ else:
+ cmake_target_name = name
+ if toolchain:
+ cmake_target_name += '--' + toolchain
+ return CMakeTargetEscape(cmake_target_name)
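+
+# For instance (illustrative, derived from the rules above):
+#   GetCMakeTargetName('//base/test:test_support') -> 'base__test_test_support'
+# since '/' is not a legal CMake target character and CMakeTargetEscape
+# rewrites it to '__'.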
+
+
+class Project(object):
+ def __init__(self, project_json):
+ self.targets = project_json['targets']
+ build_settings = project_json['build_settings']
+ self.root_path = build_settings['root_path']
+ self.build_path = posixpath.join(self.root_path,
+ build_settings['build_dir'][2:])
+ self.object_source_deps = {}
+
+ def GetAbsolutePath(self, path):
+ if path.startswith("//"):
+ return self.root_path + "/" + path[2:]
+ else:
+ return path
+
+ def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
+ """All OBJECT libraries whose sources have not been absorbed."""
+ if gn_target_name in self.object_source_deps:
+ object_dependencies.update(self.object_source_deps[gn_target_name])
+ return
+ target_deps = set()
+ dependencies = self.targets[gn_target_name].get('deps', [])
+ for dependency in dependencies:
+ dependency_type = self.targets[dependency].get('type', None)
+ if dependency_type == 'source_set':
+ target_deps.add(dependency)
+ if dependency_type not in gn_target_types_that_absorb_objects:
+ self.GetObjectSourceDependencies(dependency, target_deps)
+ self.object_source_deps[gn_target_name] = target_deps
+ object_dependencies.update(target_deps)
+
+ def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
+ """All OBJECT libraries whose libraries have not been absorbed."""
+ dependencies = self.targets[gn_target_name].get('deps', [])
+ for dependency in dependencies:
+ dependency_type = self.targets[dependency].get('type', None)
+ if dependency_type == 'source_set':
+ object_dependencies.add(dependency)
+ self.GetObjectLibraryDependencies(dependency, object_dependencies)
+
+
+class Target(object):
+ def __init__(self, gn_target_name, project):
+ self.gn_name = gn_target_name
+ self.properties = project.targets[self.gn_name]
+ self.cmake_name = GetCMakeTargetName(self.gn_name)
+ self.gn_type = self.properties.get('type', None)
+ self.cmake_type = cmake_target_types.get(self.gn_type, None)
+
+
+def WriteAction(out, target, project, sources, synthetic_dependencies):
+ outputs = []
+ output_directories = set()
+ for output in target.properties.get('outputs', []):
+ output_abs_path = project.GetAbsolutePath(output)
+ outputs.append(output_abs_path)
+ output_directory = posixpath.dirname(output_abs_path)
+ if output_directory:
+ output_directories.add(output_directory)
+ outputs_name = '${target}__output'
+ SetVariableList(out, outputs_name, outputs)
+
+ out.write('add_custom_command(OUTPUT ')
+ WriteVariable(out, outputs_name)
+ out.write('\n')
+
+ if output_directories:
+ out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
+ out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+ out.write('"\n')
+
+ script = target.properties['script']
+ arguments = target.properties['args']
+ out.write(' COMMAND python "')
+ out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+ out.write('"')
+ if arguments:
+ out.write('\n "')
+ out.write('"\n "'.join([CMakeStringEscape(a) for a in arguments]))
+ out.write('"')
+ out.write('\n')
+
+ out.write(' DEPENDS ')
+ for sources_type_name in sources.values():
+ WriteVariable(out, sources_type_name, ' ')
+ out.write('\n')
+
+ #TODO: CMake 3.7 is introducing DEPFILE
+
+ out.write(' WORKING_DIRECTORY "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('"\n')
+
+ out.write(' COMMENT "Action: ${target}"\n')
+
+ out.write(' VERBATIM)\n')
+
+ synthetic_dependencies.add(outputs_name)
+
+
+def ExpandPlaceholders(source, a):
+ source_dir, source_file_part = posixpath.split(source)
+ source_name_part, _ = posixpath.splitext(source_file_part)
+ #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
+ return a.replace('{{source}}', source) \
+ .replace('{{source_file_part}}', source_file_part) \
+ .replace('{{source_name_part}}', source_name_part) \
+ .replace('{{source_dir}}', source_dir) \
+ .replace('{{source_root_relative_dir}}', source_dir)
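+
+# Illustrative expansions for source 'gen/a/b.cc':
+#   ExpandPlaceholders('gen/a/b.cc', '{{source_name_part}}.o') -> 'b.o'
+#   ExpandPlaceholders('gen/a/b.cc', '{{source_dir}}/x') -> 'gen/a/x'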
+
+
+def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
+ all_outputs = target.properties.get('outputs', [])
+ inputs = target.properties.get('sources', [])
+ # TODO: consider expanding 'output_patterns' instead.
+ outputs_per_input = len(all_outputs) / len(inputs)
+ for count, source in enumerate(inputs):
+ source_abs_path = project.GetAbsolutePath(source)
+
+ outputs = []
+ output_directories = set()
+ for output in all_outputs[outputs_per_input * count:
+ outputs_per_input * (count+1)]:
+ output_abs_path = project.GetAbsolutePath(output)
+ outputs.append(output_abs_path)
+ output_directory = posixpath.dirname(output_abs_path)
+ if output_directory:
+ output_directories.add(output_directory)
+ outputs_name = '${target}__output_' + str(count)
+ SetVariableList(out, outputs_name, outputs)
+
+ out.write('add_custom_command(OUTPUT ')
+ WriteVariable(out, outputs_name)
+ out.write('\n')
+
+ if output_directories:
+ out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
+ out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
+ out.write('"\n')
+
+ script = target.properties['script']
+ # TODO: need to expand {{xxx}} in arguments
+ arguments = target.properties['args']
+ out.write(' COMMAND python "')
+ out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
+ out.write('"')
+ if arguments:
+ out.write('\n "')
+ expand = functools.partial(ExpandPlaceholders, source_abs_path)
+ out.write('"\n "'.join(
+ [CMakeStringEscape(expand(a)) for a in arguments]))
+ out.write('"')
+ out.write('\n')
+
+ out.write(' DEPENDS')
+ if 'input' in sources:
+ WriteVariable(out, sources['input'], ' ')
+ out.write(' "')
+ out.write(CMakeStringEscape(source_abs_path))
+ out.write('"\n')
+
+ #TODO: CMake 3.7 is introducing DEPFILE
+
+ out.write(' WORKING_DIRECTORY "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('"\n')
+
+ out.write(' COMMENT "Action ${target} on ')
+ out.write(CMakeStringEscape(source_abs_path))
+ out.write('"\n')
+
+ out.write(' VERBATIM)\n')
+
+ synthetic_dependencies.add(outputs_name)
+
+
+def WriteCopy(out, target, project, sources, synthetic_dependencies):
+ inputs = target.properties.get('sources', [])
+ raw_outputs = target.properties.get('outputs', [])
+
+ # TODO: consider expanding 'output_patterns' instead.
+ outputs = []
+ for output in raw_outputs:
+ output_abs_path = project.GetAbsolutePath(output)
+ outputs.append(output_abs_path)
+ outputs_name = '${target}__output'
+ SetVariableList(out, outputs_name, outputs)
+
+ out.write('add_custom_command(OUTPUT ')
+ WriteVariable(out, outputs_name)
+ out.write('\n')
+
+ for src, dst in zip(inputs, outputs):
+ out.write(' COMMAND ${CMAKE_COMMAND} -E copy "')
+ out.write(CMakeStringEscape(project.GetAbsolutePath(src)))
+ out.write('" "')
+ out.write(CMakeStringEscape(dst))
+ out.write('"\n')
+
+ out.write(' DEPENDS ')
+ for sources_type_name in sources.values():
+ WriteVariable(out, sources_type_name, ' ')
+ out.write('\n')
+
+ out.write(' WORKING_DIRECTORY "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('"\n')
+
+ out.write(' COMMENT "Copy ${target}"\n')
+
+ out.write(' VERBATIM)\n')
+
+ synthetic_dependencies.add(outputs_name)
+
+
+def WriteCompilerFlags(out, target, project, sources):
+ # Hack, set linker language to c if no c or cxx files present.
+ if 'c' not in sources and 'cxx' not in sources:
+ SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])
+
+ # Mark uncompiled sources as uncompiled.
+ if 'input' in sources:
+ SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
+ if 'other' in sources:
+ SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')
+
+ # Mark object sources as linkable.
+ if 'obj' in sources:
+ SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')
+
+ # TODO: 'output_name', 'output_dir', 'output_extension'
+ # This includes using 'source_outputs' to direct compiler output.
+
+ # Includes
+ includes = target.properties.get('include_dirs', [])
+ if includes:
+ out.write('set_property(TARGET "${target}" ')
+ out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
+ for include_dir in includes:
+ out.write('\n "')
+ out.write(project.GetAbsolutePath(include_dir))
+ out.write('"')
+ out.write(')\n')
+
+ # Defines
+ defines = target.properties.get('defines', [])
+ if defines:
+ SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')
+
+ # Compile flags
+ # "arflags", "asmflags", "cflags",
+ # "cflags_c", "cflags_cc", "cflags_objc", "cflags_objcc"
+ # CMake does not have per target lang compile flags.
+ # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
+ # http://public.kitware.com/Bug/view.php?id=14857
+ flags = []
+ flags.extend(target.properties.get('cflags', []))
+ cflags_asm = target.properties.get('asmflags', [])
+ cflags_c = target.properties.get('cflags_c', [])
+ cflags_cxx = target.properties.get('cflags_cc', [])
+ if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
+ flags.extend(cflags_c)
+ elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
+ flags.extend(cflags_cxx)
+ else:
+ # TODO: This is broken, one cannot generally set properties on files,
+ # as other targets may require different properties on the same files.
+ if 'asm' in sources and cflags_asm:
+ SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
+ if 'c' in sources and cflags_c:
+ SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
+ if 'cxx' in sources and cflags_cxx:
+ SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
+ if flags:
+ SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')
+
+ # Linker flags
+ ldflags = target.properties.get('ldflags', [])
+ if ldflags:
+ SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
+
+
+gn_target_types_that_absorb_objects = (
+ 'executable',
+ 'loadable_module',
+ 'shared_library',
+ 'static_library'
+)
+
+
+def WriteSourceVariables(out, target, project):
+ # gn separates the sheep from the goats based on file extensions.
+ # A full separation is done here because of flag handling (see Compile flags).
+ source_types = {'cxx':[], 'c':[], 'asm':[],
+ 'obj':[], 'obj_target':[], 'input':[], 'other':[]}
+
+ # TODO .def files on Windows
+ for source in target.properties.get('sources', []):
+ _, ext = posixpath.splitext(source)
+ source_abs_path = project.GetAbsolutePath(source)
+ source_types[source_file_types.get(ext, 'other')].append(source_abs_path)
+
+ for input_path in target.properties.get('inputs', []):
+ input_abs_path = project.GetAbsolutePath(input_path)
+ source_types['input'].append(input_abs_path)
+
+ # OBJECT library dependencies need to be listed as sources.
+ # Only executables and non-OBJECT libraries may reference an OBJECT library.
+ # https://gitlab.kitware.com/cmake/cmake/issues/14778
+ if target.gn_type in gn_target_types_that_absorb_objects:
+ object_dependencies = set()
+ project.GetObjectSourceDependencies(target.gn_name, object_dependencies)
+ for dependency in object_dependencies:
+ cmake_dependency_name = GetCMakeTargetName(dependency)
+ obj_target_sources = '$<TARGET_OBJECTS:' + cmake_dependency_name + '>'
+ source_types['obj_target'].append(obj_target_sources)
+
+ sources = {}
+ for source_type, sources_of_type in source_types.items():
+ if sources_of_type:
+ sources[source_type] = '${target}__' + source_type + '_srcs'
+ SetVariableList(out, sources[source_type], sources_of_type)
+ return sources
+
+
+def WriteTarget(out, target, project):
+ out.write('\n#')
+ out.write(target.gn_name)
+ out.write('\n')
+
+ if target.cmake_type is None:
+ print 'Target {} has unknown target type {}, skipping.'.format(
+ target.gn_name, target.gn_type)
+ return
+
+ SetVariable(out, 'target', target.cmake_name)
+
+ sources = WriteSourceVariables(out, target, project)
+
+ synthetic_dependencies = set()
+ if target.gn_type == 'action':
+ WriteAction(out, target, project, sources, synthetic_dependencies)
+ if target.gn_type == 'action_foreach':
+ WriteActionForEach(out, target, project, sources, synthetic_dependencies)
+ if target.gn_type == 'copy':
+ WriteCopy(out, target, project, sources, synthetic_dependencies)
+
+ out.write(target.cmake_type.command)
+ out.write('("${target}"')
+ if target.cmake_type.modifier is not None:
+ out.write(' ')
+ out.write(target.cmake_type.modifier)
+ for sources_type_name in sources.values():
+ WriteVariable(out, sources_type_name, ' ')
+ if synthetic_dependencies:
+ out.write(' DEPENDS')
+ for synthetic_dependency in synthetic_dependencies:
+ WriteVariable(out, synthetic_dependency, ' ')
+ out.write(')\n')
+
+ if target.cmake_type.command != 'add_custom_target':
+ WriteCompilerFlags(out, target, project, sources)
+
+ libraries = set()
+ nonlibraries = set()
+
+ dependencies = set(target.properties.get('deps', []))
+ # Transitive OBJECT libraries are in sources.
+ # Those sources are dependent on the OBJECT library dependencies.
+ # Those sources cannot bring in library dependencies.
+ object_dependencies = set()
+ if target.gn_type != 'source_set':
+ project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
+ for object_dependency in object_dependencies:
+ dependencies.update(project.targets.get(object_dependency).get('deps', []))
+
+ for dependency in dependencies:
+ gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
+ cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
+ cmake_dependency_name = GetCMakeTargetName(dependency)
+ if cmake_dependency_type.command != 'add_library':
+ nonlibraries.add(cmake_dependency_name)
+ elif cmake_dependency_type.modifier != 'OBJECT':
+ if target.cmake_type.is_linkable:
+ libraries.add(cmake_dependency_name)
+ else:
+ nonlibraries.add(cmake_dependency_name)
+
+ # Non-library dependencies.
+ if nonlibraries:
+ out.write('add_dependencies("${target}"')
+ for nonlibrary in nonlibraries:
+ out.write('\n "')
+ out.write(nonlibrary)
+ out.write('"')
+ out.write(')\n')
+
+ # Non-OBJECT library dependencies.
+ external_libraries = target.properties.get('libs', [])
+ if target.cmake_type.is_linkable and (external_libraries or libraries):
+ library_dirs = target.properties.get('lib_dirs', [])
+ if library_dirs:
+ SetVariableList(out, '${target}__library_directories', library_dirs)
+
+ system_libraries = []
+ for external_library in external_libraries:
+ if '/' in external_library:
+ libraries.add(project.GetAbsolutePath(external_library))
+ else:
+ if external_library.endswith('.framework'):
+ external_library = external_library[:-len('.framework')]
+ system_library = 'library__' + external_library
+ if library_dirs:
+ system_library = system_library + '__for_${target}'
+ out.write('find_library("')
+ out.write(CMakeStringEscape(system_library))
+ out.write('" "')
+ out.write(CMakeStringEscape(external_library))
+ out.write('"')
+ if library_dirs:
+ out.write(' PATHS "')
+ WriteVariable(out, '${target}__library_directories')
+ out.write('"')
+ out.write(')\n')
+ system_libraries.append(system_library)
+ out.write('target_link_libraries("${target}"')
+ for library in libraries:
+ out.write('\n "')
+ out.write(CMakeStringEscape(library))
+ out.write('"')
+ for system_library in system_libraries:
+ WriteVariable(out, system_library, '\n "')
+ out.write('"')
+ out.write(')\n')
+
+
+def WriteProject(project):
+ out = open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+')
+ out.write('# Generated by gn_to_cmake.py.\n')
+ out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+ out.write('cmake_policy(VERSION 2.8.8)\n\n')
+
+ # Update the gn generated ninja build.
+ # If a build file has changed, this will update CMakeLists.ext if
+ # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+ # style was used to create this config.
+ out.write('execute_process(COMMAND ninja -C "')
+ out.write(CMakeStringEscape(project.build_path))
+ out.write('" build.ninja)\n')
+
+ out.write('include(CMakeLists.ext)\n')
+ out.close()
+
+ out = open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+')
+ out.write('# Generated by gn_to_cmake.py.\n')
+ out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+ out.write('cmake_policy(VERSION 2.8.8)\n')
+
+ # The following appears to be as-yet undocumented.
+ # http://public.kitware.com/Bug/view.php?id=8392
+ out.write('enable_language(ASM)\n\n')
+ # ASM-ATT does not support .S files.
+ # output.write('enable_language(ASM-ATT)\n')
+
+ # Current issues with automatic re-generation:
+ # The gn generated build.ninja target uses build.ninja.d
+ # but build.ninja.d does not contain the ide or gn.
+ # Currently the ide is not run if the project.json file is not changed
+ # but the ide needs to be run anyway if it has itself changed.
+ # This can be worked around by deleting the project.json file.
+ out.write('file(READ "')
+ gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
+ out.write(CMakeStringEscape(gn_deps_file))
+ out.write('" "gn_deps_string" OFFSET ')
+ out.write(str(len('build.ninja: ')))
+ out.write(')\n')
+ # One would think this would need to worry about escaped spaces
+ # but gn doesn't escape spaces here (it generates invalid .d files).
+ out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
+ out.write('foreach("gn_dep" ${gn_deps})\n')
+ out.write(' configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
+ out.write('endforeach("gn_dep")\n')
+
+ for target_name in project.targets.keys():
+ out.write('\n')
+ WriteTarget(out, Target(target_name, project), project)
+
+
+def main():
+ if len(sys.argv) != 2:
+ print 'Usage: ' + sys.argv[0] + ' <json_file_name>'
+ exit(1)
+
+ json_path = sys.argv[1]
+ project = None
+ with open(json_path, 'r') as json_file:
+ project = json.loads(json_file.read())
+
+ WriteProject(Project(project))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/deps/v8/build/android/gradle/java.jinja b/deps/v8/build/android/gradle/java.jinja
new file mode 100644
index 0000000000..92fe575af8
--- /dev/null
+++ b/deps/v8/build/android/gradle/java.jinja
@@ -0,0 +1,41 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+apply plugin: "java"
+{% if template_type == 'java_binary' %}
+apply plugin: "application"
+{% endif %}
+
+sourceSets {
+ main {
+ java.srcDirs = [
+{% for path in main.java_dirs %}
+ "{{ path }}",
+{% endfor %}
+ ]
+{% if main.java_excludes is defined %}
+ java.filter.exclude(
+{% for path in main.java_excludes %}
+ "{{ path }}",
+{% endfor %}
+ )
+{% endif %}
+ }
+}
+
+sourceCompatibility = JavaVersion.VERSION_1_8
+targetCompatibility = JavaVersion.VERSION_1_8
+
+{% if template_type == 'java_binary' %}
+applicationName = "{{ target_name }}"
+{% if main_class %}
+mainClassName = "{{ main_class }}"
+{% endif %}
+{% endif %}
+{% if template_type in ('java_binary', 'java_library') %}
+archivesBaseName = "{{ target_name }}"
+{% endif %}
+
+{% include 'dependencies.jinja' %}
diff --git a/deps/v8/build/android/gradle/manifest.jinja b/deps/v8/build/android/gradle/manifest.jinja
new file mode 100644
index 0000000000..dea7071eb6
--- /dev/null
+++ b/deps/v8/build/android/gradle/manifest.jinja
@@ -0,0 +1,7 @@
+{# Copyright 2017 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="{{ package }}">
+</manifest>
diff --git a/deps/v8/build/android/gradle/root.jinja b/deps/v8/build/android/gradle/root.jinja
new file mode 100644
index 0000000000..a53591e965
--- /dev/null
+++ b/deps/v8/build/android/gradle/root.jinja
@@ -0,0 +1,20 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+buildscript {
+ repositories {
+ google()
+ jcenter()
+ }
+ dependencies {
+{% if channel == 'canary' %}
+ classpath "com.android.tools.build:gradle:3.5.0-alpha07"
+{% elif channel == 'beta' %}
+ classpath "com.android.tools.build:gradle:3.1.0-beta4"
+{% else %}
+ classpath "com.android.tools.build:gradle:3.0.1"
+{% endif %}
+ }
+}
diff --git a/deps/v8/build/android/gyp/OWNERS b/deps/v8/build/android/gyp/OWNERS
new file mode 100644
index 0000000000..74dca6f718
--- /dev/null
+++ b/deps/v8/build/android/gyp/OWNERS
@@ -0,0 +1,6 @@
+agrieve@chromium.org
+estevenson@chromium.org
+digit@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/gyp/aar.py b/deps/v8/build/android/gyp/aar.py
new file mode 100755
index 0000000000..d0f357db33
--- /dev/null
+++ b/deps/v8/build/android/gyp/aar.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes an Android AAR file."""
+
+import argparse
+import os
+import posixpath
+import re
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+from util import build_utils
+from util import md5_check
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir)))
+import gn_helpers
+
+
+def _IsManifestEmpty(manifest_str):
+ """Returns True if the given manifest has no merge-worthy elements.
+
+ Merge-worthy elements are e.g. <activity>, <service>, etc.
+ """
+ doc = ElementTree.fromstring(manifest_str)
+ for node in doc:
+ if node.tag == 'application':
+ if len(node):
+ return False
+ elif node.tag != 'uses-sdk':
+ return False
+
+ return True
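+
+# Illustrative behaviour (derived from the checks above):
+#   _IsManifestEmpty('<manifest><uses-sdk/></manifest>') -> True
+#   _IsManifestEmpty('<manifest><application/></manifest>') -> True
+#   _IsManifestEmpty('<manifest><activity/></manifest>') -> False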
+
+
+def _CreateInfo(aar_file):
+ data = {}
+ data['aidl'] = []
+ data['assets'] = []
+ data['resources'] = []
+ data['subjars'] = []
+ data['subjar_tuples'] = []
+ data['has_classes_jar'] = False
+ data['has_proguard_flags'] = False
+ data['has_native_libraries'] = False
+ data['has_r_text_file'] = False
+ with zipfile.ZipFile(aar_file) as z:
+ data['is_manifest_empty'] = (
+ _IsManifestEmpty(z.read('AndroidManifest.xml')))
+
+ for name in z.namelist():
+ if name.endswith('/'):
+ continue
+ if name.startswith('aidl/'):
+ data['aidl'].append(name)
+ elif name.startswith('res/'):
+ data['resources'].append(name)
+ elif name.startswith('libs/') and name.endswith('.jar'):
+ label = posixpath.basename(name)[:-4]
+ label = re.sub(r'[^a-zA-Z0-9._]', '_', label)
+ data['subjars'].append(name)
+ data['subjar_tuples'].append([label, name])
+ elif name.startswith('assets/'):
+ data['assets'].append(name)
+ elif name.startswith('jni/'):
+ data['has_native_libraries'] = True
+ if 'native_libraries' in data:
+ data['native_libraries'].append(name)
+ else:
+ data['native_libraries'] = [name]
+ elif name == 'classes.jar':
+ data['has_classes_jar'] = True
+ elif name == 'proguard.txt':
+ data['has_proguard_flags'] = True
+ elif name == 'R.txt':
+ # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
+ # have no resources as well. We treat empty R.txt as having no R.txt.
+ data['has_r_text_file'] = (z.read('R.txt').strip() != '')
+
+ return """\
+# Generated by //build/android/gyp/aar.py
+# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
+
+""" + gn_helpers.ToGNString(data)
+
+
+def _AddCommonArgs(parser):
+ parser.add_argument('aar_file',
+ help='Path to the AAR file.',
+ type=os.path.normpath)
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ command_parsers = parser.add_subparsers(dest='command')
+ subp = command_parsers.add_parser(
+ 'list', help='Output a GN scope describing the contents of the .aar.')
+ _AddCommonArgs(subp)
+ subp.add_argument('--output',
+ help='Output file.',
+ default='-')
+
+ subp = command_parsers.add_parser('extract', help='Extracts the .aar')
+ _AddCommonArgs(subp)
+ subp.add_argument('--output-dir',
+ help='Output directory for the extracted files.',
+ required=True,
+ type=os.path.normpath)
+ subp.add_argument('--assert-info-file',
+ help='Path to .info file. Asserts that it matches what '
+ '"list" would output.',
+ type=argparse.FileType('r'))
+
+ args = parser.parse_args()
+
+ if args.command == 'extract':
+ if args.assert_info_file:
+ expected = _CreateInfo(args.aar_file)
+ actual = args.assert_info_file.read()
+ if actual != expected:
+ raise Exception('android_aar_prebuilt() cached .info file is '
+ 'out-of-date. Run gn gen with '
+ 'update_android_aar_prebuilts=true to update it.')
+
+ def clobber():
+ # Clear previously extracted versions of the AAR if it is obsolete.
+ shutil.rmtree(args.output_dir, ignore_errors=True)
+ build_utils.ExtractAll(args.aar_file, path=args.output_dir)
+
+ with zipfile.ZipFile(args.aar_file) as zf:
+ md5_check.CallAndRecordIfStale(
+ clobber, input_paths=[args.aar_file],
+ output_paths=[
+ os.path.join(args.output_dir, n) for n in zf.namelist()])
+
+ elif args.command == 'list':
+ aar_info = _CreateInfo(args.aar_file)
+ aar_output_present = args.output != '-' and os.path.isfile(args.output)
+ if aar_output_present:
+ # Some .info files are read-only, for example the cipd-controlled ones
+ # under third_party/android_deps/repository. To deal with these, first
+ # check that their content is correct, and if it is, exit without touching
+ # the file system.
+ file_info = open(args.output, 'r').read()
+ if file_info == aar_info:
+ return
+
+ # Try to write the file. This may fail for read-only ones that were
+ # not updated.
+ try:
+ with open(args.output, 'w') as f:
+ f.write(aar_info)
+ except IOError as e:
+ if not aar_output_present:
+ raise e
+ raise Exception('Could not update output file: %s\n%s\n' %
+ (args.output, e))
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/aar.pydeps b/deps/v8/build/android/gyp/aar.pydeps
new file mode 100644
index 0000000000..e08c5475e3
--- /dev/null
+++ b/deps/v8/build/android/gyp/aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../gn_helpers.py
+aar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/aidl.py b/deps/v8/build/android/gyp/aidl.py
new file mode 100755
index 0000000000..64ad29041a
--- /dev/null
+++ b/deps/v8/build/android/gyp/aidl.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl."""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+ option_parser.add_option('--imports', help='Files to import.')
+ option_parser.add_option('--includes',
+ help='Directories to add as import search paths.')
+ option_parser.add_option('--srcjar', help='Path for srcjar output.')
+ options, args = option_parser.parse_args(argv[1:])
+
+ with build_utils.TempDir() as temp_dir:
+ for f in args:
+ classname = os.path.splitext(os.path.basename(f))[0]
+ output = os.path.join(temp_dir, classname + '.java')
+ aidl_cmd = [options.aidl_path]
+ aidl_cmd += [
+ '-p' + s for s in build_utils.ParseGnList(options.imports)
+ ]
+ if options.includes is not None:
+ aidl_cmd += [
+ '-I' + s for s in build_utils.ParseGnList(options.includes)
+ ]
+ aidl_cmd += [
+ f,
+ output
+ ]
+ build_utils.CheckOutput(aidl_cmd)
+
+ with build_utils.AtomicOutput(options.srcjar) as f:
+ with zipfile.ZipFile(f, 'w') as srcjar:
+ for path in build_utils.FindInDirectory(temp_dir, '*.java'):
+ with open(path) as fileobj:
+ data = fileobj.read()
+ pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
+ arcname = '%s/%s' % (
+ pkg_name.replace('.', '/'), os.path.basename(path))
+ build_utils.AddToZipHermetic(srcjar, arcname, data=data)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/deps/v8/build/android/gyp/aidl.pydeps b/deps/v8/build/android/gyp/aidl.pydeps
new file mode 100644
index 0000000000..2dbce376f1
--- /dev/null
+++ b/deps/v8/build/android/gyp/aidl.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../gn_helpers.py
+aidl.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/apkbuilder.py b/deps/v8/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000000..310a192828
--- /dev/null
+++ b/deps/v8/build/android/gyp/apkbuilder.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import itertools
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import finalize_apk
+
+from util import build_utils
+
+
+# Taken from aapt's Package.cpp:
+_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
+ '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
+ '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
+ '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
+ '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
+def _ParseArgs(args):
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument(
+ '--assets',
+ help='GYP-list of files to add as assets in the form '
+ '"srcPath:zipPath", where ":zipPath" is optional.')
+ parser.add_argument(
+ '--java-resources', help='GYP-list of java_resources JARs to include.')
+ parser.add_argument('--write-asset-list',
+ action='store_true',
+ help='Whether to create an assets/assets_list file.')
+ parser.add_argument(
+ '--uncompressed-assets',
+ help='Same as --assets, except disables compression.')
+ parser.add_argument('--resource-apk',
+ help='An .ap_ file built using aapt',
+ required=True)
+ parser.add_argument('--output-apk',
+ help='Path to the output file',
+ required=True)
+ parser.add_argument('--format', choices=['apk', 'bundle-module'],
+ default='apk', help='Specify output format.')
+ parser.add_argument('--dex-file',
+ help='Path to the classes.dex to use')
+ parser.add_argument('--uncompress-dex', action='store_true',
+ help='Store .dex files uncompressed in the APK')
+ parser.add_argument('--native-libs',
+ action='append',
+ help='GYP-list of native libraries to include. '
+ 'Can be specified multiple times.',
+ default=[])
+ parser.add_argument('--secondary-native-libs',
+ action='append',
+ help='GYP-list of native libraries for secondary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_argument('--android-abi',
+ help='Android architecture to use for native libraries')
+ parser.add_argument('--secondary-android-abi',
+ help='The secondary Android architecture to use for '
+ 'secondary native libraries')
+ parser.add_argument(
+ '--native-lib-placeholders',
+ help='GYP-list of native library placeholders to add.')
+ parser.add_argument(
+ '--secondary-native-lib-placeholders',
+ help='GYP-list of native library placeholders to add '
+ 'for the secondary ABI')
+ parser.add_argument('--uncompress-shared-libraries', default='False',
+ choices=['true', 'True', 'false', 'False'],
+ help='Whether to uncompress native shared libraries. Argument must be '
+ 'a boolean value.')
+ parser.add_argument('--apksigner-path',
+ help='Path to the apksigner executable.')
+ parser.add_argument('--zipalign-path',
+ help='Path to the zipalign executable.')
+ parser.add_argument('--key-path',
+ help='Path to keystore for signing.')
+ parser.add_argument('--key-passwd',
+ help='Keystore password')
+ parser.add_argument('--key-name',
+ help='Keystore name')
+ options = parser.parse_args(args)
+ options.assets = build_utils.ParseGnList(options.assets)
+ options.uncompressed_assets = build_utils.ParseGnList(
+ options.uncompressed_assets)
+ options.native_lib_placeholders = build_utils.ParseGnList(
+ options.native_lib_placeholders)
+ options.secondary_native_lib_placeholders = build_utils.ParseGnList(
+ options.secondary_native_lib_placeholders)
+ options.java_resources = build_utils.ParseGnList(options.java_resources)
+ all_libs = []
+ for gyp_list in options.native_libs:
+ all_libs.extend(build_utils.ParseGnList(gyp_list))
+ options.native_libs = all_libs
+ secondary_libs = []
+ for gyp_list in options.secondary_native_libs:
+ secondary_libs.extend(build_utils.ParseGnList(gyp_list))
+ options.secondary_native_libs = secondary_libs
+
+ # --apksigner-path, --zipalign-path, --key-xxx arguments are
+ # required when building an APK, but not a bundle module.
+ if options.format == 'apk':
+ required_args = ['apksigner_path', 'zipalign_path', 'key_path',
+ 'key_passwd', 'key_name']
+ for required in required_args:
+ if not vars(options)[required]:
+ raise Exception('Argument --%s is required for APKs.' % (
+ required.replace('_', '-')))
+
+ options.uncompress_shared_libraries = \
+ options.uncompress_shared_libraries in [ 'true', 'True' ]
+
+ if not options.android_abi and (options.native_libs or
+ options.native_lib_placeholders):
+ raise Exception('Must specify --android-abi with --native-libs')
+ if not options.secondary_android_abi and (options.secondary_native_libs or
+ options.secondary_native_lib_placeholders):
+ raise Exception('Must specify --secondary-android-abi with'
+ ' --secondary-native-libs')
+ return options
+
+
+def _SplitAssetPath(path):
+ """Returns (src, dest) given an asset path in the form src[:dest]."""
+ path_parts = path.split(':')
+ src_path = path_parts[0]
+ if len(path_parts) > 1:
+ dest_path = path_parts[1]
+ else:
+ dest_path = os.path.basename(src_path)
+ return src_path, dest_path
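+
+# Illustrative (derived from the split above):
+#   _SplitAssetPath('fonts/a.ttf:alias/a.ttf') -> ('fonts/a.ttf', 'alias/a.ttf')
+#   _SplitAssetPath('fonts/a.ttf') -> ('fonts/a.ttf', 'a.ttf')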
+
+
+def _ExpandPaths(paths):
+ """Converts src:dst into tuples and enumerates files within directories.
+
+ Args:
+ paths: Paths in the form "src_path:dest_path"
+
+ Returns:
+ A list of (src_path, dest_path) tuples sorted by dest_path (for stable
+ ordering within output .apk).
+ """
+ ret = []
+ for path in paths:
+ src_path, dest_path = _SplitAssetPath(path)
+ if os.path.isdir(src_path):
+ for f in build_utils.FindInDirectory(src_path, '*'):
+ ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
+ else:
+ ret.append((src_path, dest_path))
+ ret.sort(key=lambda t:t[1])
+ return ret
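+
+# Illustrative for plain files (directories are enumerated recursively):
+#   _ExpandPaths(['a.txt:sub/a.txt', 'b.txt'])
+#     -> [('b.txt', 'b.txt'), ('a.txt', 'sub/a.txt')] (sorted by dest path)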
+
+
+def _AddAssets(apk, path_tuples, disable_compression=False):
+ """Adds the given paths to the apk.
+
+ Args:
+ apk: ZipFile to write to.
+ paths: List of paths (with optional :zipPath suffix) to add.
+ disable_compression: Whether to disable compression.
+ """
+ # Group all uncompressed assets together in the hope that it will increase
+ # locality of mmap'ed files.
+ for target_compress in (False, True):
+ for src_path, dest_path in path_tuples:
+
+ compress = not disable_compression and (
+ os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
+ if target_compress == compress:
+ apk_path = 'assets/' + dest_path
+ try:
+ apk.getinfo(apk_path)
+ # Should never happen since write_build_config.py handles merging.
+ raise Exception('Multiple targets specified the asset path: %s' %
+ apk_path)
+ except KeyError:
+ build_utils.AddToZipHermetic(apk, apk_path, src_path=src_path,
+ compress=compress)
+
+
+def _CreateAssetsList(path_tuples):
+ """Returns a newline-separated list of asset paths for the given paths."""
+ dests = sorted(t[1] for t in path_tuples)
+ return '\n'.join(dests) + '\n'
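+
+# Illustrative: destination paths come out sorted, one per line, with a
+# trailing newline, e.g.
+#   _CreateAssetsList([('s1', 'b.txt'), ('s2', 'a.txt')]) == 'a.txt\nb.txt\n'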
+
+
+def _AddNativeLibraries(out_apk, native_libs, android_abi, uncompress):
+ """Add native libraries to APK."""
+ has_crazy_linker = any('android_linker' in os.path.basename(p)
+ for p in native_libs)
+ for path in native_libs:
+ basename = os.path.basename(path)
+
+ compress = None
+ if (uncompress and os.path.splitext(basename)[1] == '.so'
+ and 'android_linker' not in basename
+ and (not has_crazy_linker or 'clang_rt' not in basename)
+ and (not has_crazy_linker or 'crashpad_handler' not in basename)):
+ compress = False
+ # Add a prefix to prevent the Android installer from extracting the
+ # library upon install.
+ if has_crazy_linker:
+ basename = 'crazy.' + basename
+
+ apk_path = 'lib/%s/%s' % (android_abi, basename)
+ build_utils.AddToZipHermetic(out_apk,
+ apk_path,
+ src_path=path,
+ compress=compress)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ native_libs = sorted(options.native_libs)
+
+ # Include native libs in the depfile_deps since GN doesn't know about the
+ # dependencies when is_component_build=true.
+ depfile_deps = list(native_libs)
+
+ secondary_native_libs = []
+ if options.secondary_native_libs:
+ secondary_native_libs = sorted(options.secondary_native_libs)
+ depfile_deps += secondary_native_libs
+
+ if options.java_resources:
+ # Included via .build_config, so need to write it to depfile.
+ depfile_deps.extend(options.java_resources)
+
+ assets = _ExpandPaths(options.assets)
+ uncompressed_assets = _ExpandPaths(options.uncompressed_assets)
+
+ # Included via .build_config, so need to write it to depfile.
+ depfile_deps.extend(x[0] for x in assets)
+ depfile_deps.extend(x[0] for x in uncompressed_assets)
+
+ # Bundle modules have a structure similar to APKs, except that resources
+ # are compiled in protobuf format (instead of binary xml), and that some
+ # files are placed in different top-level directories, e.g.:
+ # AndroidManifest.xml -> manifest/AndroidManifest.xml
+ # classes.dex -> dex/classes.dex
+ # res/ -> res/ (unchanged)
+ # assets/ -> assets/ (unchanged)
+ # <other-file> -> root/<other-file>
+ #
+ # Hence, the following variables are used to control the location of files in
+ # the final archive.
+ if options.format == 'bundle-module':
+ apk_manifest_dir = 'manifest/'
+ apk_root_dir = 'root/'
+ apk_dex_dir = 'dex/'
+ else:
+ apk_manifest_dir = ''
+ apk_root_dir = ''
+ apk_dex_dir = ''
+
+ # Targets generally do not depend on apks, so no need for only_if_changed.
+ with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
+ with zipfile.ZipFile(options.resource_apk) as resource_apk, \
+ zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED) as out_apk:
+
+ def copy_resource(zipinfo, out_dir=''):
+ compress = zipinfo.compress_type != zipfile.ZIP_STORED
+ build_utils.AddToZipHermetic(
+ out_apk,
+ out_dir + zipinfo.filename,
+ data=resource_apk.read(zipinfo.filename),
+ compress=compress)
+
+ # Make assets come before resources in order to maintain the same file
+ # ordering as GYP / aapt. http://crbug.com/561862
+ resource_infos = resource_apk.infolist()
+
+ # 1. AndroidManifest.xml
+ copy_resource(
+ resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)
+
+ # 2. Assets
+ if options.write_asset_list:
+ data = _CreateAssetsList(itertools.chain(assets, uncompressed_assets))
+ build_utils.AddToZipHermetic(out_apk, 'assets/assets_list', data=data)
+
+ _AddAssets(out_apk, assets, disable_compression=False)
+ _AddAssets(out_apk, uncompressed_assets, disable_compression=True)
+
+ # 3. Dex files
+ if options.dex_file and options.dex_file.endswith('.zip'):
+ with zipfile.ZipFile(options.dex_file, 'r') as dex_zip:
+ for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
+ build_utils.AddToZipHermetic(
+ out_apk,
+ apk_dex_dir + dex,
+ data=dex_zip.read(dex),
+ compress=not options.uncompress_dex)
+ elif options.dex_file:
+ build_utils.AddToZipHermetic(
+ out_apk,
+ apk_dex_dir + 'classes.dex',
+ src_path=options.dex_file,
+ compress=not options.uncompress_dex)
+
+ # 4. Native libraries.
+ _AddNativeLibraries(out_apk, native_libs, options.android_abi,
+ options.uncompress_shared_libraries)
+
+ if options.secondary_android_abi:
+ _AddNativeLibraries(out_apk, secondary_native_libs,
+ options.secondary_android_abi,
+ options.uncompress_shared_libraries)
+
+ for name in sorted(options.native_lib_placeholders):
+ # Note: Empty libs files are ignored by md5check (can cause issues
+ # with stale builds when the only change is adding/removing
+ # placeholders).
+ apk_path = 'lib/%s/%s' % (options.android_abi, name)
+ build_utils.AddToZipHermetic(out_apk, apk_path, data='')
+
+ for name in sorted(options.secondary_native_lib_placeholders):
+ # Note: Empty libs files are ignored by md5check (can cause issues
+ # with stale builds when the only change is adding/removing
+ # placeholders).
+ apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
+ build_utils.AddToZipHermetic(out_apk, apk_path, data='')
+
+ # 5. Resources
+ for info in resource_infos:
+ if info.filename != 'AndroidManifest.xml':
+ copy_resource(info)
+
+ # 6. Java resources that should be accessible via
+ # Class.getResourceAsStream(), in particular parts of Emma jar.
+ # Prebuilt jars may contain class files which we shouldn't include.
+ for java_resource in options.java_resources:
+ with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
+ for apk_path in java_resource_jar.namelist():
+ apk_path_lower = apk_path.lower()
+
+ if apk_path_lower.startswith('meta-inf/'):
+ continue
+ if apk_path_lower.endswith('/'):
+ continue
+ if apk_path_lower.endswith('.class'):
+ continue
+
+ build_utils.AddToZipHermetic(
+ out_apk,
+ apk_root_dir + apk_path,
+ data=java_resource_jar.read(apk_path))
+
+ if options.format == 'apk':
+ finalize_apk.FinalizeApk(options.apksigner_path, options.zipalign_path,
+ f.name, f.name, options.key_path,
+ options.key_passwd, options.key_name)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.output_apk,
+ inputs=depfile_deps,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/apkbuilder.pydeps b/deps/v8/build/android/gyp/apkbuilder.pydeps
new file mode 100644
index 0000000000..3ae03319c9
--- /dev/null
+++ b/deps/v8/build/android/gyp/apkbuilder.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py
+../../gn_helpers.py
+apkbuilder.py
+finalize_apk.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/assert_static_initializers.py b/deps/v8/build/android/gyp/assert_static_initializers.py
new file mode 100755
index 0000000000..019baface1
--- /dev/null
+++ b/deps/v8/build/android/gyp/assert_static_initializers.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks the number of static initializers in an APK's library."""
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT,
+ 'tools', 'linux',
+ 'dump-static-initializers.py')
+
+
+def _RunReadelf(so_path, options, tool_prefix=''):
+ return subprocess.check_output([tool_prefix + 'readelf'] + options +
+ [so_path])
+
+
+def _ParseLibBuildId(so_path, tool_prefix):
+ """Returns the Build ID of the given native library."""
+ stdout = _RunReadelf(so_path, ['-n'], tool_prefix)
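+ # readelf -n output contains a note line like (hex value varies):
+ #   Build ID: 5f0ce5b18666e961ecb5b6edb0013873c01a7c78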
+ match = re.search(r'Build ID: (\w+)', stdout)
+ return match.group(1) if match else None
+
+
+def _VerifyLibBuildIdsMatch(tool_prefix, *so_files):
+ if len(set(_ParseLibBuildId(f, tool_prefix) for f in so_files)) > 1:
+ raise Exception('Found differing build ids in output directory and apk. '
+ 'Your output directory is likely stale.')
+
+
+def _GetStaticInitializers(so_path, tool_prefix):
+ output = subprocess.check_output(
+ [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t', tool_prefix])
+ summary = re.search(r'Found \d+ static initializers in (\d+) files.', output)
+ return output.splitlines()[:-1], int(summary.group(1))
+
+
+def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix):
+ lib_name = os.path.basename(apk_so_name).replace('crazy.', '')
+ so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
+ if not os.path.exists(so_with_symbols_path):
+ raise Exception('Unstripped .so not found. Looked here: %s' %
+ so_with_symbols_path)
+ _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path)
+ sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix)
+ for si in sis:
+ print si
+
+
+# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+def _ReadInitArray(so_path, tool_prefix):
+ stdout = _RunReadelf(so_path, ['-SW'], tool_prefix)
+ # Matches e.g.: .init_array INIT_ARRAY 000000000516add0 5169dd0 000010 00 WA 0 0 8
+ match = re.search(r'\.init_array.*$', stdout, re.MULTILINE)
+ if not match:
+ raise Exception('Did not find section: .init_array in:\n' + stdout)
+ size_str = re.split(r'\W+', match.group(0))[5]
+ return int(size_str, 16)
+
+
+def _CountStaticInitializers(so_path, tool_prefix):
+ # Find the number of files with at least one static initializer.
+ # First determine if we're 32 or 64 bit
+ stdout = _RunReadelf(so_path, ['-h'], tool_prefix)
+ elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0)
+ elf_class = re.split(r'\W+', elf_class_line)[1]
+ if elf_class == 'ELF32':
+ word_size = 4
+ else:
+ word_size = 8
+
+ # Then find the number of files with global static initializers.
+ # NOTE: this is very implementation-specific and makes assumptions
+ # about how compiler and linker implement global static initializers.
+ init_array_size = _ReadInitArray(so_path, tool_prefix)
+ return init_array_size / word_size
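+
+# e.g. (illustrative) a 0x20-byte .init_array in an ELF64 library
+# (word_size == 8) corresponds to 4 static-initializer entries.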
+
+
+def _AnalyzeStaticInitializers(apk_filename, tool_prefix, dump_sis, out_dir,
+ ignored_libs):
+ # Static initializer counting mostly copies logic in
+ # infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+ with zipfile.ZipFile(apk_filename) as z:
+ so_files = [
+ f for f in z.infolist() if f.filename.endswith('.so')
+ and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs
+ ]
+ # Skip checking static initializers for secondary abi libs. They will be
+ # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so
+ # files in the output directory in 64 bit builds.
+ has_64 = any('64' in f.filename for f in so_files)
+ files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]
+
+ si_count = 0
+ for f in files_to_check:
+ with tempfile.NamedTemporaryFile() as temp:
+ temp.write(z.read(f))
+ temp.flush()
+ si_count += _CountStaticInitializers(temp.name, tool_prefix)
+ if dump_sis:
+ # Print count and list of SIs reported by dump-static-initializers.py.
+ # Doesn't work well on all archs (particularly arm), which is why
+ # the readelf method is used for tracking SI counts.
+ _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix)
+ return si_count
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--touch', help='File to touch upon success')
+ parser.add_argument('--tool-prefix', required=True,
+ help='Prefix for nm and friends')
+ parser.add_argument('--expected-count', required=True, type=int,
+ help='Fail if number of static initializers is not '
+ 'equal to this value.')
+ parser.add_argument('apk', help='APK file path.')
+ args = parser.parse_args()
+
+ #TODO(crbug.com/838414): add support for files included via loadable_modules.
+ ignored_libs = ['libarcore_sdk_c.so']
+
+ si_count = _AnalyzeStaticInitializers(args.apk, args.tool_prefix, False, '.',
+ ignored_libs)
+ if si_count != args.expected_count:
+ print 'Expected {} static initializers, but found {}.'.format(
+ args.expected_count, si_count)
+ if args.expected_count > si_count:
+ print 'You have removed one or more static initializers. Thanks!'
+ print 'To fix the build, update the expectation in:'
+ print ' //chrome/android/static_initializers.gni'
+ else:
+ print 'Dumping static initializers via dump-static-initializers.py:'
+ sys.stdout.flush()
+ _AnalyzeStaticInitializers(args.apk, args.tool_prefix, True, '.',
+ ignored_libs)
+ print
+ print 'If the above list is not useful, consider listing them with:'
+ print ' //tools/binary_size/diagnose_bloat.py'
+ print
+ print 'For more information:'
+ print (' https://chromium.googlesource.com/chromium/src/+/master/docs/'
+ 'static_initializers.md')
+ sys.exit(1)
+
+ if args.touch:
+ open(args.touch, 'w')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/assert_static_initializers.pydeps b/deps/v8/build/android/gyp/assert_static_initializers.pydeps
new file mode 100644
index 0000000000..e031668f46
--- /dev/null
+++ b/deps/v8/build/android/gyp/assert_static_initializers.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py
+../../gn_helpers.py
+assert_static_initializers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/bundletool.py b/deps/v8/build/android/gyp/bundletool.py
new file mode 100755
index 0000000000..ac9561e768
--- /dev/null
+++ b/deps/v8/build/android/gyp/bundletool.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple wrapper around the bundletool tool.
+
+Bundletool is distributed as a versioned jar file. This script abstracts the
+location and version of this jar file, as well as the JVM invocation."""
+
+import logging
+import os
+import subprocess
+import sys
+
+# Assume this is stored under build/android/gyp/
+BUNDLETOOL_DIR = os.path.abspath(os.path.join(
+ __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
+ 'bundletool'))
+
+BUNDLETOOL_VERSION = '0.9.0'
+
+BUNDLETOOL_JAR_PATH = os.path.join(
+ BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
+
+def RunBundleTool(args):
+ args = ['java', '-jar', BUNDLETOOL_JAR_PATH] + args
+ logging.debug(' '.join(args))
+ subprocess.check_call(args)
+
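+# Illustrative direct use (the 'version' command is part of bundletool's own
+# command set, not defined by this script):
+#   python bundletool.py version
+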
+if __name__ == '__main__':
+ RunBundleTool(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/bytecode_processor.py b/deps/v8/build/android/gyp/bytecode_processor.py
new file mode 100755
index 0000000000..020b52f5f0
--- /dev/null
+++ b/deps/v8/build/android/gyp/bytecode_processor.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps bin/helper/java_bytecode_rewriter and expands @FileArgs."""
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def _AddSwitch(parser, val):
+ parser.add_argument(
+ val, action='store_const', default='--disabled', const=val)
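+  # E.g. after _AddSwitch(parser, '--enable-assert'), args.enable_assert is
+  # '--enable-assert' when the flag is passed and '--disabled' otherwise.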
+
+
+def main(argv):
+ argv = build_utils.ExpandFileArgs(argv[1:])
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--script', required=True,
+ help='Path to the java binary wrapper script.')
+ parser.add_argument('--input-jar', required=True)
+ parser.add_argument('--output-jar', required=True)
+ parser.add_argument('--direct-classpath-jars', required=True)
+ parser.add_argument('--sdk-classpath-jars', required=True)
+ parser.add_argument('--extra-classpath-jars', dest='extra_jars',
+ action='append', default=[],
+ help='Extra inputs, passed last to the binary script.')
+ parser.add_argument('-v', '--verbose', action='store_true')
+ _AddSwitch(parser, '--is-prebuilt')
+ _AddSwitch(parser, '--enable-custom-resources')
+ _AddSwitch(parser, '--enable-assert')
+ _AddSwitch(parser, '--enable-thread-annotations')
+ _AddSwitch(parser, '--enable-check-class-path')
+ parser.add_argument(
+ '--split-compat-class-names',
+ action='append',
+ default=[],
+ help='Names of classes that need to be made SplitCompat-enabled.')
+ args = parser.parse_args(argv)
+
+ sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
+ assert len(sdk_jars) > 0
+
+ direct_jars = build_utils.ParseGnList(args.direct_classpath_jars)
+ assert len(direct_jars) > 0
+
+ extra_classpath_jars = []
+ for a in args.extra_jars:
+ extra_classpath_jars.extend(build_utils.ParseGnList(a))
+
+ split_compat_class_names = build_utils.ParseGnList(
+ args.split_compat_class_names)
+
+ if args.verbose:
+ verbose = '--verbose'
+ else:
+ verbose = '--not-verbose'
+
+ cmd = ([
+ args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
+ args.enable_assert, args.enable_custom_resources,
+ args.enable_thread_annotations, args.enable_check_class_path,
+ str(len(sdk_jars))
+ ] + sdk_jars + [str(len(direct_jars))] + direct_jars + [
+ str(len(split_compat_class_names))
+ ] + split_compat_class_names + extra_classpath_jars)
+ subprocess.check_call(cmd)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/deps/v8/build/android/gyp/bytecode_processor.pydeps b/deps/v8/build/android/gyp/bytecode_processor.pydeps
new file mode 100644
index 0000000000..d8ff396495
--- /dev/null
+++ b/deps/v8/build/android/gyp/bytecode_processor.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py
+../../gn_helpers.py
+bytecode_processor.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/compile_resources.py b/deps/v8/build/android/gyp/compile_resources.py
new file mode 100755
index 0000000000..3f2f5dfe6e
--- /dev/null
+++ b/deps/v8/build/android/gyp/compile_resources.py
@@ -0,0 +1,916 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compile Android resources into an intermediate APK.
+
+This can also generate an R.txt and a .srcjar file containing the proper
+final R.java class for all resource packages the APK depends on.
+
+This will crunch images with aapt2.
+"""
+
+import argparse
+import collections
+import contextlib
+import multiprocessing.pool
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+
+# Name of environment variable that can be used to force this script to
+# put temporary resource files into specific sub-directories, instead of
+# temporary ones.
+_ENV_DEBUG_VARIABLE = 'ANDROID_DEBUG_TEMP_RESOURCES_DIR'
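+# E.g. (illustrative): ANDROID_DEBUG_TEMP_RESOURCES_DIR=/tmp/res_debug keeps
+# the intermediates under /tmp/res_debug/<apk basename> for inspection.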
+
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+# Pngs that we shouldn't convert to webp. Please add rationale when updating.
+_PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([
+ # Crashes on Galaxy S5 running L (https://crbug.com/807059).
+ r'.*star_gray\.png',
+ # Android requires pngs for 9-patch images.
+ r'.*\.9\.png',
+ # Daydream requires pngs for icon files.
+ r'.*daydream_icon_.*\.png']))
+
+
+def _ListToDictionary(lst, separator):
+  """Splits each element of the passed-in |lst| using |separator| and creates
+  a dictionary, treating the first element of the split as the key and the
+  second as the value."""
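+  # E.g. _ListToDictionary(['a=1', 'b=2'], '=') -> {'a': '1', 'b': '2'}.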
+ return dict(item.split(separator, 1) for item in lst)
+
+
+def _ParseArgs(args):
+ """Parses command line options.
+
+ Returns:
+ An options object as from argparse.ArgumentParser.parse_args()
+ """
+ parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+ input_opts.add_argument(
+ '--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
+ input_opts.add_argument('--android-manifest', required=True,
+ help='AndroidManifest.xml path')
+ input_opts.add_argument(
+ '--shared-resources',
+ action='store_true',
+ help='Make all resources in R.java non-final and allow the resource IDs '
+ 'to be reset to a different package index when the apk is loaded by '
+ 'another application at runtime.')
+
+ input_opts.add_argument(
+ '--app-as-shared-lib',
+ action='store_true',
+ help='Same as --shared-resources, but also ensures all resource IDs are '
+ 'directly usable from the APK loaded as an application.')
+
+ input_opts.add_argument(
+ '--package-id',
+ help='Custom package ID for resources (instead of 0x7f). Cannot be used '
+ 'with --shared-resources.')
+
+ input_opts.add_argument(
+ '--package-name-to-id-mapping',
+    help='List containing a mapping from package names to the package IDs '
+         'that will be assigned.')
+
+ input_opts.add_argument(
+ '--package-name',
+ help='Package name that will be used to determine package ID.')
+
+ input_opts.add_argument(
+ '--arsc-package-name', help='Package name to use for resources.arsc file')
+
+ input_opts.add_argument(
+ '--shared-resources-whitelist',
+ help='An R.txt file acting as a whitelist for resources that should be '
+ 'non-final and have their package ID changed at runtime in R.java. '
+ 'Implies and overrides --shared-resources.')
+
+ input_opts.add_argument(
+ '--shared-resources-whitelist-locales',
+ default='[]',
+ help='Optional GN-list of locales. If provided, all strings corresponding'
+ ' to this locale list will be kept in the final output for the '
+ 'resources identified through --shared-resources-whitelist, even '
+ 'if --locale-whitelist is being used.')
+
+ input_opts.add_argument(
+ '--use-resource-ids-path',
+ help='Use resource IDs generated by aapt --emit-ids')
+
+ input_opts.add_argument('--proto-format', action='store_true',
+ help='Compile resources to protocol buffer format.')
+
+ input_opts.add_argument('--support-zh-hk', action='store_true',
+ help='Use zh-rTW resources for zh-rHK.')
+
+ input_opts.add_argument('--debuggable',
+ action='store_true',
+ help='Whether to add android:debuggable="true"')
+
+ input_opts.add_argument('--version-code', help='Version code for apk.')
+ input_opts.add_argument('--version-name', help='Version name for apk.')
+
+ input_opts.add_argument(
+ '--no-compress',
+    help='Disables compression for the given comma-separated list of '
+    'extensions.')
+
+ input_opts.add_argument(
+ '--locale-whitelist',
+ default='[]',
+ help='GN list of languages to include. All other language configs will '
+ 'be stripped out. List may include a combination of Android locales '
+ 'or Chrome locales.')
+
+ input_opts.add_argument('--resource-blacklist-regex', default='',
+ help='Do not include matching drawables.')
+
+ input_opts.add_argument(
+ '--resource-blacklist-exceptions',
+ default='[]',
+ help='GN list of globs that say which blacklisted images to include even '
+ 'when --resource-blacklist-regex is set.')
+
+ input_opts.add_argument('--png-to-webp', action='store_true',
+ help='Convert png files to webp format.')
+
+ input_opts.add_argument('--webp-binary', default='',
+ help='Path to the cwebp binary.')
+
+ input_opts.add_argument('--no-xml-namespaces',
+ action='store_true',
+ help='Whether to strip xml namespaces from processed '
+ 'xml resources')
+ input_opts.add_argument(
+ '--resources-config-path', help='Path to aapt2 resources config file.')
+ input_opts.add_argument(
+ '--optimized-resources-path',
+ help='Output for `aapt2 optimize` (also enables the step).')
+
+ output_opts.add_argument('--apk-path', required=True,
+ help='Path to output (partial) apk.')
+
+ output_opts.add_argument('--apk-info-path', required=True,
+ help='Path to output info file for the partial apk.')
+
+ output_opts.add_argument('--srcjar-out',
+ help='Path to srcjar to contain generated R.java.')
+
+ output_opts.add_argument('--r-text-out',
+ help='Path to store the generated R.txt file.')
+
+ output_opts.add_argument('--proguard-file',
+ help='Path to proguard.txt generated file')
+
+ output_opts.add_argument(
+ '--proguard-file-main-dex',
+ help='Path to proguard.txt generated file for main dex')
+
+ output_opts.add_argument(
+ '--emit-ids-out',
+ help=
+ 'Path to file produced by aapt2 --emit-ids (for use with --stable-ids)')
+
+ options = parser.parse_args(args)
+
+ resource_utils.HandleCommonOptions(options)
+
+ options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist)
+ options.shared_resources_whitelist_locales = build_utils.ParseGnList(
+ options.shared_resources_whitelist_locales)
+ options.resource_blacklist_exceptions = build_utils.ParseGnList(
+ options.resource_blacklist_exceptions)
+
+ if options.shared_resources and options.app_as_shared_lib:
+ raise Exception('Only one of --app-as-shared-lib or --shared-resources '
+ 'can be used.')
+
+ if options.package_name_to_id_mapping:
+ package_names_list = build_utils.ParseGnList(
+ options.package_name_to_id_mapping)
+ options.package_name_to_id_mapping = _ListToDictionary(
+ package_names_list, '=')
+
+ return options
+
+
+def _SortZip(original_path, sorted_path):
+ """Generate new zip archive by sorting all files in the original by name."""
+ with zipfile.ZipFile(sorted_path, 'w') as sorted_zip, \
+ zipfile.ZipFile(original_path, 'r') as original_zip:
+ for info in sorted(original_zip.infolist(), key=lambda i: i.filename):
+ sorted_zip.writestr(info, original_zip.read(info))
+
+
+def _IterFiles(root_dir):
+ for root, _, files in os.walk(root_dir):
+ for f in files:
+ yield os.path.join(root, f)
+
+
+def _DuplicateZhResources(resource_dirs):
+ """Duplicate Taiwanese resources into Hong-Kong specific directory."""
+ renamed_paths = dict()
+ for resource_dir in resource_dirs:
+ # We use zh-TW resources for zh-HK (if we have zh-TW resources).
+ for path in _IterFiles(resource_dir):
+ if 'zh-rTW' in path:
+ hk_path = path.replace('zh-rTW', 'zh-rHK')
+ build_utils.MakeDirectory(os.path.dirname(hk_path))
+ shutil.copyfile(path, hk_path)
+ renamed_paths[os.path.relpath(hk_path, resource_dir)] = os.path.relpath(
+ path, resource_dir)
+ return renamed_paths
+
+
+def _RenameLocaleResourceDirs(resource_dirs):
+ """Rename locale resource directories into standard names when necessary.
+
+ This is necessary to deal with the fact that older Android releases only
+ support ISO 639-1 two-letter codes, and sometimes even obsolete versions
+ of them.
+
+ In practice it means:
+ * 3-letter ISO 639-2 qualifiers are renamed under a corresponding
+ 2-letter one. E.g. for Filipino, strings under values-fil/ will be moved
+ to a new corresponding values-tl/ sub-directory.
+
+  * Modern ISO 639-1 codes will be renamed to their obsolete variant
+    for Indonesian, Hebrew and Yiddish (e.g. 'values-id/ -> values-in/').
+
+ * Norwegian macrolanguage strings will be renamed to Bokmål (main
+ Norway language). See http://crbug.com/920960. In practice this
+ means that 'values-no/ -> values-nb/' unless 'values-nb/' already
+ exists.
+
+  * BCP 47 language tags will be renamed to an equivalent ISO 639-1
+ locale qualifier if possible (e.g. 'values-b+en+US/ -> values-en-rUS').
+ Though this is not necessary at the moment, because no third-party
+ package that Chromium links against uses these for the current list of
+ supported locales, this may change when the list is extended in the
+    future.
+
+ Args:
+ resource_dirs: list of top-level resource directories.
+ Returns:
+ A dictionary mapping renamed paths to their original location
+ (e.g. '.../values-tl/strings.xml' -> ' .../values-fil/strings.xml').
+ """
+ renamed_paths = dict()
+ for resource_dir in resource_dirs:
+ for path in _IterFiles(resource_dir):
+ locale = resource_utils.FindLocaleInStringResourceFilePath(path)
+ if not locale:
+ continue
+ cr_locale = resource_utils.ToChromiumLocaleName(locale)
+ if not cr_locale:
+ continue # Unsupported Android locale qualifier!?
+ locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
+ if locale != locale2:
+ path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
+ if path == path2:
+ raise Exception('Could not substitute locale %s for %s in %s' %
+ (locale, locale2, path))
+ if os.path.exists(path2):
+ # This happens sometimes, e.g. some libraries provide both
+ # values-nb/ and values-no/ with the same content.
+ continue
+ build_utils.MakeDirectory(os.path.dirname(path2))
+ shutil.move(path, path2)
+ renamed_paths[os.path.relpath(path2, resource_dir)] = os.path.relpath(
+ path, resource_dir)
+ return renamed_paths
+
+
+def _ToAndroidLocales(locale_whitelist, support_zh_hk):
+ """Converts the list of Chrome locales to Android config locale qualifiers.
+
+ Args:
+ locale_whitelist: A list of Chromium locale names.
+ support_zh_hk: True if we need to support zh-HK by duplicating
+ the zh-TW strings.
+ Returns:
+ A set of matching Android config locale qualifier names.
+ """
+ ret = set()
+ for locale in locale_whitelist:
+ locale = resource_utils.ToAndroidLocaleName(locale)
+ if locale is None or ('-' in locale and '-r' not in locale):
+ raise Exception('Unsupported Chromium locale name: %s' % locale)
+ ret.add(locale)
+ # Always keep non-regional fall-backs.
+ language = locale.split('-')[0]
+ ret.add(language)
+
+ # We don't actually support zh-HK in Chrome on Android, but we mimic the
+ # native side behavior where we use zh-TW resources when the locale is set to
+ # zh-HK. See https://crbug.com/780847.
+ if support_zh_hk:
+ assert not any('HK' in l for l in locale_whitelist), (
+ 'Remove special logic if zh-HK is now supported (crbug.com/780847).')
+ ret.add('zh-rHK')
+ return set(ret)
+
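+# Illustrative example (locale mappings per resource_utils, as described
+# above): _ToAndroidLocales(['en-GB', 'fil'], False) -> {'en', 'en-rGB', 'tl'}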
+
+def _MoveImagesToNonMdpiFolders(res_root):
+ """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+ Why? http://crbug.com/289843
+ """
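+  # E.g. res/drawable-mdpi/icon.png -> res/drawable/icon.png, and
+  # res/drawable-mdpi-v21/icon.png -> res/drawable-v21/icon.png.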
+ renamed_paths = dict()
+ for src_dir_name in os.listdir(res_root):
+ src_components = src_dir_name.split('-')
+ if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+ continue
+ src_dir = os.path.join(res_root, src_dir_name)
+ if not os.path.isdir(src_dir):
+ continue
+ dst_components = [c for c in src_components if c != 'mdpi']
+ assert dst_components != src_components
+ dst_dir_name = '-'.join(dst_components)
+ dst_dir = os.path.join(res_root, dst_dir_name)
+ build_utils.MakeDirectory(dst_dir)
+ for src_file_name in os.listdir(src_dir):
+ if not os.path.splitext(src_file_name)[1] in ('.png', '.webp'):
+ continue
+ src_file = os.path.join(src_dir, src_file_name)
+ dst_file = os.path.join(dst_dir, src_file_name)
+ assert not os.path.lexists(dst_file)
+ shutil.move(src_file, dst_file)
+ renamed_paths[os.path.relpath(dst_file, res_root)] = os.path.relpath(
+ src_file, res_root)
+ return renamed_paths
+
+
+def _PackageIdFromOptions(options):
+ package_id = None
+ if options.package_id:
+ package_id = options.package_id
+ if options.package_name:
+ package_id = options.package_name_to_id_mapping.get(options.package_name)
+ if package_id is None:
+ raise Exception(
+ 'Package name %s is not present in package_name_to_id_mapping.' %
+ options.package_name)
+ return package_id
+
+
+def _CreateLinkApkArgs(options):
+ """Create command-line arguments list to invoke 'aapt2 link'.
+
+ Args:
+ options: The command-line options tuple.
+ Returns:
+    A list of strings corresponding to the command-line invocation for
+ the command, matching the arguments from |options|.
+ """
+ link_command = [
+ options.aapt2_path,
+ 'link',
+ '--version-code', options.version_code,
+ '--version-name', options.version_name,
+ '--auto-add-overlay',
+ '--no-version-vectors',
+ ]
+
+ for j in options.include_resources:
+ link_command += ['-I', j]
+ if options.proguard_file:
+ link_command += ['--proguard', options.proguard_file]
+ if options.proguard_file_main_dex:
+ link_command += ['--proguard-main-dex', options.proguard_file_main_dex]
+ if options.emit_ids_out:
+ link_command += ['--emit-ids', options.emit_ids_out]
+
+ if options.no_compress:
+ for ext in options.no_compress.split(','):
+ link_command += ['-0', ext]
+
+ # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
+ # can be used with recent versions of aapt2.
+ if options.proto_format:
+ link_command.append('--proto-format')
+ elif options.shared_resources:
+ link_command.append('--shared-lib')
+
+ if options.no_xml_namespaces:
+ link_command.append('--no-xml-namespaces')
+
+ package_id = _PackageIdFromOptions(options)
+ if package_id is not None:
+ link_command += ['--package-id', package_id, '--allow-reserved-package-id']
+
+ return link_command
+
+
+def _FixManifest(options, temp_dir):
+ """Fix the APK's AndroidManifest.xml.
+
+ This adds any missing namespaces for 'android' and 'tools', and
+  sets certain elements like 'platformBuildVersionCode' or
+ 'android:debuggable' depending on the content of |options|.
+
+ Args:
+ options: The command-line arguments tuple.
+ temp_dir: A temporary directory where the fixed manifest will be written to.
+ Returns:
+ Tuple of:
+ * Manifest path within |temp_dir|.
+ * Original package_name (if different from arsc_package_name).
+ """
+ def maybe_extract_version(j):
+ try:
+ return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
+ except build_utils.CalledProcessError:
+ return None
+
+ android_sdk_jars = [j for j in options.include_resources
+ if os.path.basename(j) in ('android.jar',
+ 'android_system.jar')]
+ extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
+ successful_extractions = [x for x in extract_all if x]
+ if len(successful_extractions) == 0:
+ raise Exception(
+ 'Unable to find android SDK jar among candidates: %s'
+ % ', '.join(android_sdk_jars))
+ elif len(successful_extractions) > 1:
+ raise Exception(
+ 'Found multiple android SDK jars among candidates: %s'
+ % ', '.join(android_sdk_jars))
+ version_code, version_name = successful_extractions.pop()[:2]
+
+ debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
+ doc, manifest_node, app_node = resource_utils.ParseAndroidManifest(
+ options.android_manifest)
+
+ manifest_node.set('platformBuildVersionCode', version_code)
+ manifest_node.set('platformBuildVersionName', version_name)
+
+ orig_package = manifest_node.get('package')
+ if options.arsc_package_name:
+ manifest_node.set('package', options.arsc_package_name)
+
+ if options.debuggable:
+ app_node.set('{%s}%s' % (resource_utils.ANDROID_NAMESPACE, 'debuggable'),
+ 'true')
+
+ with open(debug_manifest_path, 'w') as debug_manifest:
+ debug_manifest.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))
+
+ return debug_manifest_path, orig_package
+
+
+def _ResourceNameFromPath(path):
+ return os.path.splitext(os.path.basename(path))[0]
+
+
+def _CreateKeepPredicate(resource_dirs, resource_blacklist_regex,
+ resource_blacklist_exceptions):
+ """Return a predicate lambda to determine which resource files to keep.
+
+ Args:
+ resource_dirs: list of top-level resource directories.
+ resource_blacklist_regex: A regular expression describing all resources
+ to exclude, except if they are mip-maps, or if they are listed
+ in |resource_blacklist_exceptions|.
+ resource_blacklist_exceptions: A list of glob patterns corresponding
+ to exceptions to the |resource_blacklist_regex|.
+ Returns:
+ A lambda that takes a path, and returns true if the corresponding file
+ must be kept.
+ """
+ naive_predicate = lambda path: os.path.basename(path)[0] != '.'
+ if resource_blacklist_regex == '':
+ # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways.
+ return naive_predicate
+
+  # The regex is non-empty past this point (see the early return above).
+  # A simple predicate that only removes (returns False for) paths covered by
+  # the blacklist regex, except if they are mipmaps, or listed as exceptions.
+  naive_predicate = lambda path: (
+      not re.search(resource_blacklist_regex, path) or
+      re.search(r'[/-]mipmap[/-]', path) or
+      build_utils.MatchesGlob(path, resource_blacklist_exceptions))
+
+ # Build a set of all names from drawables kept by naive_predicate().
+ # Used later to ensure that we never exclude drawables from densities
+ # that are filtered-out by naive_predicate().
+ non_filtered_drawables = set()
+ for resource_dir in resource_dirs:
+ for path in _IterFiles(resource_dir):
+ if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path):
+ non_filtered_drawables.add(_ResourceNameFromPath(path))
+
+ # NOTE: Defined as a function, instead of a lambda to avoid the
+ # auto-formatter to put this on a very long line that overflows.
+ def drawable_predicate(path):
+ return (naive_predicate(path)
+ or _ResourceNameFromPath(path) not in non_filtered_drawables)
+
+ return drawable_predicate
+
+
+def _ConvertToWebP(webp_binary, png_files):
+ renamed_paths = dict()
+ pool = multiprocessing.pool.ThreadPool(10)
+ def convert_image(png_path_tuple):
+ png_path, original_dir = png_path_tuple
+ root = os.path.splitext(png_path)[0]
+ webp_path = root + '.webp'
+ args = [webp_binary, png_path, '-mt', '-quiet', '-m', '6', '-q', '100',
+ '-lossless', '-o', webp_path]
+ subprocess.check_call(args)
+ os.remove(png_path)
+ renamed_paths[os.path.relpath(webp_path, original_dir)] = os.path.relpath(
+ png_path, original_dir)
+
+ pool.map(convert_image, [f for f in png_files
+ if not _PNG_WEBP_BLACKLIST_PATTERN.match(f[0])])
+ pool.close()
+ pool.join()
+ return renamed_paths
+
+
+def _CompileDeps(aapt2_path, dep_subdirs, temp_dir):
+ partials_dir = os.path.join(temp_dir, 'partials')
+ build_utils.MakeDirectory(partials_dir)
+ partial_compile_command = [
+ aapt2_path,
+ 'compile',
+ # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
+ # '--no-crunch',
+ ]
+ pool = multiprocessing.pool.ThreadPool(10)
+ def compile_partial(directory):
+ dirname = os.path.basename(directory)
+ partial_path = os.path.join(partials_dir, dirname + '.zip')
+ compile_command = (partial_compile_command +
+ ['--dir', directory, '-o', partial_path])
+ build_utils.CheckOutput(
+ compile_command,
+ stderr_filter=lambda output:
+ build_utils.FilterLines(
+ output, r'ignoring configuration .* for styleable'))
+
+ # Sorting the files in the partial ensures deterministic output from the
+ # aapt2 link step which uses order of files in the partial.
+ sorted_partial_path = os.path.join(partials_dir, dirname + '.sorted.zip')
+ _SortZip(partial_path, sorted_partial_path)
+
+ return sorted_partial_path
+
+ partials = pool.map(compile_partial, dep_subdirs)
+ pool.close()
+ pool.join()
+ return partials
+
+
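+# Each line of the generated .info file is either copied verbatim from a
+# dependency's .info file or has the form 'Rename:<dest>,<source>' for files
+# renamed or moved by this script.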
+def _CreateResourceInfoFile(
+ renamed_paths, apk_info_path, dependencies_res_zips):
+ lines = set()
+ for zip_file in dependencies_res_zips:
+ zip_info_file_path = zip_file + '.info'
+ if os.path.exists(zip_info_file_path):
+ with open(zip_info_file_path, 'r') as zip_info_file:
+ lines.update(zip_info_file.readlines())
+ for dest, source in renamed_paths.iteritems():
+ lines.add('Rename:{},{}\n'.format(dest, source))
+ with build_utils.AtomicOutput(apk_info_path) as info_file:
+ info_file.writelines(sorted(lines))
+
+
+def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
+ """Remove localized strings that should not go into the final output.
+
+ Args:
+ dep_subdirs: List of resource dependency directories.
+ options: Command-line options namespace.
+ """
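+  # Worked example (hypothetical sets): with wanted locales A = {en, fr} and
+  # shared-resource locales B = {en, de}, files for locales outside A|B are
+  # deleted, 'de' files keep only whitelisted strings, and 'fr' files keep
+  # only the non-whitelisted ones.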
+ if (not options.locale_whitelist
+ and not options.shared_resources_whitelist_locales):
+ # Keep everything, there is nothing to do.
+ return
+
+ # Collect locale and file paths from the existing subdirs.
+ # The following variable maps Android locale names to
+ # sets of corresponding xml file paths.
+ locale_to_files_map = collections.defaultdict(set)
+ for directory in dep_subdirs:
+ for f in _IterFiles(directory):
+ locale = resource_utils.FindLocaleInStringResourceFilePath(f)
+ if locale:
+ locale_to_files_map[locale].add(f)
+
+ all_locales = set(locale_to_files_map)
+
+ # Set A: wanted locales, either all of them or the
+ # list provided by --locale-whitelist.
+ wanted_locales = all_locales
+ if options.locale_whitelist:
+ wanted_locales = _ToAndroidLocales(options.locale_whitelist,
+ options.support_zh_hk)
+
+ # Set B: shared resources locales, which is either set A
+ # or the list provided by --shared-resources-whitelist-locales
+ shared_resources_locales = wanted_locales
+ shared_names_whitelist = set()
+ if options.shared_resources_whitelist_locales:
+ shared_names_whitelist = set(
+ resource_utils.GetRTxtStringResourceNames(
+ options.shared_resources_whitelist))
+
+ shared_resources_locales = _ToAndroidLocales(
+ options.shared_resources_whitelist_locales, options.support_zh_hk)
+
+ # Remove any file that belongs to a locale not covered by
+ # either A or B.
+ removable_locales = (all_locales - wanted_locales - shared_resources_locales)
+ for locale in removable_locales:
+ for path in locale_to_files_map[locale]:
+ os.remove(path)
+
+ # For any locale in B but not in A, only keep the shared
+ # resource strings in each file.
+ for locale in shared_resources_locales - wanted_locales:
+ for path in locale_to_files_map[locale]:
+ resource_utils.FilterAndroidResourceStringsXml(
+ path, lambda x: x in shared_names_whitelist)
+
+ # For any locale in A but not in B, only keep the strings
+ # that are _not_ from shared resources in the file.
+ for locale in wanted_locales - shared_resources_locales:
+ for path in locale_to_files_map[locale]:
+ resource_utils.FilterAndroidResourceStringsXml(
+ path, lambda x: x not in shared_names_whitelist)
+
+
+def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
+ """Compile resources with aapt2 and generate intermediate .ap_ file.
+
+ Args:
+ options: The command-line options tuple. E.g. the generated apk
+ will be written to |options.apk_path|.
+ dep_subdirs: The list of directories where dependency resource zips
+ were extracted (its content will be altered by this function).
+ temp_dir: A temporary directory.
+ gen_dir: Another temp directory where some intermediate files are
+ generated.
+    r_txt_path: The path where the R.txt file will be written to.
+ """
+ renamed_paths = dict()
+ renamed_paths.update(_DuplicateZhResources(dep_subdirs))
+ renamed_paths.update(_RenameLocaleResourceDirs(dep_subdirs))
+
+ _RemoveUnwantedLocalizedStrings(dep_subdirs, options)
+
+ # Create a function that selects which resource files should be packaged
+ # into the final output. Any file that does not pass the predicate will
+ # be removed below.
+ keep_predicate = _CreateKeepPredicate(dep_subdirs,
+ options.resource_blacklist_regex,
+ options.resource_blacklist_exceptions)
+ png_paths = []
+ for directory in dep_subdirs:
+ for f in _IterFiles(directory):
+ if not keep_predicate(f):
+ os.remove(f)
+ elif f.endswith('.png'):
+ png_paths.append((f, directory))
+ if png_paths and options.png_to_webp:
+ renamed_paths.update(_ConvertToWebP(options.webp_binary, png_paths))
+ for directory in dep_subdirs:
+ renamed_paths.update(_MoveImagesToNonMdpiFolders(directory))
+
+ link_command = _CreateLinkApkArgs(options)
+ # TODO(digit): Is this below actually required for R.txt generation?
+ link_command += ['--java', gen_dir]
+
+ fixed_manifest, orig_package = _FixManifest(options, temp_dir)
+ link_command += [
+ '--manifest', fixed_manifest, '--rename-manifest-package', orig_package
+ ]
+
+ partials = _CompileDeps(options.aapt2_path, dep_subdirs, temp_dir)
+ for partial in partials:
+ link_command += ['-R', partial]
+
+ # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
+ # Also creates R.txt
+ with build_utils.AtomicOutput(options.apk_path) as unoptimized, \
+ build_utils.AtomicOutput(r_txt_path) as r_txt, \
+ _MaybeCreateStableIdsFile(options) as stable_ids:
+ if stable_ids:
+ link_command += ['--stable-ids', stable_ids.name]
+ link_command += ['-o', unoptimized.name]
+ link_command += ['--output-text-symbols', r_txt.name]
+ build_utils.CheckOutput(
+ link_command, print_stdout=False, print_stderr=False)
+
+ if options.optimized_resources_path:
+ with build_utils.AtomicOutput(options.optimized_resources_path) as opt:
+ _OptimizeApk(opt.name, options, temp_dir, unoptimized.name, r_txt.name)
+
+ _CreateResourceInfoFile(
+ renamed_paths, options.apk_info_path, options.dependencies_res_zips)
+
+
+def _OptimizeApk(output, options, temp_dir, unoptimized_apk_path, r_txt_path):
+ """Optimize intermediate .ap_ file with aapt2.
+
+ Args:
+ output: Path to write to.
+ options: The command-line options.
+ temp_dir: A temporary directory.
+ unoptimized_apk_path: path of the apk to optimize.
+ r_txt_path: path to the R.txt file of the unoptimized apk.
+ """
+ # Resources of type ID are references to UI elements/views. They are used by
+  # UI automation testing frameworks. They are kept so that they don't break
+  # tests, even though they may not actually be used at runtime. See
+ # https://crbug.com/900993
+ id_resources = _ExtractIdResources(r_txt_path)
+ gen_config_path = os.path.join(temp_dir, 'aapt2.config')
+ if options.resources_config_path:
+ shutil.copyfile(options.resources_config_path, gen_config_path)
+ with open(gen_config_path, 'a+') as config:
+ for resource in id_resources:
+ config.write('{}#no_obfuscate\n'.format(resource))
+
+  # Optimize the resources.arsc file by obfuscating resource names, only
+  # allowing usage via R.java constants.
+ optimize_command = [
+ options.aapt2_path,
+ 'optimize',
+ '--enable-resource-obfuscation',
+ '-o',
+ output,
+ '--resources-config-path',
+ gen_config_path,
+ unoptimized_apk_path,
+ ]
+ build_utils.CheckOutput(
+ optimize_command, print_stdout=False, print_stderr=False)
+
+
+def _ExtractIdResources(rtxt_path):
+ """Extract resources of type ID from the R.txt file
+
+ Args:
+ rtxt_path: Path to R.txt file with all the resources
+ Returns:
+ List of id resources in the form of id/<resource_name>
+ """
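+  # An ID entry in R.txt looks like 'int id my_view 0x7f0b0012' (the name is
+  # made up), which becomes 'id/my_view' here.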
+ id_resources = []
+ with open(rtxt_path) as rtxt:
+ for line in rtxt:
+ if ' id ' in line:
+ resource_name = line.split()[2]
+ id_resources.append('id/{}'.format(resource_name))
+ return id_resources
+
+
+@contextlib.contextmanager
+def _MaybeCreateStableIdsFile(options):
+ """Transforms a file generated by --emit-ids from another package.
+
+ --stable-ids is generally meant to be used by different versions of the same
+ package. To make it work for other packages, we need to transform the package
+ name references to match the package that resources are being generated for.
+
+ Note: This will fail if the package ID of the resources in
+ |options.use_resource_ids_path| does not match the package ID of the
+ resources being linked.
+
+ Args:
+ options: The command-line options
+ Yields:
+    The transformed resource IDs file object (lines formatted like
+    package:type/name = 0xPPTTEEEE), or None; callers use its .name attribute.
+ """
+ if options.use_resource_ids_path:
+ package_name = options.package_name
+ if not package_name:
+ package_name = resource_utils.ExtractPackageFromManifest(
+ options.android_manifest)
+ with open(options.use_resource_ids_path) as stable_ids_file:
+ with tempfile.NamedTemporaryFile() as output_ids_file:
+ output_stable_ids = re.sub(
+ r'^.*?:',
+ package_name + ':',
+ stable_ids_file.read(),
+ flags=re.MULTILINE)
+ output_ids_file.write(output_stable_ids)
+ output_ids_file.flush()
+ yield output_ids_file
+ else:
+ yield None
+
+
+def _WriteFinalRTxtFile(options, aapt_r_txt_path):
+ """Determine final R.txt and return its location.
+
+ This handles --r-text-in and --r-text-out options at the same time.
+
+ Args:
+ options: The command-line options tuple.
+ aapt_r_txt_path: The path to the R.txt generated by aapt.
+ Returns:
+ Path to the final R.txt file.
+ """
+ if options.r_text_in:
+ r_txt_file = options.r_text_in
+ else:
+ # When an empty res/ directory is passed, aapt does not write an R.txt.
+ r_txt_file = aapt_r_txt_path
+ if not os.path.exists(r_txt_file):
+ build_utils.Touch(r_txt_file)
+
+ if options.r_text_out:
+ shutil.copyfile(r_txt_file, options.r_text_out)
+
+ return r_txt_file
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ debug_temp_resources_dir = os.environ.get(_ENV_DEBUG_VARIABLE)
+ if debug_temp_resources_dir:
+ debug_temp_resources_dir = os.path.join(debug_temp_resources_dir,
+ os.path.basename(options.apk_path))
+ build_utils.DeleteDirectory(debug_temp_resources_dir)
+ build_utils.MakeDirectory(debug_temp_resources_dir)
+
+ with resource_utils.BuildContext(debug_temp_resources_dir) as build:
+ dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
+ build.deps_dir)
+
+ _PackageApk(options, dep_subdirs, build.temp_dir, build.gen_dir,
+ build.r_txt_path)
+
+ r_txt_path = _WriteFinalRTxtFile(options, build.r_txt_path)
+
+    # If --shared-resources-whitelist is used, all resources listed in
+    # the corresponding R.txt file will be non-final, and an onResourcesLoaded()
+    # method will be generated to adjust them at runtime.
+    #
+    # Otherwise, if --shared-resources is used, all resources will be
+    # non-final, and an onResourcesLoaded() method will be generated too.
+ #
+ # Otherwise, all resources will be final, and no method will be generated.
+ #
+ rjava_build_options = resource_utils.RJavaBuildOptions()
+ if options.shared_resources_whitelist:
+ rjava_build_options.ExportSomeResources(
+ options.shared_resources_whitelist)
+ rjava_build_options.GenerateOnResourcesLoaded()
+ elif options.shared_resources or options.app_as_shared_lib:
+ rjava_build_options.ExportAllResources()
+ rjava_build_options.GenerateOnResourcesLoaded()
+
+ resource_utils.CreateRJavaFiles(
+ build.srcjar_dir, None, r_txt_path, options.extra_res_packages,
+ options.extra_r_text_files, rjava_build_options)
+
+ if options.srcjar_out:
+ build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)
+
+ # Sanity check that the created resources have the expected package ID.
+ expected_id = _PackageIdFromOptions(options)
+ if expected_id is None:
+ expected_id = '0x00' if options.shared_resources else '0x7f'
+ expected_id = int(expected_id, 16)
+ _, package_id = resource_utils.ExtractArscPackage(options.aapt2_path,
+ options.apk_path)
+ if package_id != expected_id:
+ raise Exception(
+ 'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.apk_path,
+ inputs=options.dependencies_res_zips + options.extra_r_text_files,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/compile_resources.pydeps b/deps/v8/build/android/gyp/compile_resources.pydeps
new file mode 100644
index 0000000000..2ffcb52a2c
--- /dev/null
+++ b/deps/v8/build/android/gyp/compile_resources.pydeps
@@ -0,0 +1,29 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+compile_resources.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/copy_ex.py b/deps/v8/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000000..48d1b26df1
--- /dev/null
+++ b/deps/v8/build/android/gyp/copy_ex.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+import filecmp
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+ """Returns a list of all the files in |base|. Each entry is relative to the
+ last path entry of |base|."""
+ result = []
+ dirname = os.path.dirname(base)
+ for root, _, files in os.walk(base):
+ result.extend([os.path.join(root[len(dirname):], f) for f in files])
+ return result
+
+def CopyFile(f, dest, deps):
+ """Copy file or directory and update deps."""
+ if os.path.isdir(f):
+ shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
+ deps.extend(_get_all_files(f))
+ else:
+ if os.path.isfile(os.path.join(dest, os.path.basename(f))):
+ dest = os.path.join(dest, os.path.basename(f))
+
+ deps.append(f)
+
+ if os.path.isfile(dest):
+ if filecmp.cmp(dest, f, shallow=False):
+ return
+ # The shutil.copy() below would fail if the file does not have write
+ # permissions. Deleting the file has similar costs to modifying the
+ # permissions.
+ os.unlink(dest)
+
+ shutil.copy(f, dest)
+
+def DoCopy(options, deps):
+ """Copy files or directories given in options.files and update deps."""
+ files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f)
+ for f in options.files))
+
+ for f in files:
+ if os.path.isdir(f) and not options.clear:
+ print ('To avoid stale files you must use --clear when copying '
+ 'directories')
+ sys.exit(-1)
+ CopyFile(f, options.dest, deps)
+
+def DoRenaming(options, deps):
+ """Copy and rename files given in options.renaming_sources and update deps."""
+ src_files = list(itertools.chain.from_iterable(
+ build_utils.ParseGnList(f)
+ for f in options.renaming_sources))
+
+ dest_files = list(itertools.chain.from_iterable(
+ build_utils.ParseGnList(f)
+ for f in options.renaming_destinations))
+
+  if len(src_files) != len(dest_files):
+    print('Renaming source and destination file counts do not match.')
+ sys.exit(-1)
+
+ for src, dest in itertools.izip(src_files, dest_files):
+ if os.path.isdir(src):
+      print('Renaming a directory is not supported.')
+ sys.exit(-1)
+ else:
+ CopyFile(src, os.path.join(options.dest, dest), deps)
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--dest', help='Directory to copy files to.')
+ parser.add_option('--files', action='append',
+ help='List of files to copy.')
+ parser.add_option('--clear', action='store_true',
+ help='If set, the destination directory will be deleted '
+ 'before copying files to it. This is highly recommended to '
+ 'ensure that no stale files are left in the directory.')
+ parser.add_option('--stamp', help='Path to touch on success.')
+ parser.add_option('--renaming-sources',
+ action='append',
+                    help='List of files that need to be renamed while being '
+                    'copied to the dest directory.')
+ parser.add_option('--renaming-destinations',
+ action='append',
+                    help='List of destination file names without paths; the '
+                    'number of elements must match --renaming-sources.')
+
+ options, _ = parser.parse_args(args)
+
+ if options.clear:
+ build_utils.DeleteDirectory(options.dest)
+ build_utils.MakeDirectory(options.dest)
+
+ deps = []
+
+ if options.files:
+ DoCopy(options, deps)
+
+ if options.renaming_sources:
+ DoRenaming(options, deps)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile, options.stamp, deps, add_pydeps=False)
+
+ if options.stamp:
+ build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/copy_ex.pydeps b/deps/v8/build/android/gyp/copy_ex.pydeps
new file mode 100644
index 0000000000..e0fb31eaa9
--- /dev/null
+++ b/deps/v8/build/android/gyp/copy_ex.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_apk_operations_script.py b/deps/v8/build/android/gyp/create_apk_operations_script.py
new file mode 100755
index 0000000000..cd2722f9c0
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_apk_operations_script.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import string
+import sys
+
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_apk_operations_script.py
+
+import os
+import sys
+
+def main():
+ script_directory = os.path.dirname(__file__)
+ resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+ script_directory, p))
+ sys.path.append(resolve(${APK_OPERATIONS_DIR}))
+ import apk_operations
+ output_dir = resolve(${OUTPUT_DIR})
+ try:
+ apk_operations.Run(
+ output_dir,
+ resolve(${APK_PATH}),
+ resolve(${INC_JSON_PATH}),
+ ${FLAGS_FILE},
+ ${TARGET_CPU},
+ resolve(${MAPPING_PATH}))
+ except TypeError:
+ rel_output_dir = os.path.relpath(output_dir)
+ rel_script_path = os.path.relpath(sys.argv[0], output_dir)
+ sys.stderr.write('Script out-of-date. Rebuild via:\\n')
+ sys.stderr.write(' ninja -C %s %s\\n' % (rel_output_dir, rel_script_path))
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main())
+""")
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--script-output-path',
+ help='Output path for executable script.')
+ parser.add_argument('--apk-path')
+ parser.add_argument('--incremental-install-json-path')
+ parser.add_argument('--command-line-flags-file')
+ parser.add_argument('--target-cpu')
+ parser.add_argument('--proguard-mapping-path')
+ args = parser.parse_args(args)
+
+ def relativize(path):
+ """Returns the path relative to the output script directory."""
+ if path is None:
+ return path
+ return os.path.relpath(path, os.path.dirname(args.script_output_path))
+ apk_operations_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+ apk_operations_dir = relativize(apk_operations_dir)
+
+ with open(args.script_output_path, 'w') as script:
+ script_dict = {
+ 'APK_OPERATIONS_DIR': repr(apk_operations_dir),
+ 'OUTPUT_DIR': repr(relativize('.')),
+ 'APK_PATH': repr(relativize(args.apk_path)),
+ 'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)),
+ 'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)),
+ 'FLAGS_FILE': repr(args.command_line_flags_file),
+ 'TARGET_CPU': repr(args.target_cpu),
+ }
+ script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+ os.chmod(args.script_output_path, 0750)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_apk_operations_script.pydeps b/deps/v8/build/android/gyp/create_apk_operations_script.pydeps
new file mode 100644
index 0000000000..9d4dcb8fe5
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_apk_operations_script.pydeps
@@ -0,0 +1,3 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_apk_operations_script.pydeps build/android/gyp/create_apk_operations_script.py
+create_apk_operations_script.py
diff --git a/deps/v8/build/android/gyp/create_app_bundle.py b/deps/v8/build/android/gyp/create_app_bundle.py
new file mode 100755
index 0000000000..9666feb3fe
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create an Android application bundle from one or more bundle modules."""
+
+import argparse
+import itertools
+import json
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+# NOTE: Keep this consistent with the _create_app_bundle_py_imports definition
+# in build/config/android/rules.py
+from util import build_utils
+from util import resource_utils
+
+import bundletool
+
+# Location of language-based assets in bundle modules.
+_LOCALES_SUBDIR = 'assets/locales/'
+
+# The fallback locale should always have its .pak file included in
+# the base apk, i.e. not use language-based asset targeting. This ensures
+# that Chrome won't crash on startup if its bundle is installed on a device
+# with an unsupported system locale (e.g. fur-rIT).
+_FALLBACK_LOCALE = 'en-US'
+
+# List of split dimensions recognized by this tool.
+_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]
+
+# For historical reasons, certain languages identified by Chromium with a
+# 3-letter ISO 639-2 code are mapped to a nearly equivalent 2-letter
+# ISO 639-1 code instead (because older Android releases only supported the
+# latter when matching resources).
+#
+# This map performs the same conversion as is done for Java resources.
+_SHORTEN_LANGUAGE_CODE_MAP = {
+ 'fil': 'tl', # Filipino to Tagalog.
+}
+
+# A list of extensions corresponding to files that should never be compressed
+# in the bundle. This used to be handled by bundletool automatically until
+# release 0.8.0, which required that this be passed to the BundleConfig
+# file instead.
+#
+# This is the original list, which was taken from aapt2, with 'webp' added to
+# it (which curiously was missing from the list).
+_UNCOMPRESSED_FILE_EXTS = [
+    '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'gif', 'imy', 'jet',
+ 'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
+ 'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
+ 'xmf'
+]
+
+
+def _ParseArgs(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--out-bundle', required=True,
+ help='Output bundle zip archive.')
+ parser.add_argument('--module-zips', required=True,
+ help='GN-list of module zip archives.')
+ parser.add_argument(
+ '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
+ parser.add_argument(
+ '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
+ parser.add_argument('--uncompressed-assets', action='append',
+ help='GN-list of uncompressed assets.')
+ parser.add_argument(
+ '--compress-shared-libraries',
+ action='store_true',
+ help='Whether to store native libraries compressed.')
+ parser.add_argument('--split-dimensions',
+ help="GN-list of split dimensions to support.")
+ parser.add_argument(
+ '--base-module-rtxt-path',
+ help='Optional path to the base module\'s R.txt file, only used with '
+ 'language split dimension.')
+ parser.add_argument(
+ '--base-whitelist-rtxt-path',
+ help='Optional path to an R.txt file, string resources '
+ 'listed there _and_ in --base-module-rtxt-path will '
+ 'be kept in the base bundle module, even if language'
+ ' splitting is enabled.')
+
+ parser.add_argument('--keystore-path', help='Keystore path')
+ parser.add_argument('--keystore-password', help='Keystore password')
+ parser.add_argument('--key-name', help='Keystore key name')
+
+ options = parser.parse_args(args)
+ options.module_zips = build_utils.ParseGnList(options.module_zips)
+ options.rtxt_in_paths = build_utils.ExpandFileArgs(options.rtxt_in_paths)
+
+ if len(options.module_zips) == 0:
+ raise Exception('The module zip list cannot be empty.')
+
+ # Signing is optional, but all --keyXX parameters should be set.
+ if options.keystore_path or options.keystore_password or options.key_name:
+ if not options.keystore_path or not options.keystore_password or \
+ not options.key_name:
+ raise Exception('When signing the bundle, use --keystore-path, '
+ '--keystore-password and --key-name.')
+
+ # Merge all uncompressed assets into a set.
+ uncompressed_list = []
+ if options.uncompressed_assets:
+ for l in options.uncompressed_assets:
+ for entry in build_utils.ParseGnList(l):
+ # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
+ pos = entry.find(':')
+ if pos >= 0:
+ uncompressed_list.append(entry[pos + 1:])
+ else:
+ uncompressed_list.append(entry)
+
+ options.uncompressed_assets = set(uncompressed_list)
+
+ # Check that all split dimensions are valid
+ if options.split_dimensions:
+ options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
+ for dim in options.split_dimensions:
+ if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
+ parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
+ dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))
+
+ # As a special case, --base-whitelist-rtxt-path can be empty to indicate
+ # that the module doesn't need such a whitelist. That's because it is easier
+ # to check this condition here than through GN rules :-(
+ if options.base_whitelist_rtxt_path == '':
+ options.base_module_rtxt_path = None
+
+ # Check --base-module-rtxt-path and --base-whitelist-rtxt-path usage.
+ if options.base_module_rtxt_path:
+ if not options.base_whitelist_rtxt_path:
+ parser.error(
+ '--base-module-rtxt-path requires --base-whitelist-rtxt-path')
+ if 'language' not in options.split_dimensions:
+ parser.error('--base-module-rtxt-path is only valid with '
+ 'language-based splits.')
+
+ return options
+
+
+def _MakeSplitDimension(value, enabled):
+ """Return dict modelling a BundleConfig splitDimension entry."""
+ return {'value': value, 'negate': not enabled}
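+# E.g. _MakeSplitDimension('LANGUAGE', True) ->
+#   {'value': 'LANGUAGE', 'negate': False}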
+
+
+def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
+ split_dimensions, base_master_resource_ids):
+ """Generate a dictionary that can be written to a JSON BuildConfig.
+
+ Args:
+    uncompressed_assets: A list or set of file paths under assets/ that should
+      always be stored uncompressed.
+ compress_shared_libraries: Boolean, whether to compress native libs.
+ split_dimensions: list of split dimensions.
+ base_master_resource_ids: Optional list of 32-bit resource IDs to keep
+ inside the base module, even when split dimensions are enabled.
+ Returns:
+ A dictionary that can be written as a json file.
+ """
+ # Compute splitsConfig list. Each item is a dictionary that can have
+ # the following keys:
+ # 'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
+ # 'negate': Boolean, True to indicate that the bundle should *not* be
+ # split (unused at the moment by this script).
+
+ split_dimensions = [ _MakeSplitDimension(dim, dim in split_dimensions)
+ for dim in _ALL_SPLIT_DIMENSIONS ]
+
+ # Native libraries loaded by the crazy linker.
+ # Whether other .so files are compressed is controlled by
+ # "uncompressNativeLibraries".
+ uncompressed_globs = ['lib/*/crazy.*']
+ # Locale-specific pak files stored in bundle splits need not be compressed.
+ uncompressed_globs.extend(
+ ['assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'])
+ uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
+ # NOTE: Use '**' instead of '*' to work through directories!
+ uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
+
+ data = {
+ 'optimizations': {
+ 'splitsConfig': {
+ 'splitDimension': split_dimensions,
+ },
+ 'uncompressNativeLibraries': {
+ 'enabled': not compress_shared_libraries,
+ },
+ },
+ 'compression': {
+ 'uncompressedGlob': sorted(uncompressed_globs),
+ },
+ }
+
+ if base_master_resource_ids:
+ data['master_resources'] = {
+ 'resource_ids': list(base_master_resource_ids),
+ }
+
+ return json.dumps(data, indent=2)
+
+
+def _RewriteLanguageAssetPath(src_path):
+ """Rewrite the destination path of a locale asset for language-based splits.
+
+ Should only be used when generating bundles with language-based splits.
+ This will rewrite paths that look like locales/<locale>.pak into
+ locales#<language>/<locale>.pak, where <language> is the language code
+ from the locale.
+
+ Returns new path.
+ """
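+  # E.g. 'assets/locales/en-GB.pak' -> 'assets/locales#lang_en/en-GB.pak',
+  # while the fallback 'assets/locales/en-US.pak' ->
+  # 'assets/fallback-locales/en-US.pak'.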
+ if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
+    return src_path
+
+ locale = src_path[len(_LOCALES_SUBDIR):-4]
+ android_locale = resource_utils.ToAndroidLocaleName(locale)
+
+ # The locale format is <lang>-<region> or <lang>. Extract the language.
+ pos = android_locale.find('-')
+ if pos >= 0:
+ android_language = android_locale[:pos]
+ else:
+ android_language = android_locale
+
+ if locale == _FALLBACK_LOCALE:
+ # Fallback locale .pak files must be placed in a different directory
+ # to ensure they are always stored in the base module.
+ result_path = 'assets/fallback-locales/%s.pak' % locale
+ else:
+ # Other language .pak files go into a language-specific asset directory
+ # that bundletool will store in separate split APKs.
+ result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)
+
+ return result_path
+
+
+def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
+ """Splits assets in a module if needed.
+
+ Args:
+ src_module_zip: input zip module path.
+ tmp_dir: Path to temporary directory, where the new output module might
+ be written to.
+ split_dimensions: list of split dimensions.
+
+ Returns:
+    If the module doesn't need asset targeting, does nothing and returns
+    src_module_zip. Otherwise, creates a new module zip archive under
+    tmp_dir with the same file name, but which contains asset paths targeting
+    the proper dimensions.
+ """
+ split_language = 'LANGUAGE' in split_dimensions
+ if not split_language:
+ # Nothing to target, so return original module path.
+ return src_module_zip
+
+ with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
+ language_files = [
+ f for f in src_zip.namelist() if f.startswith(_LOCALES_SUBDIR)]
+
+ if not language_files:
+      # No language-based assets to split in this module.
+ return src_module_zip
+
+ tmp_zip = os.path.join(tmp_dir, os.path.basename(src_module_zip))
+ with zipfile.ZipFile(tmp_zip, 'w') as dst_zip:
+ for info in src_zip.infolist():
+ src_path = info.filename
+ is_compressed = info.compress_type != zipfile.ZIP_STORED
+
+ dst_path = src_path
+ if src_path in language_files:
+ dst_path = _RewriteLanguageAssetPath(src_path)
+
+ build_utils.AddToZipHermetic(
+ dst_zip,
+ dst_path,
+ data=src_zip.read(src_path),
+ compress=is_compressed)
+
+ return tmp_zip
+
+
+def _GenerateBaseResourcesWhitelist(base_module_rtxt_path,
+ base_whitelist_rtxt_path):
+ """Generate a whitelist of base master resource ids.
+
+ Args:
+ base_module_rtxt_path: Path to base module R.txt file.
+ base_whitelist_rtxt_path: Path to base whitelist R.txt file.
+ Returns:
+ list of resource ids.
+ """
+ ids_map = resource_utils.GenerateStringResourcesWhitelist(
+ base_module_rtxt_path, base_whitelist_rtxt_path)
+ return ids_map.keys()
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ split_dimensions = []
+ if options.split_dimensions:
+ split_dimensions = [x.upper() for x in options.split_dimensions]
+
+ with build_utils.TempDir() as tmp_dir:
+ module_zips = [
+ _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
+ for module in options.module_zips]
+
+ base_master_resource_ids = None
+ if options.base_module_rtxt_path:
+ base_master_resource_ids = _GenerateBaseResourcesWhitelist(
+ options.base_module_rtxt_path, options.base_whitelist_rtxt_path)
+
+ bundle_config = _GenerateBundleConfigJson(
+ options.uncompressed_assets, options.compress_shared_libraries,
+ split_dimensions, base_master_resource_ids)
+
+ tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')
+
+ tmp_unsigned_bundle = tmp_bundle
+ if options.keystore_path:
+ tmp_unsigned_bundle = tmp_bundle + '.unsigned'
+
+ # Important: bundletool requires that the bundle config file is
+ # named with a .pb.json extension.
+ tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'
+
+ with open(tmp_bundle_config, 'w') as f:
+ f.write(bundle_config)
+
+ cmd_args = ['java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle']
+ cmd_args += ['--modules=%s' % ','.join(module_zips)]
+ cmd_args += ['--output=%s' % tmp_unsigned_bundle]
+ cmd_args += ['--config=%s' % tmp_bundle_config]
+
+ build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)
+
+ if options.keystore_path:
+ # NOTE: As stated by the public documentation, apksigner cannot be used
+ # to sign the bundle (because it rejects anything that isn't an APK).
+ # The signature and digest algorithm selection come from the internal
+ # App Bundle documentation. There is no corresponding public doc :-(
+ signing_cmd_args = [
+ 'jarsigner', '-sigalg', 'SHA256withRSA', '-digestalg', 'SHA-256',
+ '-keystore', 'file:' + options.keystore_path,
+        '-storepass', options.keystore_password,
+ '-signedjar', tmp_bundle,
+ tmp_unsigned_bundle,
+ options.key_name,
+ ]
+ build_utils.CheckOutput(signing_cmd_args, print_stderr=True)
+
+ shutil.move(tmp_bundle, options.out_bundle)
+
+ if options.rtxt_out_path:
+ with open(options.rtxt_out_path, 'w') as rtxt_out:
+ for rtxt_in_path in options.rtxt_in_paths:
+ with open(rtxt_in_path, 'r') as rtxt_in:
+ rtxt_out.write('-- Contents of {}\n'.format(
+ os.path.basename(rtxt_in_path)))
+ rtxt_out.write(rtxt_in.read())
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/create_app_bundle.pydeps b/deps/v8/build/android/gyp/create_app_bundle.pydeps
new file mode 100644
index 0000000000..fef04fab53
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle.pydeps build/android/gyp/create_app_bundle.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+bundletool.py
+create_app_bundle.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py
new file mode 100755
index 0000000000..f01691e418
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an .apks from an .aab with only English strings."""
+
+import argparse
+import os
+import sys
+
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import app_bundle_utils
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument(
+ '--bundle', required=True, help='Path to input .aab file.')
+ parser.add_argument(
+ '--output', required=True, help='Path to output .apks file.')
+ parser.add_argument('--aapt2-path', required=True, help='Path to aapt2.')
+ parser.add_argument(
+ '--keystore-path', required=True, help='Path to keystore.')
+ parser.add_argument(
+ '--keystore-password', required=True, help='Keystore password.')
+ parser.add_argument(
+      '--keystore-name', required=True, help='Key name within keystore.')
+
+ args = parser.parse_args()
+
+ app_bundle_utils.GenerateBundleApks(
+ args.bundle,
+ args.output,
+ args.aapt2_path,
+ args.keystore_path,
+ args.keystore_password,
+ args.keystore_name,
+ minimal=True,
+ check_for_noop=False)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps
new file mode 100644
index 0000000000..cd5b08158f
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps
@@ -0,0 +1,33 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_minimal_apks.pydeps build/android/gyp/create_app_bundle_minimal_apks.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+bundletool.py
+create_app_bundle_minimal_apks.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/create_bundle_wrapper_script.py b/deps/v8/build/android/gyp/create_bundle_wrapper_script.py
new file mode 100755
index 0000000000..a1a34fe77c
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_bundle_wrapper_script.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a wrapper script to manage an Android App Bundle."""
+
+import argparse
+import os
+import string
+import sys
+
+# Import apk_operations even though this script doesn't use it so that
+# targets that depend on the wrapper scripts will rebuild when apk_operations
+# or its deps change.
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.pardir))
+import apk_operations # pylint: disable=unused-import
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_bundle_wrapper_script.py
+
+import os
+import sys
+
+def main():
+ script_directory = os.path.dirname(__file__)
+ resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+ script_directory, p))
+ sys.path.append(resolve(${WRAPPED_SCRIPT_DIR}))
+ import apk_operations
+
+ apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}),
+ bundle_path=resolve(${BUNDLE_PATH}),
+ bundle_apks_path=resolve(${BUNDLE_APKS_PATH}),
+ aapt2_path=resolve(${AAPT2_PATH}),
+ keystore_path=resolve(${KEYSTORE_PATH}),
+ keystore_password=${KEYSTORE_PASSWORD},
+ keystore_alias=${KEY_NAME},
+ package_name=${PACKAGE_NAME},
+ command_line_flags_file=${FLAGS_FILE},
+ proguard_mapping_path=resolve(${MAPPING_PATH}),
+ target_cpu=${TARGET_CPU},
+ system_image_locales=${SYSTEM_IMAGE_LOCALES})
+
+if __name__ == '__main__':
+ sys.exit(main())
+""")
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--script-output-path', required=True,
+ help='Output path for executable script.')
+ parser.add_argument('--bundle-path', required=True)
+ parser.add_argument('--bundle-apks-path', required=True)
+ parser.add_argument('--package-name', required=True)
+ parser.add_argument('--aapt2-path', required=True)
+ parser.add_argument('--keystore-path', required=True)
+ parser.add_argument('--keystore-password', required=True)
+ parser.add_argument('--key-name', required=True)
+ parser.add_argument('--command-line-flags-file')
+ parser.add_argument('--proguard-mapping-path')
+ parser.add_argument('--target-cpu')
+ parser.add_argument('--system-image-locales')
+ args = parser.parse_args(args)
+
+ def relativize(path):
+ """Returns the path relative to the output script directory."""
+ if path is None:
+ return path
+ return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+ wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+ wrapped_script_dir = relativize(wrapped_script_dir)
+
+ with open(args.script_output_path, 'w') as script:
+ script_dict = {
+ 'WRAPPED_SCRIPT_DIR':
+ repr(wrapped_script_dir),
+ 'OUTPUT_DIR':
+ repr(relativize('.')),
+ 'BUNDLE_PATH':
+ repr(relativize(args.bundle_path)),
+ 'BUNDLE_APKS_PATH':
+ repr(relativize(args.bundle_apks_path)),
+ 'PACKAGE_NAME':
+ repr(args.package_name),
+ 'AAPT2_PATH':
+ repr(relativize(args.aapt2_path)),
+ 'KEYSTORE_PATH':
+ repr(relativize(args.keystore_path)),
+ 'KEYSTORE_PASSWORD':
+ repr(args.keystore_password),
+ 'KEY_NAME':
+ repr(args.key_name),
+ 'MAPPING_PATH':
+ repr(relativize(args.proguard_mapping_path)),
+ 'FLAGS_FILE':
+ repr(args.command_line_flags_file),
+ 'TARGET_CPU':
+ repr(args.target_cpu),
+ 'SYSTEM_IMAGE_LOCALES':
+ repr(build_utils.ParseGnList(args.system_image_locales)),
+ }
+ script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+ os.chmod(args.script_output_path, 0750)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps b/deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps
new file mode 100644
index 0000000000..5587566f50
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -0,0 +1,102 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/apk_helper.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../../third_party/catapult/devil/devil/android/decorators.py
+../../../third_party/catapult/devil/devil/android/device_errors.py
+../../../third_party/catapult/devil/devil/android/device_signal.py
+../../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../../third_party/catapult/devil/devil/android/device_utils.py
+../../../third_party/catapult/devil/devil/android/flag_changer.py
+../../../third_party/catapult/devil/devil/android/install_commands.py
+../../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../../third_party/catapult/devil/devil/android/md5sum.py
+../../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/catapult/devil/devil/devil_env.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../../third_party/catapult/devil/devil/utils/host_utils.py
+../../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../../third_party/catapult/devil/devil/utils/logging_common.py
+../../../third_party/catapult/devil/devil/utils/lsusb.py
+../../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../apk_operations.py
+../devil_chromium.py
+../incremental_install/__init__.py
+../incremental_install/installer.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+../pylib/symbols/__init__.py
+../pylib/symbols/deobfuscator.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+../pylib/utils/simpleperf.py
+../pylib/utils/time_profile.py
+bundletool.py
+create_bundle_wrapper_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/create_java_binary_script.py b/deps/v8/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000000..4469381c7c
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import argparse
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+bootclasspath = [{bootclasspath}]
+extra_program_args = {extra_program_args}
+if os.getcwd() != self_dir:
+ offset = os.path.relpath(self_dir, os.getcwd())
+ classpath = [os.path.join(offset, p) for p in classpath]
+ bootclasspath = [os.path.join(offset, p) for p in bootclasspath]
+java_cmd = ["java"]
+# This is a simple argparser for jvm and jar arguments.
+parser = argparse.ArgumentParser()
+parser.add_argument('--jar-args')
+parser.add_argument('--jvm-args')
+
+known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+if known_args.jvm_args:
+ jvm_arguments = known_args.jvm_args.strip('"').split()
+ java_cmd.extend(jvm_arguments)
+if known_args.jar_args:
+ jar_arguments = known_args.jar_args.strip('"').split()
+ if unknown_args:
+    raise Exception('There are unknown arguments: %s' % unknown_args)
+else:
+ jar_arguments = unknown_args
+
+{noverify_flag}
+if bootclasspath:
+ java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath))
+java_cmd.extend(
+ ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"])
+java_cmd.extend(extra_program_args)
+java_cmd.extend(jar_arguments)
+os.execvp("java", java_cmd)
+"""
+
+def main(argv):
+ argv = build_utils.ExpandFileArgs(argv)
+ parser = optparse.OptionParser()
+ parser.add_option('--output', help='Output path for executable script.')
+ parser.add_option('--main-class',
+ help='Name of the java class with the "main" entry point.')
+ parser.add_option('--classpath', action='append', default=[],
+ help='Classpath for running the jar.')
+ parser.add_option('--bootclasspath', action='append', default=[],
+ help='zip/jar files to add to bootclasspath for java cmd.')
+ parser.add_option('--noverify', action='store_true',
+ help='JVM flag: noverify.')
+
+ options, extra_program_args = parser.parse_args(argv)
+
+ if (options.noverify):
+ noverify_flag = 'java_cmd.append("-noverify")'
+ else:
+ noverify_flag = ''
+
+ classpath = []
+ for cp_arg in options.classpath:
+ classpath += build_utils.ParseGnList(cp_arg)
+
+ bootclasspath = []
+ for bootcp_arg in options.bootclasspath:
+ bootclasspath += build_utils.ParseGnList(bootcp_arg)
+
+ run_dir = os.path.dirname(options.output)
+ bootclasspath = [os.path.relpath(p, run_dir) for p in bootclasspath]
+ classpath = [os.path.relpath(p, run_dir) for p in classpath]
+
+ with build_utils.AtomicOutput(options.output) as script:
+ script.write(script_template.format(
+ classpath=('"%s"' % '", "'.join(classpath)),
+ bootclasspath=('"%s"' % '", "'.join(bootclasspath)
+ if bootclasspath else ''),
+ main_class=options.main_class,
+ extra_program_args=repr(extra_program_args),
+ noverify_flag=noverify_flag))
+
+ os.chmod(options.output, 0750)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_java_binary_script.pydeps b/deps/v8/build/android/gyp/create_java_binary_script.pydeps
new file mode 100644
index 0000000000..96d79bf609
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_java_binary_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py
+../../gn_helpers.py
+create_java_binary_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_size_info_files.py b/deps/v8/build/android/gyp/create_size_info_files.py
new file mode 100755
index 0000000000..5b248e4195
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_size_info_files.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates size-info/*.info files used by SuperSize."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+from util import md5_check
+
+
+def _MergeResInfoFiles(res_info_path, info_paths):
+ # Concatenate them all.
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(res_info_path, only_if_changed=False) as dst:
+ for p in info_paths:
+ with open(p) as src:
+ dst.write(src.read())
+
+
+def _PakInfoPathsForAssets(assets):
+ return [f.split(':')[0] + '.info' for f in assets if f.endswith('.pak')]
+
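+# For example (hypothetical paths):
+#   _PakInfoPathsForAssets(['out/locales/en-US.pak:assets/en-US.pak',
+#                           'images/icon.png'])
+#     -> ['out/locales/en-US.pak.info']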
+
+def _MergePakInfoFiles(merged_path, pak_infos):
+ info_lines = set()
+ for pak_info_path in pak_infos:
+ with open(pak_info_path, 'r') as src_info_file:
+ info_lines.update(src_info_file.readlines())
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(merged_path, only_if_changed=False) as f:
+ f.writelines(sorted(info_lines))
+
+
+def _FullJavaNameFromClassFilePath(path):
+ # Input: base/android/java/src/org/chromium/Foo.class
+ # Output: base.android.java.src.org.chromium.Foo
+ if not path.endswith('.class'):
+ return ''
+ path = os.path.splitext(path)[0]
+ parts = []
+ while path:
+ # Use split to be platform independent.
+ head, tail = os.path.split(path)
+ path = head
+ parts.append(tail)
+ parts.reverse() # Package comes first
+ return '.'.join(parts)
+
+
+def _MergeJarInfoFiles(output, inputs):
+ """Merge several .jar.info files to generate an .apk.jar.info.
+
+ Args:
+ output: output file path.
+    inputs: List of .jar.info or .jar files.
+ """
+ info_data = dict()
+ for path in inputs:
+ # android_java_prebuilt adds jar files in the src directory (relative to
+ # the output directory, usually ../../third_party/example.jar).
+ # android_aar_prebuilt collects jar files in the aar file and uses the
+ # java_prebuilt rule to generate gen/example/classes.jar files.
+ # We scan these prebuilt jars to parse each class path for the FQN. This
+ # allows us to later map these classes back to their respective src
+ # directories.
+ # TODO(agrieve): This should probably also check that the mtime of the .info
+ # is newer than that of the .jar, or change prebuilts to always output
+ # .info files so that they always exist (and change the depfile to
+ # depend directly on them).
+ if path.endswith('.info'):
+ info_data.update(jar_info_utils.ParseJarInfoFile(path))
+ else:
+ with zipfile.ZipFile(path) as zip_info:
+ for name in zip_info.namelist():
+ fully_qualified_name = _FullJavaNameFromClassFilePath(name)
+ if fully_qualified_name:
+ info_data[fully_qualified_name] = '{}/{}'.format(path, name)
+
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+ jar_info_utils.WriteJarInfoFile(f, info_data)
+
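+# For example, scanning a hypothetical prebuilt '../../third_party/example.jar'
+# that contains org/chromium/Foo.class records:
+#   info_data['org.chromium.Foo'] =
+#       '../../third_party/example.jar/org/chromium/Foo.class'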
+
+def _FindJarInputs(jar_paths):
+ ret = []
+ for jar_path in jar_paths:
+ jar_info_path = jar_path + '.info'
+ if os.path.exists(jar_info_path):
+ ret.append(jar_info_path)
+ else:
+ ret.append(jar_path)
+ return ret
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser(description=__doc__)
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument(
+ '--jar-info-path', required=True, help='Output .jar.info file')
+ parser.add_argument(
+ '--pak-info-path', required=True, help='Output .pak.info file')
+ parser.add_argument(
+ '--res-info-path', required=True, help='Output .res.info file')
+ parser.add_argument(
+ '--jar-files',
+ required=True,
+ action='append',
+ help='GN-list of .jar file paths')
+ parser.add_argument(
+ '--assets',
+ required=True,
+ action='append',
+ help='GN-list of files to add as assets in the form '
+ '"srcPath:zipPath", where ":zipPath" is optional.')
+ parser.add_argument(
+ '--uncompressed-assets',
+ required=True,
+ action='append',
+ help='Same as --assets, except disables compression.')
+ parser.add_argument(
+ '--resource-apk',
+ dest='resource_apks',
+ required=True,
+ action='append',
+ help='An .ap_ file built using aapt')
+
+ options = parser.parse_args(args)
+
+ options.jar_files = build_utils.ParseGnList(options.jar_files)
+ options.assets = build_utils.ParseGnList(options.assets)
+ options.uncompressed_assets = build_utils.ParseGnList(
+ options.uncompressed_assets)
+
+ jar_inputs = _FindJarInputs(set(options.jar_files))
+ pak_inputs = _PakInfoPathsForAssets(options.assets +
+ options.uncompressed_assets)
+ res_inputs = [p + '.info' for p in options.resource_apks]
+
+ # Don't bother re-running if no .info files have changed (saves ~250ms).
+ md5_check.CallAndRecordIfStale(
+ lambda: _MergeJarInfoFiles(options.jar_info_path, jar_inputs),
+ input_paths=jar_inputs,
+ output_paths=[options.jar_info_path])
+
+ # Always recreate these (just as fast as md5 checking them).
+ _MergePakInfoFiles(options.pak_info_path, pak_inputs)
+ _MergeResInfoFiles(options.res_info_path, res_inputs)
+
+ all_inputs = jar_inputs + pak_inputs + res_inputs
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.jar_info_path,
+ inputs=all_inputs,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/create_size_info_files.pydeps b/deps/v8/build/android/gyp/create_size_info_files.pydeps
new file mode 100644
index 0000000000..4ab7f94ea6
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_size_info_files.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../gn_helpers.py
+create_size_info_files.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_stack_script.py b/deps/v8/build/android/gyp/create_stack_script.py
new file mode 100755
index 0000000000..6ccdc384e2
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_stack_script.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import sys
+import textwrap
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = textwrap.dedent(
+ """\
+ #!/usr/bin/env python
+ #
+ # This file was generated by build/android/gyp/create_stack_script.py
+
+ import os
+ import sys
+
+ def main(argv):
+ script_directory = os.path.dirname(__file__)
+ resolve = lambda p: os.path.abspath(os.path.join(script_directory, p))
+ script_path = resolve('{script_path}')
+ script_args = {script_args}
+ script_path_args = {script_path_args}
+ for arg, path in script_path_args:
+ script_args.extend([arg, resolve(path)])
+ script_cmd = [script_path] + script_args + argv
+ print ' '.join(script_cmd)
+ os.execv(script_path, script_cmd)
+
+ if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
+ """)
+
+
+def main(args):
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--script-path',
+ help='Path to the wrapped script.')
+ parser.add_argument(
+ '--script-output-path',
+ help='Path to the output script.')
+ group = parser.add_argument_group('Path arguments')
+ group.add_argument('--output-directory')
+ group.add_argument('--packed-libs')
+
+ args, script_args = parser.parse_known_args(build_utils.ExpandFileArgs(args))
+
+ def relativize(p):
+ return os.path.relpath(p, os.path.dirname(args.script_output_path))
+
+ script_path = relativize(args.script_path)
+
+ script_path_args = []
+ if args.output_directory:
+ script_path_args.append(
+ ('--output-directory', relativize(args.output_directory)))
+ if args.packed_libs:
+ for p in build_utils.ParseGnList(args.packed_libs):
+ script_path_args.append(('--packed-lib', relativize(p)))
+
+ with build_utils.AtomicOutput(args.script_output_path) as script:
+ script.write(SCRIPT_TEMPLATE.format(
+ script_path=script_path,
+ script_args=script_args,
+ script_path_args=script_path_args))
+
+ os.chmod(args.script_output_path, 0750)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_stack_script.pydeps b/deps/v8/build/android/gyp/create_stack_script.pydeps
new file mode 100644
index 0000000000..7bddb156f4
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_stack_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_stack_script.pydeps build/android/gyp/create_stack_script.py
+../../gn_helpers.py
+create_stack_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_tool_wrapper.py b/deps/v8/build/android/gyp/create_tool_wrapper.py
new file mode 100755
index 0000000000..4433004541
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_tool_wrapper.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple wrapper script that passes the correct --output-directory.
+"""
+
+import argparse
+import os
+
+_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by //build/android/gyp/create_tool_wrapper.py
+
+import os
+import sys
+
+cmd = '{cmd}'
+args = [os.path.basename(cmd), '{flag_name}={output_directory}'] + sys.argv[1:]
+os.execv(cmd, args)
+"""
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output', help='Output path for executable script.')
+ parser.add_argument('--target', help='Path to script being wrapped.')
+ parser.add_argument('--output-directory', help='Value for --output-directory')
+ parser.add_argument('--flag-name',
+ help='Flag name to use instead of --output-directory',
+ default='--output-directory')
+ args = parser.parse_args()
+
+ with open(args.output, 'w') as script:
+ script.write(_TEMPLATE.format(
+ cmd=os.path.abspath(args.target),
+ flag_name=args.flag_name,
+ output_directory=os.path.abspath(args.output_directory)))
+
+ os.chmod(args.output, 0750)
+
+
+if __name__ == '__main__':
+ main()
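+# Illustrative sketch (hypothetical arguments): invoking this script with
+#   --output out/bin/helper --target tools/helper.py --output-directory out
+# writes a wrapper that, ignoring the header comment, boils down to:
+#   cmd = '/abs/path/to/tools/helper.py'
+#   args = [os.path.basename(cmd), '--output-directory=/abs/path/to/out'] \
+#       + sys.argv[1:]
+#   os.execv(cmd, args)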
diff --git a/deps/v8/build/android/gyp/create_tool_wrapper.pydeps b/deps/v8/build/android/gyp/create_tool_wrapper.pydeps
new file mode 100644
index 0000000000..75b8326e70
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_tool_wrapper.pydeps
@@ -0,0 +1,3 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_tool_wrapper.pydeps build/android/gyp/create_tool_wrapper.py
+create_tool_wrapper.py
diff --git a/deps/v8/build/android/gyp/create_ui_locale_resources.py b/deps/v8/build/android/gyp/create_ui_locale_resources.py
new file mode 100755
index 0000000000..97868cbfde
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_ui_locale_resources.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a zip archive containing localized locale name Android resource
+strings!
+
+This script takes a list of input Chrome-specific locale names, as well as an
+output zip file path.
+
+Each output file will contain the definition of a single string resource,
+named 'current_locale', whose value will be the matching Chromium locale name.
+E.g. values-en-rUS/strings.xml will define 'current_locale' as 'en-US'.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+sys.path.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+# A small string template for the content of each strings.xml file.
+# NOTE: The name is chosen to avoid any conflicts with strings defined
+# by other resource archives.
+_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="current_detected_ui_locale_name">{resource_text}</string>
+</resources>
+"""
+
+# The default Chrome locale value.
+_DEFAULT_CHROME_LOCALE = 'en-US'
+
+
+def _GenerateLocaleStringsXml(locale):
+ return _TEMPLATE.format(resource_text=locale)
+
+
+def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
+ locale_data = _GenerateLocaleStringsXml(locale)
+ if android_locale:
+ zip_path = 'values-%s/strings.xml' % android_locale
+ else:
+ zip_path = 'values/strings.xml'
+ build_utils.AddToZipHermetic(
+ out_zip, zip_path, data=locale_data, compress=False)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument(
+ '--locale-list',
+ required=True,
+ help='GN-list of Chrome-specific locale names.')
+ parser.add_argument(
+ '--output-zip', required=True, help='Output zip archive path.')
+
+ args = parser.parse_args()
+
+ locale_list = build_utils.ParseGnList(args.locale_list)
+ if not locale_list:
+ raise Exception('Locale list cannot be empty!')
+
+ with build_utils.AtomicOutput(args.output_zip) as tmp_file:
+ with zipfile.ZipFile(tmp_file, 'w') as out_zip:
+ # First, write the default value, since aapt requires one.
+ _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)
+
+ for locale in locale_list:
+ android_locale = resource_utils.ToAndroidLocaleName(locale)
+ _AddLocaleResourceFileToZip(out_zip, android_locale, locale)
+
+ if args.depfile:
+ build_utils.WriteDepfile(args.depfile, args.output_zip)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/desugar.py b/deps/v8/build/android/gyp/desugar.py
new file mode 100755
index 0000000000..b9d04059e5
--- /dev/null
+++ b/deps/v8/build/android/gyp/desugar.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+
+def main():
+ args = build_utils.ExpandFileArgs(sys.argv[1:])
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--desugar-jar', required=True,
+ help='Path to Desugar.jar.')
+ parser.add_argument('--input-jar', required=True,
+ help='Jar input path to include .class files from.')
+ parser.add_argument('--output-jar', required=True,
+ help='Jar output path.')
+ parser.add_argument('--classpath', required=True,
+ help='Classpath.')
+ parser.add_argument('--bootclasspath', required=True,
+ help='Path to javac bootclasspath interface jar.')
+ options = parser.parse_args(args)
+
+ options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+ options.classpath = build_utils.ParseGnList(options.classpath)
+
+ cmd = [
+ 'java',
+ '-jar',
+ options.desugar_jar,
+ '--input',
+ options.input_jar,
+ '--output',
+ options.output_jar,
+ # Don't include try-with-resources files in every .jar. Instead, they
+ # are included via //third_party/bazel/desugar:desugar_runtime_java.
+ '--desugar_try_with_resources_omit_runtime_classes',
+ ]
+ for path in options.bootclasspath:
+ cmd += ['--bootclasspath_entry', path]
+ for path in options.classpath:
+ cmd += ['--classpath_entry', path]
+ build_utils.CheckOutput(cmd, print_stdout=False)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.output_jar,
+ inputs=options.bootclasspath + options.classpath,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/desugar.pydeps b/deps/v8/build/android/gyp/desugar.pydeps
new file mode 100644
index 0000000000..a40f3aa7dd
--- /dev/null
+++ b/deps/v8/build/android/gyp/desugar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py
+../../gn_helpers.py
+desugar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/dex.py b/deps/v8/build/android/gyp/dex.py
new file mode 100755
index 0000000000..cba8c7f17f
--- /dev/null
+++ b/deps/v8/build/android/gyp/dex.py
@@ -0,0 +1,388 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
+
+import convert_dex_profile
+
+
+def _CheckFilePathEndsWithJar(parser, file_path):
+ if not file_path.endswith(".jar"):
+ parser.error("%s does not end in .jar" % file_path)
+
+
+def _CheckFilePathsEndWithJar(parser, file_paths):
+ for file_path in file_paths:
+ _CheckFilePathEndsWithJar(parser, file_path)
+
+
+def _ParseArgs(args):
+ args = build_utils.ExpandFileArgs(args)
+
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--output-directory',
+ default=os.getcwd(),
+ help='Path to the output build directory.')
+ parser.add_option('--dex-path', help='Dex output path.')
+ parser.add_option('--configuration-name',
+ help='The build CONFIGURATION_NAME.')
+ parser.add_option('--proguard-enabled',
+ help='"true" if proguard is enabled.')
+ parser.add_option('--debug-build-proguard-enabled',
+ help='"true" if proguard is enabled for debug build.')
+ parser.add_option('--proguard-enabled-input-path',
+ help=('Path to dex in Release mode when proguard '
+ 'is enabled.'))
+ parser.add_option('--inputs', help='A list of additional input paths.')
+ parser.add_option('--excluded-paths',
+ help='A list of paths to exclude from the dex file.')
+ parser.add_option('--main-dex-list-path',
+ help='A file containing a list of the classes to '
+ 'include in the main dex.')
+ parser.add_option('--multidex-configuration-path',
+ help='A JSON file containing multidex build configuration.')
+ parser.add_option('--multi-dex', default=False, action='store_true',
+ help='Generate multiple dex files.')
+ parser.add_option('--d8-jar-path', help='Path to D8 jar.')
+ parser.add_option('--release', action='store_true', default=False,
+                    help='Run D8 in release mode. Release mode maximizes the '
+                    'main dex and deletes non-essential line number '
+                    'information (vs. debug mode, which minimizes the main '
+                    'dex and keeps all line number information, and then '
+                    'some).')
+ parser.add_option('--min-api',
+ help='Minimum Android API level compatibility.')
+
+ parser.add_option('--dexlayout-profile',
+ help=('Text profile for dexlayout. If present, a dexlayout '
+                          'pass will happen.'))
+ parser.add_option('--profman-path',
+ help=('Path to ART profman binary. There should be a '
+ 'lib/ directory at the same path containing shared '
+ 'libraries (shared with dexlayout).'))
+ parser.add_option('--dexlayout-path',
+ help=('Path to ART dexlayout binary. There should be a '
+ 'lib/ directory at the same path containing shared '
+ 'libraries (shared with dexlayout).'))
+ parser.add_option('--dexdump-path', help='Path to dexdump binary.')
+ parser.add_option(
+ '--proguard-mapping-path',
+ help=('Path to proguard map from obfuscated symbols in the jar to '
+ 'unobfuscated symbols present in the code. If not '
+ 'present, the jar is assumed not to be obfuscated.'))
+
+ options, paths = parser.parse_args(args)
+
+ required_options = ('d8_jar_path',)
+ build_utils.CheckOptions(options, parser, required=required_options)
+
+ if options.dexlayout_profile:
+ build_utils.CheckOptions(
+ options,
+ parser,
+ required=('profman_path', 'dexlayout_path', 'dexdump_path'))
+ elif options.proguard_mapping_path is not None:
+ raise Exception('Unexpected proguard mapping without dexlayout')
+
+ if options.multidex_configuration_path:
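+    # The configuration file is expected to contain JSON of the form
+    # {"enabled": true} (other keys, if any, are ignored here).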
+ with open(options.multidex_configuration_path) as multidex_config_file:
+ multidex_config = json.loads(multidex_config_file.read())
+ options.multi_dex = multidex_config.get('enabled', False)
+
+ if options.multi_dex and not options.main_dex_list_path:
+ logging.warning('multidex cannot be enabled without --main-dex-list-path')
+ options.multi_dex = False
+ elif options.main_dex_list_path and not options.multi_dex:
+ logging.warning('--main-dex-list-path is unused if multidex is not enabled')
+
+ if options.inputs:
+ options.inputs = build_utils.ParseGnList(options.inputs)
+ _CheckFilePathsEndWithJar(parser, options.inputs)
+ if options.excluded_paths:
+ options.excluded_paths = build_utils.ParseGnList(options.excluded_paths)
+
+ if options.proguard_enabled_input_path:
+ _CheckFilePathEndsWithJar(parser, options.proguard_enabled_input_path)
+ _CheckFilePathsEndWithJar(parser, paths)
+
+ return options, paths
+
+
+def _MoveTempDexFile(tmp_dex_dir, dex_path):
+ """Move the temp dex file out of |tmp_dex_dir|.
+
+ Args:
+ tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp().
+ The directory should have just a single file.
+ dex_path: Target path to move dex file to.
+
+ Raises:
+ Exception if there are multiple files in |tmp_dex_dir|.
+ """
+ tempfiles = os.listdir(tmp_dex_dir)
+ if len(tempfiles) > 1:
+ raise Exception('%d files created, expected 1' % len(tempfiles))
+
+ tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0])
+ shutil.move(tmp_dex_path, dex_path)
+
+
+def _NoClassFiles(jar_paths):
+ """Returns True if there are no .class files in the given JARs.
+
+ Args:
+ jar_paths: list of strings representing JAR file paths.
+
+ Returns:
+ (bool) True if no .class files are found.
+ """
+ for jar_path in jar_paths:
+ with zipfile.ZipFile(jar_path) as jar:
+ if any(name.endswith('.class') for name in jar.namelist()):
+ return False
+ return True
+
+
+def _RunD8(dex_cmd, input_paths, output_path):
+ dex_cmd += ['--output', output_path]
+ dex_cmd += input_paths
+ build_utils.CheckOutput(dex_cmd, print_stderr=False)
+
+
+def _EnvWithArtLibPath(binary_path):
+ """Return an environment dictionary for ART host shared libraries.
+
+ Args:
+ binary_path: the path to an ART host binary.
+
+ Returns:
+ An environment dictionary where LD_LIBRARY_PATH has been augmented with the
+ shared library path for the binary. This assumes that there is a lib/
+ directory in the same location as the binary.
+ """
+ lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
+ env = os.environ.copy()
+ libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
+ libraries.append(lib_path)
+ env['LD_LIBRARY_PATH'] = ':'.join(libraries)
+ return env
+
+
+def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
+ """Create a binary profile for dexlayout.
+
+ Args:
+ text_profile: The ART text profile that will be converted to a binary
+ profile.
+ input_dex: The input dex file to layout.
+ profman_path: Path to the profman binary.
+ temp_dir: Directory to work in.
+
+ Returns:
+ The name of the binary profile, which will live in temp_dir.
+ """
+ binary_profile = os.path.join(
+ temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
+ open(binary_profile, 'w').close() # Touch binary_profile.
+ profman_cmd = [profman_path,
+ '--apk=' + input_dex,
+ '--dex-location=' + input_dex,
+ '--create-profile-from=' + text_profile,
+ '--reference-profile-file=' + binary_profile]
+ build_utils.CheckOutput(
+ profman_cmd,
+ env=_EnvWithArtLibPath(profman_path),
+ stderr_filter=lambda output:
+ build_utils.FilterLines(output, '|'.join(
+ [r'Could not find (method_id|proto_id|name):',
+ r'Could not create type list'])))
+ return binary_profile
+
+
+def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
+ """Layout a dexfile using a profile.
+
+ Args:
+ binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
+ input_dex: The dex file used to create the binary profile.
+ dexlayout_path: Path to the dexlayout binary.
+ temp_dir: Directory to work in.
+
+ Returns:
+ List of output files produced by dexlayout. This will be one if the input
+ was a single dexfile, or multiple files if the input was a multidex
+ zip. These output files are located in temp_dir.
+ """
+ dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
+ os.mkdir(dexlayout_output_dir)
+  dexlayout_cmd = [
+      dexlayout_path,
+      '-u',  # Update checksum.
+      '-p', binary_profile,
+      '-w', dexlayout_output_dir,
+      input_dex,
+  ]
+ build_utils.CheckOutput(
+ dexlayout_cmd,
+ env=_EnvWithArtLibPath(dexlayout_path),
+ stderr_filter=lambda output:
+ build_utils.FilterLines(output,
+ r'Can.t mmap dex file.*please zipalign'))
+ output_files = os.listdir(dexlayout_output_dir)
+ if not output_files:
+ raise Exception('dexlayout unexpectedly produced no output')
+ return [os.path.join(dexlayout_output_dir, f) for f in output_files]
+
+
+def _ZipMultidex(file_dir, dex_files):
+ """Zip dex files into a multidex.
+
+ Args:
+ file_dir: The directory into which to write the output.
+ dex_files: The dexfiles forming the multizip. Their names must end with
+ classes.dex, classes2.dex, ...
+
+ Returns:
+ The name of the multidex file, which will live in file_dir.
+ """
+ ordered_files = [] # List of (archive name, file name)
+ for f in dex_files:
+ if f.endswith('classes.dex.zip'):
+ ordered_files.append(('classes.dex', f))
+ break
+ if not ordered_files:
+    raise Exception('Could not find classes.dex multidex file in %s' %
+                    dex_files)
+ for dex_idx in xrange(2, len(dex_files) + 1):
+ archive_name = 'classes%d.dex' % dex_idx
+ for f in dex_files:
+ if f.endswith(archive_name):
+ ordered_files.append((archive_name, f))
+ break
+ else:
+      raise Exception('Could not find %s multidex file in %s' %
+                      (archive_name, dex_files))
+ if len(set(f[1] for f in ordered_files)) != len(ordered_files):
+    raise Exception('Unexpected clashing filenames for multidex in %s' %
+                    dex_files)
+
+ zip_name = os.path.join(file_dir, 'multidex_classes.zip')
+ build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
+ for archive_name, file_name in ordered_files),
+ zip_name)
+ return zip_name
+
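+# For example, dex_files of ['/t/classes.dex.zip', '/t/classes2.dex'] (a
+# hypothetical dexlayout output) are archived inside multidex_classes.zip as
+# [('classes.dex', '/t/classes.dex.zip'), ('classes2.dex', '/t/classes2.dex')].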
+
+def _ZipSingleDex(dex_file, zip_name):
+ """Zip up a single dex file.
+
+ Args:
+ dex_file: A dexfile whose name is ignored.
+ zip_name: The output file in which to write the zip.
+ """
+ build_utils.DoZip([('classes.dex', dex_file)], zip_name)
+
+
+def main(args):
+ options, paths = _ParseArgs(args)
+ if ((options.proguard_enabled == 'true'
+ and options.configuration_name == 'Release')
+ or (options.debug_build_proguard_enabled == 'true'
+ and options.configuration_name == 'Debug')):
+ paths = [options.proguard_enabled_input_path]
+
+ if options.inputs:
+ paths += options.inputs
+
+ if options.excluded_paths:
+ # Excluded paths are relative to the output directory.
+ exclude_paths = options.excluded_paths
+    paths = [p for p in paths if
+             os.path.relpath(p, options.output_directory) not in exclude_paths]
+
+ input_paths = list(paths)
+ if options.multi_dex:
+ input_paths.append(options.main_dex_list_path)
+
+ dex_cmd = ['java', '-jar', options.d8_jar_path, '--no-desugaring']
+ if options.multi_dex:
+ dex_cmd += ['--main-dex-list', options.main_dex_list_path]
+ if options.release:
+ dex_cmd += ['--release']
+ if options.min_api:
+ dex_cmd += ['--min-api', options.min_api]
+
+ is_dex = options.dex_path.endswith('.dex')
+ is_jar = options.dex_path.endswith('.jar')
+
+ with build_utils.TempDir() as tmp_dir:
+ tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
+ os.mkdir(tmp_dex_dir)
+ if is_jar and _NoClassFiles(paths):
+ # Handle case where no classfiles are specified in inputs
+ # by creating an empty JAR
+ with zipfile.ZipFile(options.dex_path, 'w') as outfile:
+ outfile.comment = 'empty'
+ else:
+      # .dex files can't specify a name for D8. Instead, we output them to a
+      # temp directory, then move them after the command has finished running
+      # (see _MoveTempDexFile). Zip outputs are likewise built from this temp
+      # directory below.
+ _RunD8(dex_cmd, paths, tmp_dex_dir)
+
+ tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output')
+ if is_dex:
+ _MoveTempDexFile(tmp_dex_dir, tmp_dex_output)
+ else:
+ # d8 supports outputting to a .zip, but does not have deterministic file
+ # ordering: https://issuetracker.google.com/issues/119945929
+ build_utils.ZipDir(tmp_dex_output, tmp_dex_dir)
+
+ if options.dexlayout_profile:
+ if options.proguard_mapping_path is not None:
+ matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
+ convert_dex_profile.ObfuscateProfile(
+ options.dexlayout_profile, tmp_dex_output,
+ options.proguard_mapping_path, options.dexdump_path,
+ matching_profile)
+ else:
+ logging.warning('No obfuscation for %s', options.dexlayout_profile)
+ matching_profile = options.dexlayout_profile
+ binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
+ options.profman_path, tmp_dir)
+ output_files = _LayoutDex(binary_profile, tmp_dex_output,
+ options.dexlayout_path, tmp_dir)
+ target = None
+ if len(output_files) > 1:
+ target = _ZipMultidex(tmp_dir, output_files)
+ else:
+ output = output_files[0]
+ if not zipfile.is_zipfile(output):
+ target = os.path.join(tmp_dir, 'dex_classes.zip')
+ _ZipSingleDex(output, target)
+ else:
+ target = output
+ shutil.move(os.path.join(tmp_dir, target), tmp_dex_output)
+
+ # The dex file is complete and can be moved out of tmp_dir.
+ shutil.move(tmp_dex_output, options.dex_path)
+
+ build_utils.WriteDepfile(
+ options.depfile, options.dex_path, input_paths, add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/dex.pydeps b/deps/v8/build/android/gyp/dex.pydeps
new file mode 100644
index 0000000000..e5ecbd2335
--- /dev/null
+++ b/deps/v8/build/android/gyp/dex.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
+../../gn_helpers.py
+../convert_dex_profile.py
+dex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/dexsplitter.py b/deps/v8/build/android/gyp/dexsplitter.py
new file mode 100755
index 0000000000..a0761581bd
--- /dev/null
+++ b/deps/v8/build/android/gyp/dexsplitter.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParseOptions(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--depfile', help='Path to the depfile to write to.')
+ parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
+ parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
+ parser.add_argument(
+ '--input-dex-zip', help='Path to dex files in zip being split.')
+ parser.add_argument(
+ '--proguard-mapping-file', help='Path to proguard mapping file.')
+ parser.add_argument(
+ '--feature-name',
+ action='append',
+ dest='feature_names',
+ help='The name of the feature module.')
+ parser.add_argument(
+ '--feature-jars',
+ action='append',
+      help='GN list of paths to jars which comprise the corresponding feature.')
+ parser.add_argument(
+ '--dex-dest',
+ action='append',
+ dest='dex_dests',
+ help='Destination for dex file of the corresponding feature.')
+ options = parser.parse_args(args)
+
+ assert len(options.feature_names) == len(options.feature_jars) and len(
+ options.feature_names) == len(options.dex_dests)
+ options.features = {}
+ for i, name in enumerate(options.feature_names):
+ options.features[name] = build_utils.ParseGnList(options.feature_jars[i])
+
+ return options
+
+
+def _RunDexsplitter(options, output_dir):
+ cmd = [
+ 'java',
+ '-jar',
+ options.r8_path,
+ 'dexsplitter',
+ '--output',
+ output_dir,
+ '--proguard-map',
+ options.proguard_mapping_file,
+ ]
+
+ for base_jar in options.features['base']:
+ cmd += ['--base-jar', base_jar]
+
+ base_jars_lookup = set(options.features['base'])
+ for feature in options.features:
+ if feature == 'base':
+ continue
+ for feature_jar in options.features[feature]:
+ if feature_jar not in base_jars_lookup:
+ cmd += ['--feature-jar', feature_jar + ':' + feature]
+
+ with build_utils.TempDir() as temp_dir:
+ unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir)
+ for file_name in unzipped_files:
+ cmd += ['--input', file_name]
+ build_utils.CheckOutput(cmd)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseOptions(args)
+
+ input_paths = []
+ for feature_jars in options.features.itervalues():
+ for feature_jar in feature_jars:
+ input_paths.append(feature_jar)
+
+ with build_utils.TempDir() as dexsplitter_output_dir:
+ curr_location_to_dest = []
+ if len(options.features) == 1:
+ # Don't run dexsplitter since it needs at least 1 feature module.
+ curr_location_to_dest.append((options.input_dex_zip,
+ options.dex_dests[0]))
+ else:
+ _RunDexsplitter(options, dexsplitter_output_dir)
+
+ for i, dest in enumerate(options.dex_dests):
+ module_dex_file = os.path.join(dexsplitter_output_dir,
+ options.feature_names[i], 'classes.dex')
+ if os.path.exists(module_dex_file):
+ curr_location_to_dest.append((module_dex_file, dest))
+ else:
+ module_dex_file += '.zip'
+ assert os.path.exists(
+ module_dex_file), 'Dexsplitter tool output not found.'
+          curr_location_to_dest.append((module_dex_file, dest))
+
+ for curr_location, dest in curr_location_to_dest:
+ with build_utils.AtomicOutput(dest) as f:
+ if curr_location.endswith('.zip'):
+ if dest.endswith('.zip'):
+ shutil.copy(curr_location, f.name)
+ else:
+ with zipfile.ZipFile(curr_location, 'r') as z:
+ namelist = z.namelist()
+ assert len(namelist) == 1, (
+ 'Unzipping to single dex file, but not single dex file in ' +
+ options.input_dex_zip)
+ z.extract(namelist[0], f.name)
+ else:
+ if dest.endswith('.zip'):
+ build_utils.ZipDir(
+ f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
+ else:
+ shutil.move(curr_location, f.name)
+
+ build_utils.Touch(options.stamp)
+ build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/dexsplitter.pydeps b/deps/v8/build/android/gyp/dexsplitter.pydeps
new file mode 100644
index 0000000000..5935d23885
--- /dev/null
+++ b/deps/v8/build/android/gyp/dexsplitter.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
+../../gn_helpers.py
+dexsplitter.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/dist_aar.py b/deps/v8/build/android/gyp/dist_aar.py
new file mode 100755
index 0000000000..ed823f18b7
--- /dev/null
+++ b/deps/v8/build/android/gyp/dist_aar.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an Android .aar file."""
+
+import argparse
+import os
+import posixpath
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
+
+
+def _MergeRTxt(r_paths):
+ """Merging the given R.txt files and returns them as a string."""
+ all_lines = set()
+ for r_path in r_paths:
+ with open(r_path) as f:
+ all_lines.update(f.readlines())
+ return ''.join(sorted(all_lines))
+
+
+def _MergeProguardConfigs(proguard_configs):
+ """Merging the given proguard config files and returns them as a string."""
+ ret = []
+ for config in proguard_configs:
+ ret.append('# FROM: {}'.format(config))
+ with open(config) as f:
+ ret.append(f.read())
+ return '\n'.join(ret)
+
+
+def _AddResources(aar_zip, resource_zips):
+ """Adds all resource zips to the given aar_zip.
+
+ Ensures all res/values/* files have unique names by suffixing them with an index.
+ """
+ for i, path in enumerate(resource_zips):
+ with zipfile.ZipFile(path) as res_zip:
+ for info in res_zip.infolist():
+ data = res_zip.read(info)
+ dirname, basename = posixpath.split(info.filename)
+ if 'values' in dirname:
+ basename = '{}_{}'.format(basename, i)
+ info.filename = posixpath.join(dirname, basename)
+ info.filename = posixpath.join('res', info.filename)
+ aar_zip.writestr(info, data)
+
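+# For example, the entry "values/strings.xml" from the first resource zip is
+# rewritten as "res/values/strings.xml_0", so same-named entries coming from
+# different zips cannot collide.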
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--output', required=True, help='Path to output aar.')
+ parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
+ parser.add_argument('--dependencies-res-zips', required=True,
+ help='GN list of resource zips')
+ parser.add_argument('--r-text-files', required=True,
+ help='GN list of R.txt files to merge')
+ parser.add_argument('--proguard-configs', required=True,
+ help='GN list of ProGuard flag files to merge.')
+ parser.add_argument(
+ '--android-manifest',
+ help='Path to AndroidManifest.xml to include.',
+ default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml'))
+ parser.add_argument('--native-libraries', default='',
+ help='GN list of native libraries. If non-empty then '
+ 'ABI must be specified.')
+ parser.add_argument('--abi',
+ help='ABI (e.g. armeabi-v7a) for native libraries.')
+
+ options = parser.parse_args(args)
+
+ if options.native_libraries and not options.abi:
+ parser.error('You must provide --abi if you have native libs')
+
+ options.jars = build_utils.ParseGnList(options.jars)
+ options.dependencies_res_zips = build_utils.ParseGnList(
+ options.dependencies_res_zips)
+ options.r_text_files = build_utils.ParseGnList(options.r_text_files)
+ options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+ options.native_libraries = build_utils.ParseGnList(options.native_libraries)
+
+ with tempfile.NamedTemporaryFile(delete=False) as staging_file:
+ try:
+ with zipfile.ZipFile(staging_file.name, 'w') as z:
+ build_utils.AddToZipHermetic(
+ z, 'AndroidManifest.xml', src_path=options.android_manifest)
+
+ with tempfile.NamedTemporaryFile() as jar_file:
+ build_utils.MergeZips(jar_file.name, options.jars)
+ build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)
+
+ build_utils.AddToZipHermetic(
+ z, 'R.txt', data=_MergeRTxt(options.r_text_files))
+ build_utils.AddToZipHermetic(z, 'public.txt', data='')
+
+ if options.proguard_configs:
+ build_utils.AddToZipHermetic(
+ z, 'proguard.txt',
+ data=_MergeProguardConfigs(options.proguard_configs))
+
+ _AddResources(z, options.dependencies_res_zips)
+
+ for native_library in options.native_libraries:
+ libname = os.path.basename(native_library)
+ build_utils.AddToZipHermetic(
+ z, os.path.join('jni', options.abi, libname),
+ src_path=native_library)
+ except:
+ os.unlink(staging_file.name)
+ raise
+ shutil.move(staging_file.name, options.output)
+
+ if options.depfile:
+ all_inputs = (options.jars + options.dependencies_res_zips +
+ options.r_text_files + options.proguard_configs)
+ build_utils.WriteDepfile(options.depfile, options.output, all_inputs,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/dist_aar.pydeps b/deps/v8/build/android/gyp/dist_aar.pydeps
new file mode 100644
index 0000000000..da5ea8da23
--- /dev/null
+++ b/deps/v8/build/android/gyp/dist_aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
+../../gn_helpers.py
+dist_aar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/emma_instr.py b/deps/v8/build/android/gyp/emma_instr.py
new file mode 100755
index 0000000000..cbe913eb26
--- /dev/null
+++ b/deps/v8/build/android/gyp/emma_instr.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'emma_instr' action in the java build process.
+Depending on whether emma_instrument is set, the 'emma_instr' action will either
+call the instrument command or the copy command.
+
+Possible commands are:
+- instrument_jar: Accepts a jar and instruments it using emma.jar.
+- copy: Called when EMMA coverage is not enabled. This allows us to make
+ this a required step without necessarily instrumenting on every build.
+ Also removes any stale coverage files.
+"""
+
+import collections
+import json
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+
+def _AddCommonOptions(option_parser):
+ """Adds common options to |option_parser|."""
+ option_parser.add_option('--input-path',
+ help=('Path to input file(s). Either the classes '
+ 'directory, or the path to a jar.'))
+ option_parser.add_option('--output-path',
+ help=('Path to output final file(s) to. Either the '
+ 'final classes directory, or the directory in '
+ 'which to place the instrumented/copied jar.'))
+ option_parser.add_option('--coverage-file',
+ help='File to create with coverage metadata.')
+ option_parser.add_option('--sources-list-file',
+ help='File to create with the list of sources.')
+
+
+def _AddInstrumentOptions(option_parser):
+ """Adds options related to instrumentation to |option_parser|."""
+ _AddCommonOptions(option_parser)
+ option_parser.add_option('--source-dirs',
+ help='Space separated list of source directories. '
+ 'source-files should not be specified if '
+ 'source-dirs is specified')
+ option_parser.add_option('--source-files',
+ help='Space separated list of source files. '
+ 'source-dirs should not be specified if '
+ 'source-files is specified')
+ option_parser.add_option('--java-sources-file',
+ help='File containing newline-separated .java paths')
+ option_parser.add_option('--src-root',
+ help='Root of the src repository.')
+ option_parser.add_option('--emma-jar',
+ help='Path to emma.jar.')
+ option_parser.add_option(
+ '--filter-string', default='',
+ help=('Filter string consisting of a list of inclusion/exclusion '
+ 'patterns separated with whitespace and/or comma.'))
+
+
+def _RunCopyCommand(_command, options, _, option_parser):
+ """Copies the jar from input to output locations.
+
+ Also removes any old coverage/sources file.
+
+ Args:
+ command: String indicating the command that was received to trigger
+ this function.
+ options: optparse options dictionary.
+ args: List of extra args from optparse.
+ option_parser: optparse.OptionParser object.
+
+ Returns:
+ An exit code.
+ """
+ if not (options.input_path and options.output_path and
+ options.coverage_file and options.sources_list_file):
+ option_parser.error('All arguments are required.')
+
+ if os.path.exists(options.coverage_file):
+ os.remove(options.coverage_file)
+ if os.path.exists(options.sources_list_file):
+ os.remove(options.sources_list_file)
+
+ shutil.copy(options.input_path, options.output_path)
+
+
+def _GetSourceDirsFromSourceFiles(source_files):
+ """Returns list of directories for the files in |source_files|.
+
+ Args:
+ source_files: List of source files.
+
+ Returns:
+ List of source directories.
+ """
+ return list(set(os.path.dirname(source_file) for source_file in source_files))
+
+
+def _CreateSourcesListFile(source_dirs, sources_list_file, src_root):
+ """Adds all normalized source directories to |sources_list_file|.
+
+ Args:
+ source_dirs: List of source directories.
+ sources_list_file: File into which to write the JSON list of sources.
+ src_root: Root which sources added to the file should be relative to.
+
+ Returns:
+ An exit code.
+ """
+ src_root = os.path.abspath(src_root)
+ relative_sources = []
+ for s in source_dirs:
+ abs_source = os.path.abspath(s)
+ if abs_source[:len(src_root)] != src_root:
+ print ('Error: found source directory not under repository root: %s %s'
+ % (abs_source, src_root))
+ return 1
+ rel_source = os.path.relpath(abs_source, src_root)
+
+ relative_sources.append(rel_source)
+
+ with open(sources_list_file, 'w') as f:
+ json.dump(relative_sources, f)
+
+
+def _RunInstrumentCommand(_command, options, _, option_parser):
+ """Instruments jar files using EMMA.
+
+ Args:
+ command: String indicating the command that was received to trigger
+ this function.
+ options: optparse options dictionary.
+ args: List of extra args from optparse.
+ option_parser: optparse.OptionParser object.
+
+ Returns:
+ An exit code.
+ """
+ if not (options.input_path and options.output_path and
+ options.coverage_file and options.sources_list_file and
+ (options.source_files or options.source_dirs or
+ options.java_sources_file) and
+ options.src_root and options.emma_jar):
+ option_parser.error('All arguments are required.')
+
+ if os.path.exists(options.coverage_file):
+ os.remove(options.coverage_file)
+ temp_dir = tempfile.mkdtemp()
+ try:
+ cmd = ['java', '-cp', options.emma_jar,
+ 'emma', 'instr',
+ '-ip', options.input_path,
+ '-ix', options.filter_string,
+ '-d', temp_dir,
+ '-out', options.coverage_file,
+ '-m', 'fullcopy']
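+ # e.g.: java -cp emma.jar emma instr -ip input.jar -ix <filters>
+ #       -d <temp_dir> -out coverage.em -m fullcopy (paths illustrative)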
+ build_utils.CheckOutput(cmd)
+
+ # File is not generated when filter_string doesn't match any files.
+ if not os.path.exists(options.coverage_file):
+ build_utils.Touch(options.coverage_file)
+
+ temp_jar_dir = os.path.join(temp_dir, 'lib')
+ jars = os.listdir(temp_jar_dir)
+ if len(jars) != 1:
+ print('Error: multiple output files in: %s' % (temp_jar_dir))
+ return 1
+
+ # Delete output_path first to avoid modifying input_path in the case where
+ # input_path is a hardlink to output_path. http://crbug.com/571642
+ if os.path.exists(options.output_path):
+ os.unlink(options.output_path)
+ shutil.move(os.path.join(temp_jar_dir, jars[0]), options.output_path)
+ finally:
+ shutil.rmtree(temp_dir)
+
+ if options.source_dirs:
+ source_dirs = build_utils.ParseGnList(options.source_dirs)
+ else:
+ source_files = []
+ if options.source_files:
+ source_files += build_utils.ParseGnList(options.source_files)
+ if options.java_sources_file:
+ source_files.extend(
+ build_utils.ReadSourcesList(options.java_sources_file))
+ source_dirs = _GetSourceDirsFromSourceFiles(source_files)
+
+ # TODO(GYP): In GN, we are passed the list of sources, detecting source
+ # directories, then walking them to re-establish the list of sources.
+ # This can obviously be simplified!
+ _CreateSourcesListFile(source_dirs, options.sources_list_file,
+ options.src_root)
+
+ return 0
+
+
+CommandFunctionTuple = collections.namedtuple(
+ 'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
+VALID_COMMANDS = {
+ 'copy': CommandFunctionTuple(_AddCommonOptions,
+ _RunCopyCommand),
+ 'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
+ _RunInstrumentCommand),
+}
+
+
+class CommandOptionParser(optparse.OptionParser):
+ """Wrapper class for OptionParser to help with listing commands."""
+
+ def __init__(self, *args, **kwargs):
+ """Creates a CommandOptionParser.
+
+ Args:
+ commands_dict: A dictionary mapping command strings to an object defining
+ - add_options_func: Adds options to the option parser
+ - run_command_func: Runs the command itself.
+ example: An example command.
+ everything else: Passed to optparse.OptionParser constructor.
+ """
+ self.commands_dict = kwargs.pop('commands_dict', {})
+ self.example = kwargs.pop('example', '')
+ if 'usage' not in kwargs:
+ kwargs['usage'] = 'Usage: %prog <command> [options]'
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ #override
+ def get_usage(self):
+ normal_usage = optparse.OptionParser.get_usage(self)
+ command_list = self.get_command_list()
+ example = self.get_example()
+ return self.expand_prog_name(normal_usage + example + command_list)
+
+ #override
+ def get_command_list(self):
+ if self.commands_dict.keys():
+ return '\nCommands:\n %s\n' % '\n '.join(
+ sorted(self.commands_dict.keys()))
+ return ''
+
+ def get_example(self):
+ if self.example:
+ return '\nExample:\n %s\n' % self.example
+ return ''
+
+
+def main():
+ option_parser = CommandOptionParser(commands_dict=VALID_COMMANDS)
+ argv = sys.argv
+
+ if len(argv) < 2 or argv[1] not in option_parser.commands_dict:
+ # Parse args first, if this is '--help', optparse will print help and exit
+ option_parser.parse_args(argv)
+ option_parser.error('Invalid command.')
+
+ cmd = option_parser.commands_dict[argv[1]]
+ cmd.add_options_func(option_parser)
+ options, args = option_parser.parse_args(argv)
+ return cmd.run_command_func(argv[1], options, args, option_parser)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/emma_instr.pydeps b/deps/v8/build/android/gyp/emma_instr.pydeps
new file mode 100644
index 0000000000..88f752a0f9
--- /dev/null
+++ b/deps/v8/build/android/gyp/emma_instr.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/emma_instr.pydeps build/android/gyp/emma_instr.py
+../../gn_helpers.py
+emma_instr.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/extract_unwind_tables.py b/deps/v8/build/android/gyp/extract_unwind_tables.py
new file mode 100755
index 0000000000..37a8421449
--- /dev/null
+++ b/deps/v8/build/android/gyp/extract_unwind_tables.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables in from breakpad symbol files
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered based on function
+address. The output file only contains rows that match the most popular rule
+type in the CFI table, to reduce the output size and keep the data compact.
+See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md.
+1. The CFA rules should be of postfix form "SP <val> +".
+2. The RA rules should be of postfix form "CFA <val> + ^".
+Note: breakpad represents dereferencing address with '^' operator.
+
+The output file has 2 tables UNW_INDEX and UNW_DATA, inspired from ARM EHABI
+format. The first table contains function addresses and an index into the
+UNW_DATA table. The second table contains one or more rows for the function
+unwind information.
+
+The output file starts with 4 bytes giving the size of UNW_INDEX in bytes,
+followed by the UNW_INDEX table and then the UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+ 1. The first column contains 4 byte rows: each function's start address as
+ an offset from the start of the binary, in sorted order.
+ 2. For each function addr, the second column contains 2 byte indices in order.
+ The indices are offsets (in count of 2 bytes) of the CFI data from start of
+ UNW_DATA.
+The last entry in the table always contains the CANT_UNWIND index to specify the
+end address of the last function.
+
+UNW_DATA contains the data for all functions. Each function's data contains N
+rows. The data found at the offset pointed to from UNW_INDEX is:
+ 2 bytes: N - number of rows that belong to current function.
+ N * 4 bytes: N rows of data. 16 bits : Address offset from function start.
+ 14 bits : CFA offset / 4.
+ 2 bits : RA offset / 4.
+
+The function is not added to the unwind table in the following conditions:
+C1. If the length of the function code (number of instructions) is greater
+ than 0xFFFF (2 byte address span). This is because we use 16 bits to refer
+ to the offset of an instruction from the start of the function.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because we
+ use 14 bits to denote CFA offset (last 2 bits are 0).
+C3. If the Return Address is stored at an offset >= 16 from the CFA. Some
+ functions which have variable arguments can have an offset of up to 16.
+ TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4 since
+ we never have 0.
+C4: Some functions do not have unwind information defined in dwarf info. These
+ functions have index value CANT_UNWIND(0xFFFF) in UNW_INDEX table.
+
+
+Usage:
+ extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+ --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+_CFA_REG = '.cfa'
+_RA_REG = '.ra'
+
+_ADDR_ENTRY = 0
+_LENGTH_ENTRY = 1
+
+_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+ """Writes a 32 bit unsigned integer to the given output file."""
+ output_file.write(struct.pack('<L', val))
+
+
+def _Write2Bytes(output_file, val):
+ """Writes a 16 bit unsigned integer to the given output file."""
+ output_file.write(struct.pack('<H', val))
+
+
+def _FindRuleForRegister(cfi_row, reg):
+ """Returns the postfix expression as string for a given register.
+
+ Breakpad CFI row format specifies rules for unwinding each register in postfix
+ expression form separated by space. Each rule starts with register name and a
+ colon. Eg: "CFI R1: <rule> R2: <rule>".
+ """
+ out = []
+ found_register = False
+ for part in cfi_row:
+ if found_register:
+ if part[-1] == ':':
+ break
+ out.append(part)
+ elif part == reg + ':':
+ found_register = True
+ return ' '.join(out)
+
+
+def _GetCfaAndRaOffset(cfi_row):
+ """Returns a tuple with 2 numbers (cfa_offset, ra_offset).
+
+ Returns the parsed values if the rule matches the predefined criteria. Returns (0, 0)
+ otherwise. The criteria for CFA rule is postfix form "SP <val> +" and RA rule
+ is postfix form "CFA -<val> + ^".
+ """
+ cfa_offset = 0
+ ra_offset = 0
+ cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG)
+ ra_rule = _FindRuleForRegister(cfi_row, _RA_REG)
+ if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule):
+ cfa_offset = int(cfa_rule.split()[1], 10)
+ if ra_rule:
+ if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule):
+ return (0, 0)
+ ra_offset = -1 * int(ra_rule.split()[1], 10)
+ return (cfa_offset, ra_offset)
+
+
+def _GetAllCfiRows(symbol_file):
+ """Returns parsed CFI data from given symbol_file.
+
+ Each entry in the cfi data dictionary returned is a map from function start
+ address to array of function rows, starting with FUNCTION type, followed by
+ one or more CFI rows.
+ """
+ cfi_data = {}
+ current_func = []
+ for line in symbol_file:
+ if 'STACK CFI' not in line:
+ continue
+
+ parts = line.split()
+ data = {}
+ if parts[2] == 'INIT':
+ # Add the previous function to the output
+ if len(current_func) > 1:
+ cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+ current_func = []
+
+ # The function line is of format "STACK CFI INIT <addr> <length> ..."
+ data[_ADDR_ENTRY] = int(parts[3], 16)
+ data[_LENGTH_ENTRY] = int(parts[4], 16)
+
+ # Condition C1: Skip if length is large.
+ if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff:
+ continue # Skip the current function.
+ else:
+ # The current function is skipped.
+ if len(current_func) == 0:
+ continue
+
+ # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..."
+ data[_ADDR_ENTRY] = int(parts[2], 16)
+ (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts)
+
+ # Condition C2 and C3: Skip based on limits on offsets.
+ if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff:
+ current_func = []
+ continue
+ assert data[_CFA_REG] % 4 == 0
+ # Since we skipped functions with code size larger than 0xffff, we should
+ # have no function offset larger than the same value.
+ assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff
+
+ if data[_ADDR_ENTRY] == 0:
+ # Skip current function, delete all previous entries.
+ current_func = []
+ continue
+ assert data[_ADDR_ENTRY] % 2 == 0
+ current_func.append(data)
+
+ # Condition C4: Skip function without CFI rows.
+ if len(current_func) > 1:
+ cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
+ return cfi_data
+
+
+def _WriteCfiData(cfi_data, out_file):
+ """Writes the CFI data in defined format to out_file."""
+ # Stores the final data that will be written to UNW_DATA table, in order
+ # with 2 byte items.
+ unw_data = []
+
+ # Represent all the CFI data of functions as set of numbers and map them to an
+ # index in the |unw_data|. This index is later written to the UNW_INDEX table
+ # for each function. This map is used to find index of the data for functions.
+ data_to_index = {}
+ # Store mapping between the functions to the index.
+ func_addr_to_index = {}
+ previous_func_end = 0
+ for addr, function in sorted(cfi_data.iteritems()):
+ # Add an empty function entry when CFI rows are missing between 2
+ # consecutive functions.
+ if previous_func_end != 0 and addr - previous_func_end > 4:
+ func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+ previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]
+
+ assert len(function) > 1
+ func_data_arr = []
+ func_data = 0
+ # The first row contains the function address and length. The rest of the
+ # rows have CFI data. Create function data array as given in the format.
+ for row in function[1:]:
+ addr_offset = row[_ADDR_ENTRY] - addr
+ cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] / 4)
+
+ func_data_arr.append(addr_offset)
+ func_data_arr.append(cfa_offset)
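+ # e.g. a CFA offset of 16 and an RA offset of 8 pack to 16 | (8 / 4) = 0x12.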
+
+ # Consider all the rows in the data as one large integer and add it as a key
+ # to the |data_to_index|.
+ for data in func_data_arr:
+ func_data = (func_data << 16) | data
+
+ row_count = len(func_data_arr) / 2
+ if func_data not in data_to_index:
+ # When data is not found, create a new index = len(unw_data), and write
+ # the data to |unw_data|.
+ index = len(unw_data)
+ data_to_index[func_data] = index
+ unw_data.append(row_count)
+ for row in func_data_arr:
+ unw_data.append(row)
+ else:
+ # If the data was found, then use the same index for the function.
+ index = data_to_index[func_data]
+ assert row_count == unw_data[index]
+ func_addr_to_index[addr] = data_to_index[func_data]
+
+ # Mark the end of the last function entry.
+ func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+
+ # Write the size of UNW_INDEX file in bytes.
+ _Write4Bytes(out_file, len(func_addr_to_index) * 6)
+
+ # Write the UNW_INDEX table. First list of addresses and then indices.
+ sorted_unw_index = sorted(func_addr_to_index.iteritems())
+ for addr, index in sorted_unw_index:
+ _Write4Bytes(out_file, addr)
+ for addr, index in sorted_unw_index:
+ _Write2Bytes(out_file, index)
+
+ # Write the UNW_DATA table.
+ for data in unw_data:
+ _Write2Bytes(out_file, data)
+
+
+def _ParseCfiData(sym_file, output_path):
+ with open(sym_file, 'r') as f:
+ cfi_data = _GetAllCfiRows(f)
+
+ with open(output_path, 'wb') as out_file:
+ _WriteCfiData(cfi_data, out_file)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--input_path', required=True,
+ help='The input path of the unstripped binary')
+ parser.add_argument(
+ '--output_path', required=True,
+ help='The path of the output file')
+ parser.add_argument(
+ '--dump_syms_path', required=True,
+ help='The path of the dump_syms binary')
+
+ args = parser.parse_args()
+
+ with tempfile.NamedTemporaryFile() as sym_file:
+ out = subprocess.call(
+ ['./' + args.dump_syms_path, args.input_path], stdout=sym_file)
+ assert not out
+ sym_file.flush()
+ _ParseCfiData(sym_file.name, args.output_path)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/extract_unwind_tables_tests.py b/deps/v8/build/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 0000000000..02c70eb049
--- /dev/null
+++ b/deps/v8/build/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+"""
+
+import optparse
+import os
+import struct
+import sys
+import tempfile
+import unittest
+
+import extract_unwind_tables
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
+class TestExtractUnwindTables(unittest.TestCase):
+ def testExtractCfi(self):
+ with tempfile.NamedTemporaryFile() as input_file, \
+ tempfile.NamedTemporaryFile() as output_file:
+ input_file.write("""
+MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
+INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
+FILE 0 ../../base/allocator/allocator_check.cc
+FILE 1 ../../base/allocator/allocator_extension.cc
+FILE 2 ../../base/allocator/allocator_shim.cc
+FUNC 1adcb60 54 0 i2d_name_canon
+1adcb60 1a 509 17054
+3b94c70 2 69 40
+PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize()
+PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File)
+STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI 2 .cfa: sp 4 +
+STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI 6 .cfa: sp 16 +
+STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr
+STACK CFI e1a970 .cfa: sp 4 +
+STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
+STACK CFI e1a974 .cfa: sp 16 +
+STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr
+STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 +
+STACK CFI e1a1e8 .cfa: sp 80 +
+STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^
+STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr
+STACK CFI e17004 2 .cfa: sp 0 + .ra: lr
+STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr
+STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^
+STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4
+STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^
+STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
+STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
+STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
+STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
+""")
+ input_file.flush()
+ extract_unwind_tables._ParseCfiData(input_file.name, output_file.name)
+
+ expected_cfi_data = {
+ 0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
+ 0xe1a296 : [],
+ 0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10],
+ 0xe1a990 : [],
+ 0x3b92e24: [0x28, 0x13],
+ 0x3b92e62: [],
+ }
+ expected_function_count = len(expected_cfi_data)
+
+ actual_output = []
+ with open(output_file.name, 'rb') as f:
+ while True:
+ read = f.read(2)
+ if not read:
+ break
+ actual_output.append(struct.unpack('H', read)[0])
+
+ # The first 4 bytes give the size of the unw_index table.
+ unw_index_size = actual_output[1] << 16 | actual_output[0]
+ # Each function entry in unw_index takes 6 bytes.
+ self.assertEqual(expected_function_count * 6, unw_index_size)
+ # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing
+ # size.
+ unw_index_start = 2
+ unw_index_addr_end = unw_index_start + expected_function_count * 2
+ unw_index_end = unw_index_addr_end + expected_function_count
+ unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end]
+ unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end]
+
+ unw_data_start = unw_index_end
+ unw_data = actual_output[unw_data_start:]
+
+ for func_iter in range(0, expected_function_count):
+ func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 |
+ unw_index_addr_col[func_iter * 2])
+ index = unw_index_index_col[func_iter]
+ # If the index is CANT_UNWIND, the function cannot be unwound.
+ if index == 0xFFFF:
+ self.assertEqual(expected_cfi_data[func_addr], [])
+ continue
+
+ func_start = index + 1
+ func_end = func_start + unw_data[index] * 2
+ self.assertEqual(
+ len(expected_cfi_data[func_addr]), func_end - func_start)
+ func_cfi = unw_data[func_start : func_end]
+ self.assertEqual(expected_cfi_data[func_addr], func_cfi)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/filter_zip.py b/deps/v8/build/android/gyp/filter_zip.py
new file mode 100755
index 0000000000..2182042df5
--- /dev/null
+++ b/deps/v8/build/android/gyp/filter_zip.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+
+from util import build_utils
+
+
+_RESOURCE_CLASSES = [
+ "R.class",
+ "R##*.class",
+ "Manifest.class",
+ "Manifest##*.class",
+]
+
+
+def _CreatePathTransform(exclude_globs, include_globs,
+ strip_resource_classes_for):
+ exclude_globs = list(exclude_globs or [])
+ if strip_resource_classes_for:
+ exclude_globs.extend(p.replace('.', '/') + '/' + f
+ for p in strip_resource_classes_for
+ for f in _RESOURCE_CLASSES)
+ def path_transform(path):
+ # Exclude filters take precedence over include filters.
+ if build_utils.MatchesGlob(path, exclude_globs):
+ return None
+ if include_globs and not build_utils.MatchesGlob(path, include_globs):
+ return None
+ return path
+
+ return path_transform
+
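+# For example, strip_resource_classes_for=['org.chromium.base'] (package name
+# hypothetical) adds exclude globs such as "org/chromium/base/R.class" and
+# "org/chromium/base/R##*.class".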
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input', required=True,
+ help='Input zip file.')
+ parser.add_argument('--output', required=True,
+ help='Output zip file')
+ parser.add_argument('--exclude-globs',
+ help='GN list of exclude globs')
+ parser.add_argument('--include-globs',
+ help='GN list of include globs')
+ parser.add_argument('--strip-resource-classes-for',
+ help='GN list of Java package names to strip R.class files for.')
+
+ argv = build_utils.ExpandFileArgs(sys.argv[1:])
+ args = parser.parse_args(argv)
+
+ if args.exclude_globs:
+ args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
+ if args.include_globs:
+ args.include_globs = build_utils.ParseGnList(args.include_globs)
+ if args.strip_resource_classes_for:
+ args.strip_resource_classes_for = build_utils.ParseGnList(
+ args.strip_resource_classes_for)
+
+ path_transform = _CreatePathTransform(
+ args.exclude_globs, args.include_globs, args.strip_resource_classes_for)
+ with build_utils.AtomicOutput(args.output) as f:
+ build_utils.MergeZips(
+ f.name, [args.input], path_transform=path_transform)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/filter_zip.pydeps b/deps/v8/build/android/gyp/filter_zip.pydeps
new file mode 100644
index 0000000000..67c989cf88
--- /dev/null
+++ b/deps/v8/build/android/gyp/filter_zip.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py
+../../gn_helpers.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/finalize_apk.py b/deps/v8/build/android/gyp/finalize_apk.py
new file mode 100644
index 0000000000..2440fe40a1
--- /dev/null
+++ b/deps/v8/build/android/gyp/finalize_apk.py
@@ -0,0 +1,32 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and aligns an APK."""
+
+import argparse
+import shutil
+import subprocess
+import tempfile
+
+
+def FinalizeApk(apksigner_path, zipalign_path, unsigned_apk_path,
+ final_apk_path, key_path, key_passwd, key_name):
+ # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime
+ # and a corrupted state.
+ with tempfile.NamedTemporaryFile() as staging_file:
+ # v2 signing requires that zipalign happen first.
+ subprocess.check_output([
+ zipalign_path, '-p', '-f', '4',
+ unsigned_apk_path, staging_file.name])
+ subprocess.check_output([
+ apksigner_path, 'sign',
+ '--in', staging_file.name,
+ '--out', staging_file.name,
+ '--ks', key_path,
+ '--ks-key-alias', key_name,
+ '--ks-pass', 'pass:' + key_passwd,
+ # Force SHA-1 (makes signing faster; insecure is fine for local builds).
+ '--min-sdk-version', '1',
+ ])
+ shutil.move(staging_file.name, final_apk_path)
+ staging_file.delete = False
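+
+# Illustrative call (all paths, key names and passwords are hypothetical):
+#   FinalizeApk('apksigner', 'zipalign', 'app-unsigned.apk', 'app.apk',
+#               'debug.keystore', 'password', 'androiddebugkey')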
diff --git a/deps/v8/build/android/gyp/find.py b/deps/v8/build/android/gyp/find.py
new file mode 100755
index 0000000000..a9f1d49855
--- /dev/null
+++ b/deps/v8/build/android/gyp/find.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('--pattern', default='*', help='File pattern to match.')
+ options, directories = parser.parse_args(argv)
+
+ for d in directories:
+ if not os.path.exists(d):
+ print >> sys.stderr, '%s does not exist' % d
+ return 1
+ for root, _, filenames in os.walk(d):
+ for f in fnmatch.filter(filenames, options.pattern):
+ print os.path.join(root, f)
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/find_sun_tools_jar.py b/deps/v8/build/android/gyp/find_sun_tools_jar.py
new file mode 100755
index 0000000000..7cd4c33984
--- /dev/null
+++ b/deps/v8/build/android/gyp/find_sun_tools_jar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This finds the java distribution's tools.jar and copies it somewhere.
+"""
+
+import argparse
+import os
+import re
+import shutil
+import sys
+
+from util import build_utils
+
+RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
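+# Example line matched from "java -verbose -version" output (the JVM path is
+# hypothetical):
+#   [Opened /usr/lib/jvm/java-7-openjdk/jre/lib/rt.jar]
+# from which tools.jar resolves to /usr/lib/jvm/java-7-openjdk/lib/tools.jar.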
+
+def main():
+ parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
+ parser.add_argument('--depfile',
+ help='Path to depfile. This must be specified as the '
+ 'action\'s first output.')
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args()
+
+ sun_tools_jar_path = FindSunToolsJarPath()
+
+ if sun_tools_jar_path is None:
+ raise Exception("Couldn\'t find tools.jar")
+
+ # Using copyfile instead of copy() because copy() calls copymode()
+ # We don't want the locked mode because we may copy over this file again
+ shutil.copyfile(sun_tools_jar_path, args.output)
+
+ if args.depfile:
+ build_utils.WriteDepfile(args.depfile, args.output, [sun_tools_jar_path])
+
+
+def FindSunToolsJarPath():
+ # This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7
+ stdout = build_utils.CheckOutput(
+ ["java", "-verbose", "-version"], print_stderr=False)
+ for ln in stdout.splitlines():
+ match = RT_JAR_FINDER.match(ln)
+ if match:
+ return os.path.join(match.group(1), 'lib', 'tools.jar')
+
+ return None
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/gcc_preprocess.py b/deps/v8/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000000..8b3444c2b0
--- /dev/null
+++ b/deps/v8/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def DoGcc(options):
+ build_utils.MakeDirectory(os.path.dirname(options.output))
+
+ gcc_cmd = ['gcc']  # Invoke host gcc.
+ if options.defines:
+ gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), []))
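+ # e.g. --defines FOO --defines BAR expands to ['-D', 'FOO', '-D', 'BAR'].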
+
+ with build_utils.AtomicOutput(options.output) as f:
+ gcc_cmd.extend([
+ '-E', # stop after preprocessing.
+ '-D', 'ANDROID', # Specify ANDROID define for pre-processor.
+ '-x', 'c-header', # treat sources as C header files
+ '-P', # disable line markers, i.e. '#line 309'
+ '-I', options.include_path,
+ '-o', f.name,
+ options.template
+ ])
+
+ build_utils.CheckOutput(gcc_cmd)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--include-path', help='Include path for gcc.')
+ parser.add_option('--template', help='Path to template.')
+ parser.add_option('--output', help='Path for generated file.')
+ parser.add_option('--defines', help='Preprocessor macros to define.', action='append')
+
+ options, _ = parser.parse_args(args)
+
+ DoGcc(options)
+
+ if options.depfile:
+ build_utils.WriteDepfile(options.depfile, options.output, add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/gcc_preprocess.pydeps b/deps/v8/build/android/gyp/gcc_preprocess.pydeps
new file mode 100644
index 0000000000..64e776b633
--- /dev/null
+++ b/deps/v8/build/android/gyp/gcc_preprocess.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py
+../../gn_helpers.py
+gcc_preprocess.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/generate_android_wrapper.py b/deps/v8/build/android/gyp/generate_android_wrapper.py
new file mode 100755
index 0000000000..f8e1815324
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_android_wrapper.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+from util import build_utils
+
+sys.path.append(
+ os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', 'util')))
+
+import generate_wrapper
+
+_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')
+
+
+def ExpandWrappedPathLists(args):
+ expanded_args = []
+ for arg in args:
+ m = _WRAPPED_PATH_LIST_RE.match(arg)
+ if m:
+ for p in build_utils.ParseGnList(m.group(2)):
+ expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p])
+ else:
+ expanded_args.append(arg)
+ return expanded_args
+
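+# For example (hypothetical jar names), the argument
+#   '@WrappedPathList(--jars, ["a.jar", "b.jar"])'
+# expands to:
+#   ['--jars', '@WrappedPath(a.jar)', '--jars', '@WrappedPath(b.jar)']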
+
+def main(raw_args):
+ parser = generate_wrapper.CreateArgumentParser()
+ expanded_raw_args = build_utils.ExpandFileArgs(raw_args)
+ expanded_raw_args = ExpandWrappedPathLists(expanded_raw_args)
+ args = parser.parse_args(expanded_raw_args)
+ return generate_wrapper.Wrap(args)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/generate_linker_version_script.py b/deps/v8/build/android/gyp/generate_linker_version_script.py
new file mode 100755
index 0000000000..34c72eb818
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_linker_version_script.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate linker version scripts for Chrome on Android shared libraries."""
+
+import argparse
+import os
+
+from util import build_utils
+
+_SCRIPT_HEADER = """\
+# AUTO-GENERATED FILE. DO NOT MODIFY.
+#
+# See: %s
+
+{
+ global:
+""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+_SCRIPT_FOOTER = """\
+ local:
+ *;
+};
+"""
+
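+# With --export-java-symbols and no whitelist file, the generated script
+# looks roughly like:
+#
+#   {
+#     global:
+#       CrashpadHandlerMain;
+#       JNI_OnLoad;
+#       Java_*;
+#     local:
+#       *;
+#   };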
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--output',
+ required=True,
+ help='Path to output linker version script file.')
+ parser.add_argument(
+ '--export-java-symbols',
+ action='store_true',
+ help='Export Java_* JNI methods')
+ parser.add_argument(
+ '--export-symbol-whitelist-file',
+ help='Path to input file containing whitelist of extra '
+ 'symbols to export. One symbol per line.')
+ options = parser.parse_args()
+
+ # JNI_OnLoad is always exported.
+ # CrashpadHandlerMain() is the entry point to the Crashpad handler, required
+ # for libcrashpad_handler_trampoline.so.
+ symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad']
+
+ if options.export_java_symbols:
+ symbol_list.append('Java_*')
+
+ if options.export_symbol_whitelist_file:
+ with open(options.export_symbol_whitelist_file, 'rt') as f:
+ for line in f:
+ line = line.strip()
+ if not line or line[0] == '#':
+ continue
+ symbol_list.append(line)
+
+ script_content = [_SCRIPT_HEADER]
+ for symbol in symbol_list:
+ script_content.append(' %s;\n' % symbol)
+ script_content.append(_SCRIPT_FOOTER)
+
+ script = ''.join(script_content)
+
+ with build_utils.AtomicOutput(options.output) as f:
+ f.write(script)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/generate_linker_version_script.pydeps b/deps/v8/build/android/gyp/generate_linker_version_script.pydeps
new file mode 100644
index 0000000000..d1e3ad6181
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_linker_version_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../gn_helpers.py
+generate_linker_version_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/generate_resource_rewriter.py b/deps/v8/build/android/gyp/generate_resource_rewriter.py
new file mode 100755
index 0000000000..ba635a293d
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_resource_rewriter.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate ResourceRewriter.java which overwrites the given package's
+ resource id.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ '..',
+ '..',
+ '..',
+ 'third_party')))
+import jinja2
+
+
+RESOURCE_REWRITER_JAVA = "ResourceRewriter.java"
+
+RESOURCE_REWRITER = """/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+/**
+ * Helper class used to fix up resource ids.
+ */
+class ResourceRewriter {
+ /**
+ * Rewrite the R 'constants' for the WebView.
+ */
+ public static void rewriteRValues(final int packageId) {
+ {% for res_package in res_packages %}
+ {{ res_package }}.R.onResourcesLoaded(packageId);
+ {% endfor %}
+ }
+}
+"""
+
+def ParseArgs(args):
+ """Parses command line options.
+
+ Returns:
+ A Namespace from argparse.parse_args().
+ """
+ parser = argparse.ArgumentParser(prog='generate_resource_rewriter')
+
+ parser.add_argument('--package-name',
+ required=True,
+ help='The package name of ResourceRewriter.')
+ parser.add_argument('--dep-packages',
+ required=True,
+ help='A list of packages whose resource ids will be '
+ 'overwritten in ResourceRewriter.')
+ parser.add_argument('--output-dir',
+ help='An output directory for the generated'
+ ' ResourceRewriter.java')
+ parser.add_argument('--srcjar',
+ help='The path of the generated srcjar containing'
+ ' ResourceRewriter.java')
+
+ return parser.parse_args(args)
+
+
+def CreateResourceRewriter(package, res_packages, output_dir):
+ build_utils.MakeDirectory(output_dir)
+ java_path = os.path.join(output_dir, RESOURCE_REWRITER_JAVA)
+ template = jinja2.Template(RESOURCE_REWRITER,
+ trim_blocks=True,
+ lstrip_blocks=True)
+ output = template.render(package=package, res_packages=res_packages)
+ with open(java_path, 'w') as f:
+ f.write(output)
+
+def CreateResourceRewriterSrcjar(package, res_packages, srcjar_path):
+ with build_utils.TempDir() as temp_dir:
+ output_dir = os.path.join(temp_dir, *package.split('.'))
+ CreateResourceRewriter(package, res_packages, output_dir)
+ build_utils.DoZip([os.path.join(output_dir, RESOURCE_REWRITER_JAVA)],
+ srcjar_path,
+ temp_dir)
+
+
+def main():
+ options = ParseArgs(build_utils.ExpandFileArgs(sys.argv[1:]))
+ package = options.package_name
+ if options.output_dir:
+ output_dir = os.path.join(options.output_dir, *package.split('.'))
+ CreateResourceRewriter(
+ package,
+ build_utils.ParseGnList(options.dep_packages),
+ output_dir)
+ else:
+ CreateResourceRewriterSrcjar(
+ package,
+ build_utils.ParseGnList(options.dep_packages),
+ options.srcjar)
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/generate_v14_compatible_resources.py b/deps/v8/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100755
index 0000000000..f9e8a3783a
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,281 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons why we cannot just use API 17 attributes,
+so this script generates another set of resources:
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+ JB-MR1. This is fixed on JB-MR2. b/8654490
+
+This resource generation script can therefore be removed when we drop
+support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import codecs
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all attributes that have "Start" or "End" in
+# their names should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+ 'drawableStart' : 'drawableLeft',
+ 'layout_alignStart' : 'layout_alignLeft',
+ 'layout_marginStart' : 'layout_marginLeft',
+ 'layout_alignParentStart' : 'layout_alignParentLeft',
+ 'layout_toStartOf' : 'layout_toLeftOf',
+ 'paddingEnd' : 'paddingRight',
+ 'drawableEnd' : 'drawableRight',
+ 'layout_alignEnd' : 'layout_alignRight',
+ 'layout_marginEnd' : 'layout_marginRight',
+ 'layout_alignParentEnd' : 'layout_alignParentRight',
+ 'layout_toEndOf' : 'layout_toRightOf'}
+
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+ in ATTRIBUTES_TO_MAP.iteritems())
+
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+ in ATTRIBUTES_TO_MAP.iteritems())
+
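+# For example, GenerateV14LayoutResourceDom() rewrites
+#   <TextView android:paddingStart="10dp" ... />
+# to
+#   <TextView android:paddingLeft="10dp" ... />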
+
+def IterateXmlElements(node):
+ """minidom helper function that iterates all the element nodes.
+ Iteration order is pre-order depth-first."""
+ if node.nodeType == node.ELEMENT_NODE:
+ yield node
+ for child_node in node.childNodes:
+ for child_node_element in IterateXmlElements(child_node):
+ yield child_node_element
+
+
+def ParseAndReportErrors(filename):
+ try:
+ return minidom.parse(filename)
+ except Exception: # pylint: disable=broad-except
+ import traceback
+ traceback.print_exc()
+ sys.stderr.write('Failed to parse XML file: %s\n' % filename)
+ sys.exit(1)
+
+
+def AssertNotDeprecatedAttribute(name, value, filename):
+ """Raises an exception if the given attribute is deprecated."""
+ msg = None
+ if name in ATTRIBUTES_TO_MAP_REVERSED:
+ msg = '{0} should use {1} instead of {2}'.format(filename,
+ ATTRIBUTES_TO_MAP_REVERSED[name], name)
+ elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
+ msg = '{0} should use start/end instead of left/right for {1}'.format(
+ filename, name)
+
+ if msg:
+ msg += ('\nFor background, see: http://android-developers.blogspot.com/'
+ '2013/03/native-rtl-support-in-android-42.html\n'
+ 'If you have a legitimate need for this attribute, discuss with '
+ 'kkimlabs@chromium.org or newt@chromium.org')
+ raise Exception(msg)
+
+
+def WriteDomToFile(dom, filename):
+ """Write the given dom to filename."""
+ build_utils.MakeDirectory(os.path.dirname(filename))
+ with codecs.open(filename, 'w', 'utf-8') as f:
+ dom.writexml(f, '', ' ', '\n', encoding='utf-8')
+
+
+def HasStyleResource(dom):
+ """Return True if the dom is a style resource, False otherwise."""
+ root_node = IterateXmlElements(dom).next()
+ return bool(root_node.nodeName == 'resources' and
+ list(root_node.getElementsByTagName('style')))
+
+
+def ErrorIfStyleResourceExistsInDir(input_dir):
+ """If a style resource is in input_dir, raises an exception."""
+ for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+ dom = ParseAndReportErrors(input_filename)
+ if HasStyleResource(dom):
+ # Allow style file in third_party to exist in non-v17 directories so long
+ # as they do not contain deprecated attributes.
+ if not 'third_party' in input_dir or (
+ GenerateV14StyleResourceDom(dom, input_filename)):
+ raise Exception('error: style file ' + input_filename +
+ ' should be under ' + input_dir +
+ '-v17 directory. Please refer to '
+ 'http://crbug.com/243952 for the details.')
+
+
+def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
+ """Convert layout resource to API 14 compatible layout resource.
+
+ Args:
+ dom: Parsed minidom object to be modified.
+ filename: Filename that the DOM was parsed from.
+ assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+ cause an exception to be thrown.
+
+ Returns:
+ True if dom is modified, False otherwise.
+ """
+ is_modified = False
+
+ # Iterate all the elements' attributes to find attributes to convert.
+ for element in IterateXmlElements(dom):
+ for name, value in list(element.attributes.items()):
+ # Convert any API 17 Start/End attributes to Left/Right attributes.
+ # For example, from paddingStart="10dp" to paddingLeft="10dp"
+ # Note: gravity attributes are not necessary to convert because
+ # start/end values are backward-compatible. Explained at
+ # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
+ if name in ATTRIBUTES_TO_MAP:
+ element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
+ del element.attributes[name]
+ is_modified = True
+ elif assert_not_deprecated:
+ AssertNotDeprecatedAttribute(name, value, filename)
+
+ return is_modified
+
+
+def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
+ """Convert style resource to API 14 compatible style resource.
+
+ Args:
+ dom: Parsed minidom object to be modified.
+ filename: Filename that the DOM was parsed from.
+ assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+ cause an exception to be thrown.
+
+ Returns:
+ True if dom is modified, False otherwise.
+ """
+ is_modified = False
+
+ for style_element in dom.getElementsByTagName('style'):
+ for item_element in style_element.getElementsByTagName('item'):
+ name = item_element.attributes['name'].value
+ value = item_element.childNodes[0].nodeValue
+ if name in ATTRIBUTES_TO_MAP:
+ item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
+ is_modified = True
+ elif assert_not_deprecated:
+ AssertNotDeprecatedAttribute(name, value, filename)
+
+ return is_modified
+
+
+def GenerateV14LayoutResource(input_filename, output_v14_filename,
+ output_v17_filename):
+ """Convert API 17 layout resource to API 14 compatible layout resource.
+
+ It's mostly a simple replacement, s/Start/Left s/End/Right,
+ on the attribute names.
+ If the generated resource is identical to the original resource,
+ don't do anything. If not, write the generated resource to
+ output_v14_filename, and copy the original resource to output_v17_filename.
+ """
+ dom = ParseAndReportErrors(input_filename)
+ is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
+
+ if is_modified:
+ # Write the generated resource.
+ WriteDomToFile(dom, output_v14_filename)
+
+ # Copy the original resource.
+ build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
+ shutil.copy2(input_filename, output_v17_filename)
+
+
+def GenerateV14StyleResource(input_filename, output_v14_filename):
+ """Convert API 17 style resources to API 14 compatible style resource.
+
+ Write the generated style resource to output_v14_filename.
+ It's mostly a simple replacement, s/Start/Left s/End/Right,
+ on the attribute names.
+ """
+ dom = ParseAndReportErrors(input_filename)
+ GenerateV14StyleResourceDom(dom, input_filename)
+
+ # Write the generated resource.
+ WriteDomToFile(dom, output_v14_filename)
+
+
+def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
+ """Convert layout resources to API 14 compatible resources in input_dir."""
+ for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+ rel_filename = os.path.relpath(input_filename, input_dir)
+ output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+ output_v17_filename = os.path.join(output_v17_dir, rel_filename)
+ GenerateV14LayoutResource(input_filename, output_v14_filename,
+ output_v17_filename)
+
+
+def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
+ """Convert style resources to API 14 compatible resources in input_dir."""
+ for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+ rel_filename = os.path.relpath(input_filename, input_dir)
+ output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+ GenerateV14StyleResource(input_filename, output_v14_filename)
+
+
+def GenerateV14Resources(res_dir, res_v14_dir):
+ for name in os.listdir(res_dir):
+ if not os.path.isdir(os.path.join(res_dir, name)):
+ continue
+
+ dir_pieces = name.split('-')
+ resource_type = dir_pieces[0]
+ qualifiers = dir_pieces[1:]
+
+ api_level_qualifier_index = -1
+ api_level_qualifier = ''
+ for index, qualifier in enumerate(qualifiers):
+ if re.match('v[0-9]+$', qualifier):
+ api_level_qualifier_index = index
+ api_level_qualifier = qualifier
+ break
+
+ # Android pre-v17 API doesn't support RTL. Skip.
+ if 'ldrtl' in qualifiers:
+ continue
+
+ input_dir = os.path.abspath(os.path.join(res_dir, name))
+
+ # We also need to copy the original v17 resource to *-v17 directory
+ # because the generated v14 resource will hide the original resource.
+ output_v14_dir = os.path.join(res_v14_dir, name)
+ output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
+
+ # We only convert layout resources under layout*/, xml*/,
+ # and style resources under values*/.
+ if resource_type in ('layout', 'xml'):
+ if not api_level_qualifier:
+ GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
+ output_v17_dir)
+ elif resource_type == 'values':
+ if api_level_qualifier == 'v17':
+ output_qualifiers = qualifiers[:]
+ del output_qualifiers[api_level_qualifier_index]
+ output_v14_dir = os.path.join(res_v14_dir,
+ '-'.join([resource_type] +
+ output_qualifiers))
+ GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
+ elif not api_level_qualifier:
+ ErrorIfStyleResourceExistsInDir(input_dir)
\ No newline at end of file
diff --git a/deps/v8/build/android/gyp/ijar.py b/deps/v8/build/android/gyp/ijar.py
new file mode 100755
index 0000000000..89108087ed
--- /dev/null
+++ b/deps/v8/build/android/gyp/ijar.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def main():
+ # The point of this wrapper is to use AtomicOutput so that output timestamps
+ # are not updated when outputs are unchanged.
+ ijar_bin, in_jar, out_jar = sys.argv[1:]
+ with build_utils.AtomicOutput(out_jar) as f:
+ subprocess.check_call([ijar_bin, in_jar, f.name])
+
+
+if __name__ == '__main__':
+ main()
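AtomicOutput is implemented in util/build_utils.py; as a rough sketch of the idea (illustrative, not the actual implementation), it stages output in a temporary file and only replaces the destination when the bytes differ, so unchanged outputs keep their timestamps and do not retrigger dependent build steps:

    import contextlib
    import filecmp
    import os
    import tempfile

    @contextlib.contextmanager
    def atomic_output(path):
        # Stage in the destination directory so the final rename is atomic.
        tmp = tempfile.NamedTemporaryFile(
            delete=False, dir=os.path.dirname(path) or '.')
        try:
            yield tmp
            tmp.close()
            if os.path.exists(path) and filecmp.cmp(tmp.name, path,
                                                    shallow=False):
                os.unlink(tmp.name)  # Unchanged: keep old file and timestamp.
            else:
                os.rename(tmp.name, path)
        finally:
            if os.path.exists(tmp.name):
                os.unlink(tmp.name)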
diff --git a/deps/v8/build/android/gyp/ijar.pydeps b/deps/v8/build/android/gyp/ijar.pydeps
new file mode 100644
index 0000000000..ca10697c1f
--- /dev/null
+++ b/deps/v8/build/android/gyp/ijar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../gn_helpers.py
+ijar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/jar.py b/deps/v8/build/android/gyp/jar.py
new file mode 100755
index 0000000000..7f2c9f0b21
--- /dev/null
+++ b/deps/v8/build/android/gyp/jar.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def Jar(class_files,
+ classes_dir,
+ jar_path,
+ provider_configurations=None,
+ additional_files=None):
+ files = [(os.path.relpath(f, classes_dir), f) for f in class_files]
+
+ if additional_files:
+ for filepath, jar_filepath in additional_files:
+ files.append((jar_filepath, filepath))
+
+ if provider_configurations:
+ for config in provider_configurations:
+ files.append(('META-INF/services/' + os.path.basename(config), config))
+
+ # Zeros out timestamps so that builds are hermetic.
+ with build_utils.AtomicOutput(jar_path) as f:
+ build_utils.DoZip(files, f)
+
+
+def JarDirectory(classes_dir,
+ jar_path,
+ predicate=None,
+ provider_configurations=None,
+ additional_files=None):
+ all_files = build_utils.FindInDirectory(classes_dir, '*')
+ if predicate:
+ all_files = [
+ f for f in all_files if predicate(os.path.relpath(f, classes_dir))]
+ all_files.sort()
+
+ Jar(all_files,
+ classes_dir,
+ jar_path,
+ provider_configurations=provider_configurations,
+ additional_files=additional_files)
+
+
+def _CreateFilterPredicate(excluded_classes, included_classes):
+ if not excluded_classes and not included_classes:
+ return None
+
+ def predicate(f):
+    # Exclude filters take precedence over include filters.
+ if build_utils.MatchesGlob(f, excluded_classes):
+ return False
+ if included_classes and not build_utils.MatchesGlob(f, included_classes):
+ return False
+ return True
+
+ return predicate
+
+
+# TODO(agrieve): Change components/cronet/android/BUILD.gn to use filter_zip.py
+# and delete main().
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('--classes-dir', help='Directory containing .class files.')
+ parser.add_option('--jar-path', help='Jar output path.')
+ parser.add_option('--excluded-classes',
+ help='GN list of .class file patterns to exclude from the jar.')
+ parser.add_option('--included-classes',
+ help='GN list of .class file patterns to include in the jar.')
+
+ args = build_utils.ExpandFileArgs(sys.argv[1:])
+ options, _ = parser.parse_args(args)
+
+ excluded_classes = []
+ if options.excluded_classes:
+ excluded_classes = build_utils.ParseGnList(options.excluded_classes)
+ included_classes = []
+ if options.included_classes:
+ included_classes = build_utils.ParseGnList(options.included_classes)
+
+ predicate = _CreateFilterPredicate(excluded_classes, included_classes)
+ JarDirectory(options.classes_dir, options.jar_path, predicate=predicate)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
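The predicate built by _CreateFilterPredicate gives exclusion patterns priority over inclusion patterns. A standalone sketch of that semantics, with fnmatch standing in for build_utils.MatchesGlob (an assumption made for illustration):

    import fnmatch

    def make_predicate(excluded, included):
        # Excludes take precedence over includes, as in _CreateFilterPredicate.
        def predicate(path):
            if any(fnmatch.fnmatch(path, g) for g in excluded):
                return False
            if included and not any(fnmatch.fnmatch(path, g) for g in included):
                return False
            return True
        return predicate

    pred = make_predicate(['*Test.class'], ['org/chromium/*'])
    print(pred('org/chromium/Foo.class'))      # True: included, not excluded
    print(pred('org/chromium/FooTest.class'))  # False: exclusion wins
    print(pred('com/other/Bar.class'))         # False: not matched by includes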
diff --git a/deps/v8/build/android/gyp/java_cpp_enum.py b/deps/v8/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000000..bacc8e3d46
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,435 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import re
+import optparse
+import os
+from string import Template
+import sys
+import textwrap
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files; however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
+ 'short', 'unsigned short',
+ 'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
+
+class EnumDefinition(object):
+ def __init__(self, original_enum_name=None, class_name_override=None,
+ enum_package=None, entries=None, comments=None, fixed_type=None):
+ self.original_enum_name = original_enum_name
+ self.class_name_override = class_name_override
+ self.enum_package = enum_package
+ self.entries = collections.OrderedDict(entries or [])
+ self.comments = collections.OrderedDict(comments or [])
+ self.prefix_to_strip = None
+ self.fixed_type = fixed_type
+
+ def AppendEntry(self, key, value):
+ if key in self.entries:
+ raise Exception('Multiple definitions of key %s found.' % key)
+ self.entries[key] = value
+
+ def AppendEntryComment(self, key, value):
+ if key in self.comments:
+ raise Exception('Multiple definitions of key %s found.' % key)
+ self.comments[key] = value
+
+ @property
+ def class_name(self):
+ return self.class_name_override or self.original_enum_name
+
+ def Finalize(self):
+ self._Validate()
+ self._AssignEntryIndices()
+ self._StripPrefix()
+ self._NormalizeNames()
+
+ def _Validate(self):
+ assert self.class_name
+ assert self.enum_package
+ assert self.entries
+ if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
+ raise Exception('Fixed type %s for enum %s not whitelisted.' %
+ (self.fixed_type, self.class_name))
+
+ def _AssignEntryIndices(self):
+ # Enums, if given no value, are given the value of the previous enum + 1.
+ if not all(self.entries.values()):
+ prev_enum_value = -1
+ for key, value in self.entries.iteritems():
+ if not value:
+ self.entries[key] = prev_enum_value + 1
+ elif value in self.entries:
+ self.entries[key] = self.entries[value]
+ else:
+ try:
+ self.entries[key] = int(value)
+ except ValueError:
+ raise Exception('Could not interpret integer from enum value "%s" '
+ 'for key %s.' % (value, key))
+ prev_enum_value = self.entries[key]
+
+
+ def _StripPrefix(self):
+ prefix_to_strip = self.prefix_to_strip
+ if not prefix_to_strip:
+ shout_case = self.original_enum_name
+ shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper()
+ shout_case += '_'
+
+ prefixes = [shout_case, self.original_enum_name,
+ 'k' + self.original_enum_name]
+
+ for prefix in prefixes:
+ if all([w.startswith(prefix) for w in self.entries.keys()]):
+ prefix_to_strip = prefix
+ break
+ else:
+ prefix_to_strip = ''
+
+ def StripEntries(entries):
+ ret = collections.OrderedDict()
+ for k, v in entries.iteritems():
+ stripped_key = k.replace(prefix_to_strip, '', 1)
+ if isinstance(v, basestring):
+ stripped_value = v.replace(prefix_to_strip, '')
+ else:
+ stripped_value = v
+ ret[stripped_key] = stripped_value
+
+ return ret
+
+ self.entries = StripEntries(self.entries)
+ self.comments = StripEntries(self.comments)
+
+ def _NormalizeNames(self):
+ self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty)
+ self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty)
+
+
+def _TransformKeys(d, func):
+ """Normalize keys in |d| and update references to old keys in |d| values."""
+ normal_keys = {k: func(k) for k in d}
+ ret = collections.OrderedDict()
+ for k, v in d.iteritems():
+ # Need to transform values as well when the entry value was explicitly set
+ # (since it could contain references to other enum entry values).
+ if isinstance(v, basestring):
+ for normal_key in normal_keys:
+ v = v.replace(normal_key, normal_keys[normal_key])
+ ret[normal_keys[k]] = v
+ return ret
+
+
+class DirectiveSet(object):
+ class_name_override_key = 'CLASS_NAME_OVERRIDE'
+ enum_package_key = 'ENUM_PACKAGE'
+ prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+ known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+ def __init__(self):
+ self._directives = {}
+
+ def Update(self, key, value):
+ if key not in DirectiveSet.known_keys:
+ raise Exception("Unknown directive: " + key)
+ self._directives[key] = value
+
+ @property
+ def empty(self):
+ return len(self._directives) == 0
+
+ def UpdateDefinition(self, definition):
+ definition.class_name_override = self._directives.get(
+ DirectiveSet.class_name_override_key, '')
+ definition.enum_package = self._directives.get(
+ DirectiveSet.enum_package_key)
+ definition.prefix_to_strip = self._directives.get(
+ DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+ single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)')
+ multi_line_comment_start_re = re.compile(r'\s*/\*')
+ enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+ enum_end_re = re.compile(r'^\s*}\s*;\.*$')
+ generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$')
+ generator_directive_re = re.compile(
+ r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
+ multi_line_generator_directive_start_re = re.compile(
+ r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+ multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$')
+ multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$')
+
+ optional_class_or_struct_re = r'(class|struct)?'
+ enum_name_re = r'(\w+)'
+ optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+ enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+ optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
+ optional_fixed_type_re + '\s*{\s*')
+ enum_single_line_re = re.compile(
+ r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P<enum_entries>.*)}.*$')
+
+ def __init__(self, lines, path=''):
+ self._lines = lines
+ self._path = path
+ self._enum_definitions = []
+ self._in_enum = False
+ self._current_definition = None
+ self._current_comments = []
+ self._generator_directives = DirectiveSet()
+ self._multi_line_generator_directive = None
+ self._current_enum_entry = ''
+
+ def _ApplyGeneratorDirectives(self):
+ self._generator_directives.UpdateDefinition(self._current_definition)
+ self._generator_directives = DirectiveSet()
+
+ def ParseDefinitions(self):
+ for line in self._lines:
+ self._ParseLine(line)
+ return self._enum_definitions
+
+ def _ParseLine(self, line):
+ if self._multi_line_generator_directive:
+ self._ParseMultiLineDirectiveLine(line)
+ elif not self._in_enum:
+ self._ParseRegularLine(line)
+ else:
+ self._ParseEnumLine(line)
+
+ def _ParseEnumLine(self, line):
+ if HeaderParser.multi_line_comment_start_re.match(line):
+ raise Exception('Multi-line comments in enums are not supported in ' +
+ self._path)
+
+ enum_comment = HeaderParser.single_line_comment_re.match(line)
+ if enum_comment:
+ comment = enum_comment.groups()[0]
+ if comment:
+ self._current_comments.append(comment)
+ elif HeaderParser.enum_end_re.match(line):
+ self._FinalizeCurrentEnumDefinition()
+ else:
+ self._AddToCurrentEnumEntry(line)
+ if ',' in line:
+ self._ParseCurrentEnumEntry()
+
+ def _ParseSingleLineEnum(self, line):
+ for entry in line.split(','):
+ self._AddToCurrentEnumEntry(entry)
+ self._ParseCurrentEnumEntry()
+
+ self._FinalizeCurrentEnumDefinition()
+
+ def _ParseCurrentEnumEntry(self):
+ if not self._current_enum_entry:
+ return
+
+ enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry)
+ if not enum_entry:
+ raise Exception('Unexpected error while attempting to parse %s as enum '
+ 'entry.' % self._current_enum_entry)
+
+ enum_key = enum_entry.groups()[0]
+ enum_value = enum_entry.groups()[2]
+ self._current_definition.AppendEntry(enum_key, enum_value)
+ if self._current_comments:
+ self._current_definition.AppendEntryComment(
+ enum_key, ' '.join(self._current_comments))
+ self._current_comments = []
+ self._current_enum_entry = ''
+
+ def _AddToCurrentEnumEntry(self, line):
+ self._current_enum_entry += ' ' + line.strip()
+
+ def _FinalizeCurrentEnumDefinition(self):
+ if self._current_enum_entry:
+ self._ParseCurrentEnumEntry()
+ self._ApplyGeneratorDirectives()
+ self._current_definition.Finalize()
+ self._enum_definitions.append(self._current_definition)
+ self._current_definition = None
+ self._in_enum = False
+
+ def _ParseMultiLineDirectiveLine(self, line):
+ multi_line_directive_continuation = (
+ HeaderParser.multi_line_directive_continuation_re.match(line))
+ multi_line_directive_end = (
+ HeaderParser.multi_line_directive_end_re.match(line))
+
+ if multi_line_directive_continuation:
+ value_cont = multi_line_directive_continuation.groups()[0]
+ self._multi_line_generator_directive[1].append(value_cont)
+ elif multi_line_directive_end:
+ directive_name = self._multi_line_generator_directive[0]
+ directive_value = "".join(self._multi_line_generator_directive[1])
+ directive_value += multi_line_directive_end.groups()[0]
+ self._multi_line_generator_directive = None
+ self._generator_directives.Update(directive_name, directive_value)
+ else:
+ raise Exception('Malformed multi-line directive declaration in ' +
+ self._path)
+
+ def _ParseRegularLine(self, line):
+ enum_start = HeaderParser.enum_start_re.match(line)
+ generator_directive_error = HeaderParser.generator_error_re.match(line)
+ generator_directive = HeaderParser.generator_directive_re.match(line)
+ multi_line_generator_directive_start = (
+ HeaderParser.multi_line_generator_directive_start_re.match(line))
+ single_line_enum = HeaderParser.enum_single_line_re.match(line)
+
+ if generator_directive_error:
+ raise Exception('Malformed directive declaration in ' + self._path +
+ '. Use () for multi-line directives. E.g.\n' +
+ '// GENERATED_JAVA_ENUM_PACKAGE: (\n' +
+ '// foo.package)')
+ elif generator_directive:
+ directive_name = generator_directive.groups()[0]
+ directive_value = generator_directive.groups()[1]
+ self._generator_directives.Update(directive_name, directive_value)
+ elif multi_line_generator_directive_start:
+ directive_name = multi_line_generator_directive_start.groups()[0]
+ directive_value = multi_line_generator_directive_start.groups()[1]
+ self._multi_line_generator_directive = (directive_name, [directive_value])
+ elif enum_start or single_line_enum:
+ if self._generator_directives.empty:
+ return
+ self._current_definition = EnumDefinition(
+ original_enum_name=enum_start.groups()[1],
+ fixed_type=enum_start.groups()[3])
+ self._in_enum = True
+ if single_line_enum:
+ self._ParseSingleLineEnum(single_line_enum.group('enum_entries'))
+
+
+def DoGenerate(source_paths):
+ for source_path in source_paths:
+ enum_definitions = DoParseHeaderFile(source_path)
+ if not enum_definitions:
+ raise Exception('No enums found in %s\n'
+ 'Did you forget prefixing enums with '
+ '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
+ source_path)
+ for enum_definition in enum_definitions:
+ package_path = enum_definition.enum_package.replace('.', os.path.sep)
+ file_name = enum_definition.class_name + '.java'
+ output_path = os.path.join(package_path, file_name)
+ output = GenerateOutput(source_path, enum_definition)
+ yield output_path, output
+
+
+def DoParseHeaderFile(path):
+ with open(path) as f:
+ return HeaderParser(f.readlines(), path).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+ template = Template("""
+// Copyright ${YEAR} The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// ${SCRIPT_NAME}
+// From
+// ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+import android.support.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+${INT_DEF}
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+ enum_template = Template(' int ${NAME} = ${VALUE};')
+ enum_entries_string = []
+ enum_names = []
+ for enum_name, enum_value in enum_definition.entries.iteritems():
+ values = {
+ 'NAME': enum_name,
+ 'VALUE': enum_value,
+ }
+ enum_comments = enum_definition.comments.get(enum_name)
+ if enum_comments:
+ enum_comments_indent = ' * '
+ comments_line_wrapper = textwrap.TextWrapper(
+ initial_indent=enum_comments_indent,
+ subsequent_indent=enum_comments_indent,
+ width=100)
+ enum_entries_string.append(' /**')
+ enum_entries_string.append('\n'.join(
+ comments_line_wrapper.wrap(enum_comments)))
+ enum_entries_string.append(' */')
+ enum_entries_string.append(enum_template.substitute(values))
+ if enum_name != "NUM_ENTRIES":
+ enum_names.append(enum_definition.class_name + '.' + enum_name)
+ enum_entries_string = '\n'.join(enum_entries_string)
+
+ enum_names_indent = ' ' * 4
+ wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent,
+ subsequent_indent = enum_names_indent,
+ width = 100)
+ enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names)))
+
+ values = {
+ 'CLASS_NAME': enum_definition.class_name,
+ 'ENUM_ENTRIES': enum_entries_string,
+ 'PACKAGE': enum_definition.enum_package,
+ 'INT_DEF': enum_names_string,
+ 'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+ 'SOURCE_PATH': source_path,
+ 'YEAR': str(date.today().year)
+ }
+ return template.substitute(values)
+
+
+def DoMain(argv):
+ usage = 'usage: %prog [options] [output_dir] input_file(s)...'
+ parser = optparse.OptionParser(usage=usage)
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--srcjar',
+ help='When specified, a .srcjar at the given path is '
+ 'created instead of individual .java files.')
+
+ options, args = parser.parse_args(argv)
+
+ if not args:
+ parser.error('Need to specify at least one input file')
+ input_paths = args
+
+ with build_utils.AtomicOutput(options.srcjar) as f:
+ with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+ for output_path, data in DoGenerate(input_paths):
+ build_utils.AddToZipHermetic(srcjar, output_path, data=data)
+
+ if options.depfile:
+ build_utils.WriteDepfile(options.depfile, options.srcjar, add_pydeps=False)
+
+
+if __name__ == '__main__':
+ DoMain(sys.argv[1:])
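A small hedged usage sketch of the parser and value-assignment rules above (assuming the script's Python 2 runtime, run from the gyp directory so the util package resolves):

    import java_cpp_enum

    header = """
    // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.example
    enum MyEnum {
      MY_ENUM_FOO,
      MY_ENUM_BAR = 5,
    };
    """.split('\n')

    definitions = java_cpp_enum.HeaderParser(header).ParseDefinitions()
    # The implicit MY_ENUM_ prefix is stripped; an unvalued entry gets the
    # previous entry's value + 1, so FOO becomes 0 and BAR stays 5.
    print(definitions[0].entries)  # OrderedDict([('FOO', 0), ('BAR', 5)])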
diff --git a/deps/v8/build/android/gyp/java_cpp_enum.pydeps b/deps/v8/build/android/gyp/java_cpp_enum.pydeps
new file mode 100644
index 0000000000..d5869edddf
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_enum.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py
+../../gn_helpers.py
+java_cpp_enum.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/java_cpp_enum_tests.py b/deps/v8/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000000..5717047c7a
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_enum.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput
+from java_cpp_enum import HeaderParser
+from util import java_cpp_utils
+
+
+class TestPreprocess(unittest.TestCase):
+ def testOutput(self):
+ definition = EnumDefinition(original_enum_name='ClassName',
+ enum_package='some.package',
+ entries=[('E1', 1), ('E2', '2 << 2')],
+ comments=[('E2', 'This is a comment.'),
+ ('E1', 'This is a multiple line '
+ 'comment that is really long. '
+ 'This is a multiple line '
+ 'comment that is really '
+ 'really long.')])
+ output = GenerateOutput('path/to/file', definition)
+ expected = """
+// Copyright %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// %s
+// From
+// path/to/file
+
+package some.package;
+
+import android.support.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+ ClassName.E1, ClassName.E2
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ClassName {
+ /**
+ * %s
+ * really really long.
+ */
+ int E1 = 1;
+ /**
+ * This is a comment.
+ */
+ int E2 = 2 << 2;
+}
+"""
+ long_comment = ('This is a multiple line comment that is really long. '
+ 'This is a multiple line comment that is')
+ self.assertEqual(
+ expected % (date.today().year, java_cpp_utils.GetScriptName(),
+ long_comment), output)
+
+ def testParseSimpleEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ VALUE_ZERO,
+ VALUE_ONE,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumName', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+ ('VALUE_ONE', 1)]),
+ definition.entries)
+
+ def testParseBitShifts(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ VALUE_ZERO = 1 << 0,
+ VALUE_ONE = 1 << 1,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ ENUM_NAME_ZERO = 1 << 0,
+ ENUM_NAME_ONE = 1 << 1,
+ ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumName', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+ ('VALUE_ONE', '1 << 1')]),
+ definition.entries)
+
+ definition = definitions[1]
+ expected_entries = collections.OrderedDict([
+ ('ZERO', '1 << 0'),
+ ('ONE', '1 << 1'),
+ ('TWO', 'ZERO | ONE')])
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseMultilineEnumEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1 << 0,
+ VALUE_ONE =
+ SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey,
+ VALUE_TWO = 1 << 18,
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1 << 0'),
+ ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+ 'ControlKey'),
+ ('VALUE_TWO', '1 << 18')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseEnumEntryWithTrailingMultilineEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1,
+ VALUE_ONE =
+ SymbolKey | FnKey | AltGrKey | MetaKey |
+ AltKey | ControlKey | ShiftKey,
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1'),
+ ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+ 'ControlKey | ShiftKey')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseNoCommaAfterLastEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1,
+
+ // This is a multiline
+ //
+ // comment with an empty line.
+ VALUE_ONE = 2
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1'),
+ ('VALUE_ONE', '2')])
+ expected_comments = collections.OrderedDict([
+ ('VALUE_ONE', 'This is a multiline comment with an empty line.')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+ self.assertEqual(expected_comments, definition.comments)
+
+ def testParseClassNameOverride(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ FOO
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+ enum PrefixTest {
+ PREFIX_TEST_A,
+ PREFIX_TEST_B,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('OverrideName', definition.class_name)
+
+ definition = definitions[1]
+ self.assertEqual('OtherOverride', definition.class_name)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParsePreservesCommentsWhenPrefixStripping(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ ENUM_ONE_A = 1,
+ // Comment there
+ ENUM_ONE_B = A,
+ };
+
+ enum EnumIgnore {
+ C, D, E
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo {
+ P_A,
+ // This comment spans
+ // two lines.
+ P_B
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumOne', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', 'A')]),
+ definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+ definition.comments)
+ definition = definitions[1]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict(
+ [('B', 'This comment spans two lines.')]), definition.comments)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParseTwoEnums(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum AnEnum {
+ ENUM_ONE_A = 1,
+ ENUM_ONE_B = A,
+ };
+
+ enum EnumIgnore {
+ C, D, E
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ enum EnumTwo {
+ P_A,
+ P_B
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('AnEnum', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'),
+ ('ENUM_ONE_B', 'A')]),
+ definition.entries)
+ definition = definitions[1]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('P_A', 0),
+ ('P_B', 1)]),
+ definition.entries)
+
+ def testParseSingleLineEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo { P_A, P_B };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParseWithStrippingAndRelativeReferences(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo {
+ P_A = 1,
+ // P_A is old-don't use P_A.
+ P_B = P_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', 'A')]),
+ definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]),
+ definition.comments)
+
+ def testParseSingleLineAndRegularEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ ENUM_ONE_A = 1,
+ // Comment there
+ ENUM_ONE_B = A,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ enum EnumTwo { P_A, P_B };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ ENUM_NAME_FOO
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+ definition.comments)
+
+ self.assertEqual(3, len(definitions))
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries)
+
+ definition = definitions[2]
+ self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries)
+
+ def testParseWithCamelCaseNames(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumTest {
+ EnumTestA = 1,
+ // comment for EnumTestB.
+ EnumTestB = 2,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_PREFIX_TO_STRIP: Test
+ enum AnEnum {
+ TestHTTPOption,
+ TestHTTPSOption,
+ };
+
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('A', '1'), ('B', '2')]),
+ definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([('B', 'comment for B.')]),
+ definition.comments)
+
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]),
+ definition.entries)
+
+ def testParseWithKCamelCaseNames(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ kEnumOne = 1,
+ // comment for kEnumTwo.
+ kEnumTwo = 2,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ kEnumNameFoo,
+ kEnumNameBar
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ kEnumNameFoo,
+ kEnumBar,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Keys {
+ kSymbolKey = 1 << 0,
+ kAltKey = 1 << 1,
+ kUpKey = 1 << 2,
+ kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Mixed {
+ kTestVal,
+ kCodecMPEG2
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]),
+ definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]),
+ definition.comments)
+
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('FOO', 0), ('BAR', 1)]),
+ definition.entries)
+
+ definition = definitions[2]
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]),
+ definition.entries)
+
+ definition = definitions[3]
+ expected_entries = collections.OrderedDict([
+ ('SYMBOL_KEY', '1 << 0'),
+ ('ALT_KEY', '1 << 1'),
+ ('UP_KEY', '1 << 2'),
+ ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')])
+ self.assertEqual(expected_entries, definition.entries)
+
+ definition = definitions[4]
+ self.assertEqual(
+ collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]),
+ definition.entries)
+
+ def testParseThrowsOnUnknownDirective(self):
+ test_data = """
+ // GENERATED_JAVA_UNKNOWN: Value
+ enum EnumName {
+ VALUE_ONE,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseReturnsEmptyListWithoutDirectives(self):
+ test_data = """
+ enum EnumName {
+ VALUE_ONE,
+ };
+ """.split('\n')
+ self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+ def testParseEnumClass(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseEnumStruct(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum struct Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseFixedTypeEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Foo : int {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual('int', definition.fixed_type)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseFixedTypeEnumClass(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo: unsigned short {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual('unsigned short', definition.fixed_type)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseUnknownFixedTypeRaises(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo: foo_type {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseSimpleMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.namespace)
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+ self.assertEqual('Bar', definitions[0].class_name)
+
+ def testParseMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (te
+ // st.name
+ // space)
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+
+ def testParseMultiLineDirectiveWithOtherDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.namespace)
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+ // Ba
+ // r
+ // )
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+ self.assertEqual('Bar', definitions[0].class_name)
+
+ def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.name
+ // space
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.name
+ // space
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirectiveShort(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirectiveMissingBrackets(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE:
+ // test.namespace
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testEnumValueAssignmentNoneDefined(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('C', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2)]),
+ definition.entries)
+
+ def testEnumValueAssignmentAllDefined(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', '1')
+ definition.AppendEntry('B', '2')
+ definition.AppendEntry('C', '3')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', '2'),
+ ('C', '3')]),
+ definition.entries)
+
+ def testEnumValueAssignmentReferences(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'A')
+ definition.AppendEntry('C', None)
+ definition.AppendEntry('D', 'C')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 0),
+ ('C', 1),
+ ('D', 1)]),
+ definition.entries)
+
+ def testEnumValueAssignmentSet(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', '2')
+ definition.AppendEntry('C', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 2),
+ ('C', 3)]),
+ definition.entries)
+
+ def testEnumValueAssignmentSetReferences(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'A')
+ definition.AppendEntry('C', 'B')
+ definition.AppendEntry('D', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 0),
+ ('C', 0),
+ ('D', 1)]),
+ definition.entries)
+
+ def testEnumValueAssignmentRaises(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'foo')
+ definition.AppendEntry('C', None)
+ with self.assertRaises(Exception):
+ definition.Finalize()
+
+ def testExplicitPrefixStripping(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('P_A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('P_C', None)
+ definition.AppendEntry('P_LAST', 'P_C')
+ definition.prefix_to_strip = 'P_'
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2),
+ ('LAST', 2)]),
+ definition.entries)
+
+ def testImplicitPrefixStripping(self):
+ definition = EnumDefinition(original_enum_name='ClassName',
+ enum_package='p')
+ definition.AppendEntry('CLASS_NAME_A', None)
+ definition.AppendEntry('CLASS_NAME_B', None)
+ definition.AppendEntry('CLASS_NAME_C', None)
+ definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2),
+ ('LAST', 2)]),
+ definition.entries)
+
+ def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+ definition = EnumDefinition(original_enum_name='Name',
+ enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('NAME_LAST', None)
+ definition.Finalize()
+ self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())
+
+ def testGenerateThrowsOnEmptyInput(self):
+ with self.assertRaises(Exception):
+ original_do_parse = java_cpp_enum.DoParseHeaderFile
+ try:
+ java_cpp_enum.DoParseHeaderFile = lambda _: []
+ for _ in java_cpp_enum.DoGenerate(['file']):
+ pass
+ finally:
+ java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/java_cpp_strings.py b/deps/v8/build/android/gyp/java_cpp_strings.py
new file mode 100755
index 0000000000..acaaf223ef
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_strings.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+def _ToUpper(match):
+ return match.group(1).upper()
+
+
+def _GetClassName(source_path):
+ name = os.path.basename(os.path.abspath(source_path))
+ (name, _) = os.path.splitext(name)
+ name = re.sub(r'_([a-z])', _ToUpper, name)
+ name = re.sub(r'^(.)', _ToUpper, name)
+ return name
+
+
+class _String(object):
+
+ def __init__(self, name, value, comments):
+ self.name = java_cpp_utils.KCamelToShouty(name)
+ self.value = value
+ self.comments = '\n'.join(' ' + x for x in comments)
+
+ def Format(self):
+ return '%s\n public static final String %s = %s;' % (
+ self.comments, self.name, self.value)
+
+
+def ParseTemplateFile(lines):
+ package_re = re.compile(r'^package (.*);')
+ class_re = re.compile(r'.*class (.*) {')
+ package = ''
+ class_name = ''
+ for line in lines:
+ package_line = package_re.match(line)
+ if package_line:
+ package = package_line.groups()[0]
+ class_line = class_re.match(line)
+ if class_line:
+ class_name = class_line.groups()[0]
+ break
+ return package, class_name
+
+
+# TODO(crbug.com/937282): It should be possible to parse a file for more than
+# string constants. However, this currently only handles extracting string
+# constants from a file (and all string constants from that file). Work will
+# be needed if we want to annotate specific constants or non-string constants
+# in the file to be parsed.
+class StringFileParser(object):
+ SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')
+ STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=\s*(?:(".*"))?')
+ VALUE_RE = re.compile(r'\s*("[^"]*")')
+
+ def __init__(self, lines, path=''):
+ self._lines = lines
+ self._path = path
+ self._in_string = False
+ self._in_comment = False
+ self._package = ''
+ self._current_comments = []
+ self._current_name = ''
+ self._current_value = ''
+ self._strings = []
+
+ def _Reset(self):
+ self._current_comments = []
+ self._current_name = ''
+ self._current_value = ''
+ self._in_string = False
+ self._in_comment = False
+
+ def _AppendString(self):
+ self._strings.append(
+ _String(self._current_name, self._current_value,
+ self._current_comments))
+ self._Reset()
+
+ def _ParseValue(self, line):
+ value_line = StringFileParser.VALUE_RE.match(line)
+ if value_line:
+ self._current_value = value_line.groups()[0]
+ self._AppendString()
+ else:
+ self._Reset()
+
+ def _ParseComment(self, line):
+ comment_line = StringFileParser.SINGLE_LINE_COMMENT_RE.match(line)
+ if comment_line:
+ self._current_comments.append(comment_line.groups()[0])
+ self._in_comment = True
+ self._in_string = True
+ return True
+ else:
+ self._in_comment = False
+ return False
+
+ def _ParseString(self, line):
+ string_line = StringFileParser.STRING_RE.match(line)
+ if string_line:
+ self._current_name = string_line.groups()[0]
+ if string_line.groups()[1]:
+ self._current_value = string_line.groups()[1]
+ self._AppendString()
+ return True
+ else:
+ self._in_string = False
+ return False
+
+ def _ParseLine(self, line):
+ if not self._in_string:
+ if not self._ParseString(line):
+ self._ParseComment(line)
+ return
+
+ if self._in_comment:
+ if self._ParseComment(line):
+ return
+ if not self._ParseString(line):
+ self._Reset()
+ return
+
+ if self._in_string:
+ self._ParseValue(line)
+
+ def Parse(self):
+ for line in self._lines:
+ self._ParseLine(line)
+ return self._strings
+
+
+def _GenerateOutput(template, source_path, template_path, strings):
+ description_template = """
+  // The following string constants were inserted by
+ // {SCRIPT_NAME}
+ // From
+ // {SOURCE_PATH}
+ // Into
+ // {TEMPLATE_PATH}
+
+"""
+ values = {
+ 'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+ 'SOURCE_PATH': source_path,
+ 'TEMPLATE_PATH': template_path,
+ }
+ description = description_template.format(**values)
+ native_strings = '\n\n'.join(x.Format() for x in strings)
+
+ values = {
+ 'NATIVE_STRINGS': description + native_strings,
+ }
+ return template.format(**values)
+
+
+def _ParseStringFile(path):
+ with open(path) as f:
+ return StringFileParser(f.readlines(), path).Parse()
+
+
+def _Generate(source_paths, template_path):
+ with open(template_path) as f:
+ lines = f.readlines()
+ template = ''.join(lines)
+ for source_path in source_paths:
+ strings = _ParseStringFile(source_path)
+ package, class_name = ParseTemplateFile(lines)
+ package_path = package.replace('.', os.path.sep)
+ file_name = class_name + '.java'
+ output_path = os.path.join(package_path, file_name)
+ output = _GenerateOutput(template, source_path, template_path, strings)
+ yield output, output_path
+
+
+def _Main(argv):
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument(
+ '--srcjar',
+ required=True,
+ help='When specified, a .srcjar at the given path is '
+ 'created instead of individual .java files.')
+
+ parser.add_argument(
+ '--template',
+ required=True,
+      help='Can be used to provide a context into which the '
+      'new string constants will be inserted.')
+
+ parser.add_argument(
+ 'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
+ args = parser.parse_args(argv)
+
+ with build_utils.AtomicOutput(args.srcjar) as f:
+ with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+ for data, path in _Generate(args.inputs, args.template):
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+ _Main(sys.argv[1:])
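To see what StringFileParser extracts, a minimal sketch under the same assumptions (Python 2 runtime, run from the gyp directory):

    import java_cpp_strings

    source = """
    // A switch that turns a feature on.
    const char kEnableFeature[] = "enable-feature";
    """.split('\n')

    strings = java_cpp_strings.StringFileParser(source).Parse()
    s = strings[0]
    # kEnableFeature becomes ENABLE_FEATURE; the value keeps its quotes so it
    # can be emitted verbatim as a Java string literal.
    print(s.name + ' = ' + s.value)  # ENABLE_FEATURE = "enable-feature"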
diff --git a/deps/v8/build/android/gyp/java_cpp_strings.pydeps b/deps/v8/build/android/gyp/java_cpp_strings.pydeps
new file mode 100644
index 0000000000..901b580e89
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_strings.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../gn_helpers.py
+java_cpp_strings.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/java_cpp_strings_tests.py b/deps/v8/build/android/gyp/java_cpp_strings_tests.py
new file mode 100755
index 0000000000..acf51e428e
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_strings_tests.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_strings.py.
+
+This test suite contains various tests for the C++ -> Java string generator.
+"""
+
+import unittest
+
+import java_cpp_strings
+
+
+class _TestStringsParser(unittest.TestCase):
+
+ def testParseComments(self):
+ test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const char kASwitch[] = "a-value";
+
+// Real comment that spans
+// multiple lines.
+const char kAnotherSwitch[] = "another-value";
+
+// Comment followed by nothing.
+""".split('\n')
+ strings = java_cpp_strings.StringFileParser(test_data).Parse()
+ self.assertEqual(2, len(strings))
+ self.assertEqual('A_SWITCH', strings[0].name)
+ self.assertEqual('"a-value"', strings[0].value)
+ self.assertEqual(1, len(strings[0].comments.split('\n')))
+ self.assertEqual('ANOTHER_SWITCH', strings[1].name)
+ self.assertEqual('"another-value"', strings[1].value)
+ self.assertEqual(2, len(strings[1].comments.split('\n')))
+
+ def testStringValues(self):
+ test_data = """
+// Single line string constants.
+const char kAString[] = "a-value";
+const char kNoComment[] = "no-comment";
+
+// Single line switch with a big space.
+const char kAStringWithSpace[] = "a-value";
+
+// Wrapped constant definition.
+const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
+ "a-string-with-a-very-long-name-that-will-have-to-wrap";
+
+// This is erroneous and should be ignored.
+const char kInvalidLineBreak[] =
+
+ "invalid-line-break";
+""".split('\n')
+ strings = java_cpp_strings.StringFileParser(test_data).Parse()
+ self.assertEqual(4, len(strings))
+ self.assertEqual('A_STRING', strings[0].name)
+ self.assertEqual('"a-value"', strings[0].value)
+ self.assertEqual('NO_COMMENT', strings[1].name)
+ self.assertEqual('"no-comment"', strings[1].value)
+ self.assertEqual('A_STRING_WITH_SPACE', strings[2].name)
+ self.assertEqual('"a-value"', strings[2].value)
+ self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+ strings[3].name)
+ self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
+ strings[3].value)
+
+ def testTemplateParsing(self):
+ test_data = """
+// Copyright {YEAR} The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// {SCRIPT_NAME}
+// From
+// {SOURCE_PATH}, and
+// {TEMPLATE_PATH}
+
+package my.java.package;
+
+public any sort of class MyClass {{
+
+{NATIVE_STRINGS}
+
+}}
+""".split('\n')
+ package, class_name = java_cpp_strings.ParseTemplateFile(test_data)
+ self.assertEqual('my.java.package', package)
+ self.assertEqual('MyClass', class_name)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/java_google_api_keys.py b/deps/v8/build/android/gyp/java_google_api_keys.py
new file mode 100755
index 0000000000..349821a8fc
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_google_api_keys.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a Java file with API keys.
+
+import argparse
+import os
+import string
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.append(
+ os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
+import google_api_keys
+
+
+PACKAGE = 'org.chromium.chrome'
+CLASSNAME = 'GoogleAPIKeys'
+
+
+def GetScriptName():
+ return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+
+def GenerateOutput(constant_definitions):
+ template = string.Template("""
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// ${SCRIPT_NAME}
+// From
+// ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${CONSTANT_ENTRIES}
+}
+""")
+
+ constant_template = string.Template(
+ ' public static final String ${NAME} = "${VALUE}";')
+ constant_entries_list = []
+ for constant_name, constant_value in constant_definitions.iteritems():
+ values = {
+ 'NAME': constant_name,
+ 'VALUE': constant_value,
+ }
+ constant_entries_list.append(constant_template.substitute(values))
+ constant_entries_string = '\n'.join(constant_entries_list)
+
+ values = {
+ 'CLASS_NAME': CLASSNAME,
+ 'CONSTANT_ENTRIES': constant_entries_string,
+ 'PACKAGE': PACKAGE,
+ 'SCRIPT_NAME': GetScriptName(),
+ 'SOURCE_PATH': 'google_api_keys/google_api_keys.h',
+ }
+ return template.substitute(values)
+
+
+def _DoWriteJavaOutput(output_path, constant_definition):
+ folder = os.path.dirname(output_path)
+ if folder and not os.path.exists(folder):
+ os.makedirs(folder)
+ with open(output_path, 'w') as out_file:
+ out_file.write(GenerateOutput(constant_definition))
+
+
+def _DoWriteJarOutput(output_path, constant_definition):
+ folder = os.path.dirname(output_path)
+ if folder and not os.path.exists(folder):
+ os.makedirs(folder)
+ with zipfile.ZipFile(output_path, 'w') as srcjar:
+ path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
+ data = GenerateOutput(constant_definition)
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+def _DoMain(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--out", help="Path for java output.")
+ parser.add_argument("--srcjar", help="Path for srcjar output.")
+ options = parser.parse_args(argv)
+ if not options.out and not options.srcjar:
+ parser.print_help()
+ sys.exit(-1)
+
+ values = {}
+ values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey()
+ values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (google_api_keys.
+ GetAPIKeyPhysicalWebTest())
+ values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN')
+ values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN')
+ values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID(
+ 'CLOUD_PRINT')
+ values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret(
+ 'CLOUD_PRINT')
+ values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING')
+ values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret(
+ 'REMOTING')
+ values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID(
+ 'REMOTING_HOST')
+ values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys.
+ GetClientSecret('REMOTING_HOST'))
+ values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (google_api_keys.
+ GetClientID('REMOTING_IDENTITY_API'))
+
+ if options.out:
+ _DoWriteJavaOutput(options.out, values)
+ if options.srcjar:
+ _DoWriteJarOutput(options.srcjar, values)
+
+
+if __name__ == '__main__':
+ _DoMain(sys.argv[1:])
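A hedged invocation sketch (the output path is hypothetical; _DoMain is the module's own entry point, and this assumes the google_apis directory resolves via the sys.path.append above):

    import java_google_api_keys

    # Equivalent to: java_google_api_keys.py --srcjar gen/google_api_keys.srcjar
    # Passing --out <path> instead writes a loose .java file.
    java_google_api_keys._DoMain(['--srcjar', 'gen/google_api_keys.srcjar'])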
diff --git a/deps/v8/build/android/gyp/java_google_api_keys_tests.py b/deps/v8/build/android/gyp/java_google_api_keys_tests.py
new file mode 100755
index 0000000000..6529a5397e
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_google_api_keys_tests.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_google_api_keys.py.
+
+This test suite contains various tests for the C++ -> Java Google API Keys
+generator.
+"""
+
+import unittest
+
+import java_google_api_keys
+
+
+class TestJavaGoogleAPIKeys(unittest.TestCase):
+ def testOutput(self):
+ definition = {'E1': 'abc', 'E2': 'defgh'}
+ output = java_google_api_keys.GenerateOutput(definition)
+ expected = """
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// %s
+// From
+// google_api_keys/google_api_keys.h
+
+package org.chromium.chrome;
+
+public class GoogleAPIKeys {
+ public static final String E1 = "abc";
+ public static final String E2 = "defgh";
+}
+"""
+ self.assertEqual(expected % java_google_api_keys.GetScriptName(), output)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/javac.py b/deps/v8/build/android/gyp/javac.py
new file mode 100755
index 0000000000..7cbe74c4b1
--- /dev/null
+++ b/deps/v8/build/android/gyp/javac.py
@@ -0,0 +1,595 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import distutils.spawn
+import itertools
+import logging
+import multiprocessing
+import optparse
+import os
+import shutil
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import jar_info_utils
+
+import jar
+
+sys.path.insert(
+ 0,
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src'))
+import colorama
+
+
+ERRORPRONE_WARNINGS_TO_TURN_OFF = [
+ # TODO(crbug.com/834807): Follow steps in bug
+ 'DoubleBraceInitialization',
+ # TODO(crbug.com/834790): Follow steps in bug.
+ 'CatchAndPrintStackTrace',
+ # TODO(crbug.com/801210): Follow steps in bug.
+ 'SynchronizeOnNonFinalField',
+ # TODO(crbug.com/802073): Follow steps in bug.
+ 'TypeParameterUnusedInFormals',
+ # TODO(crbug.com/803484): Follow steps in bug.
+ 'CatchFail',
+ # TODO(crbug.com/803485): Follow steps in bug.
+ 'JUnitAmbiguousTestClass',
+ # Android platform default is always UTF-8.
+ # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
+ 'DefaultCharset',
+ # Low priority since the alternatives still work.
+ 'JdkObsolete',
+ # We don't use that many lambdas.
+ 'FunctionalInterfaceClash',
+ # There are lots of times when we just want to post a task.
+ 'FutureReturnValueIgnored',
+ # Nice to be explicit about operators, but not necessary.
+ 'OperatorPrecedence',
+ # Just false positives in our code.
+ 'ThreadJoinLoop',
+ # Low priority corner cases with String.split.
+ # Linking Guava and using Splitter was rejected
+ # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630.
+ 'StringSplitter',
+ # Preferred to use another method since it propagates exceptions better.
+ 'ClassNewInstance',
+ # Nice to have static inner classes but not necessary.
+ 'ClassCanBeStatic',
+ # Explicit is better than implicit.
+ 'FloatCast',
+ # Results in false positives.
+ 'ThreadLocalUsage',
+ # Also just false positives.
+ 'Finally',
+ # False positives for Chromium.
+ 'FragmentNotInstantiable',
+ # Low priority to fix.
+ 'HidingField',
+ # Low priority.
+ 'IntLongMath',
+ # Low priority.
+ 'BadComparable',
+ # Low priority.
+ 'EqualsHashCode',
+ # Nice to fix but low priority.
+ 'TypeParameterShadowing',
+ # Good to have immutable enums, also low priority.
+ 'ImmutableEnumChecker',
+ # False positives for testing.
+ 'InputStreamSlowMultibyteRead',
+ # Nice to have better primitives.
+ 'BoxedPrimitiveConstructor',
+ # Not necessary for tests.
+ 'OverrideThrowableToString',
+ # Nice to have better type safety.
+ 'CollectionToArraySafeParameter',
+ # Makes logcat debugging more difficult, and does not provide obvious
+ # benefits in the Chromium codebase.
+ 'ObjectToString',
+]
+
+ERRORPRONE_WARNINGS_TO_ERROR = [
+ # Add warnings to this after fixing/suppressing all instances in our codebase.
+ 'ArgumentSelectionDefectChecker',
+ 'AssertionFailureIgnored',
+ 'FloatingPointLiteralPrecision',
+ 'JavaLangClash',
+ 'MissingFail',
+ 'MissingOverride',
+ 'NarrowingCompoundAssignment',
+ 'OrphanedFormatString',
+ 'ParameterName',
+ 'ParcelableCreator',
+ 'ReferenceEquality',
+ 'StaticGuardedByInstance',
+ 'StaticQualifiedUsingExpression',
+ 'UseCorrectAssertInTests',
+]
+
+
+def ProcessJavacOutput(output):
+ fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+ warning_re = re.compile(
+ fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+ error_re = re.compile(
+ fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+ marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+ # These warnings cannot be suppressed even for third-party code. Deprecation
+ # warnings in particular do not help, since we must support older Android
+ # versions.
+ deprecated_re = re.compile(
+ r'(Note: .* uses? or overrides? a deprecated API.)$')
+ unchecked_re = re.compile(
+ r'(Note: .* uses? unchecked or unsafe operations.)$')
+ recompile_re = re.compile(r'(Note: Recompile with -Xlint:.* for details.)$')
+
+ warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+ error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+ marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+ def Colorize(line, regex, color):
+ match = regex.match(line)
+ start = match.start(color[0])
+ end = match.end(color[0])
+ return (line[:start]
+ + color[1] + line[start:end]
+ + colorama.Fore.RESET + colorama.Style.RESET_ALL
+ + line[end:])
+
+ def ApplyFilters(line):
+ return not (deprecated_re.match(line)
+ or unchecked_re.match(line)
+ or recompile_re.match(line))
+
+ def ApplyColors(line):
+ if warning_re.match(line):
+ line = Colorize(line, warning_re, warning_color)
+ elif error_re.match(line):
+ line = Colorize(line, error_re, error_color)
+ elif marker_re.match(line):
+ line = Colorize(line, marker_re, marker_color)
+ return line
+
+ return '\n'.join(map(ApplyColors, filter(ApplyFilters, output.split('\n'))))
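+
+
+# Illustrative sketch of the filtering above (the javac lines are
+# hypothetical; exact wording varies by javac version):
+#   'Foo.java:10: warning: [deprecation] bar() in Baz has been deprecated'
+# is kept and colorized as a warning, while
+#   'Note: Some input files use or override a deprecated API.'
+# is dropped entirely by ApplyFilters.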
+
+
+def _ExtractClassFiles(jar_path, dest_dir, java_files):
+ """Extracts all .class files not corresponding to |java_files|."""
+ # Two challenges exist here:
+ # 1. |java_files| have prefixes that are not represented in the jar paths.
+ # 2. A single .java file results in multiple .class files when it contains
+ # nested classes.
+ # Here's an example:
+ # source path: ../../base/android/java/src/org/chromium/Foo.java
+ # jar paths: org/chromium/Foo.class, org/chromium/Foo$Inner.class
+ # To extract only .class files not related to the given .java files, we strip
+ # off ".class" and "$*.class" and use a substring match against java_files.
+ def extract_predicate(path):
+ if not path.endswith('.class'):
+ return False
+ path_without_suffix = re.sub(r'(?:\$|\.)[^/]*class$', '', path)
+ partial_java_path = path_without_suffix + '.java'
+ return not any(p.endswith(partial_java_path) for p in java_files)
+
+ logging.info('Extracting class files from %s', jar_path)
+ build_utils.ExtractAll(jar_path, path=dest_dir, predicate=extract_predicate)
+ for path in build_utils.FindInDirectory(dest_dir, '*.class'):
+ shutil.copystat(jar_path, path)
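+
+
+# Illustrative example of extract_predicate above, using the docstring's
+# hypothetical paths: with java_files containing
+# '../../base/android/java/src/org/chromium/Foo.java', both
+# 'org/chromium/Foo.class' and 'org/chromium/Foo$Inner.class' reduce to
+# 'org/chromium/Foo.java' and are skipped, while 'org/chromium/Bar.class'
+# has no matching source file and is extracted.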
+
+
+def _ParsePackageAndClassNames(java_file):
+ package_name = ''
+ class_names = []
+ with open(java_file) as f:
+ for l in f:
+ # Strip unindented comments.
+ # Considers a leading * as a continuation of a multi-line comment (our
+ # linter doesn't enforce a space before it like there should be).
+ l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l)
+
+ m = re.match(r'package\s+(.*?);', l)
+ if m and not package_name:
+ package_name = m.group(1)
+
+ # Not exactly a proper parser, but works for sources that Chrome uses.
+ # In order to not match nested classes, it just checks for lack of indent.
+ m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l)
+ if m:
+ class_names.append(m.group(1))
+ return package_name, class_names
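+
+
+# Illustrative example: for a hypothetical Foo.java containing
+#   package org.chromium.foo;
+#   public class Foo {}
+# _ParsePackageAndClassNames() returns ('org.chromium.foo', ['Foo']).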
+
+
+def _CheckPathMatchesClassName(java_file, package_name, class_name):
+ parts = package_name.split('.') + [class_name + '.java']
+ expected_path_suffix = os.path.sep.join(parts)
+ if not java_file.endswith(expected_path_suffix):
+ raise Exception(('Java package+class name do not match its path.\n'
+ 'Actual path: %s\nExpected path: %s') %
+ (java_file, expected_path_suffix))
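+
+
+# For example, package_name 'org.chromium.chrome' and class_name 'Foo' yield
+# the expected path suffix 'org/chromium/chrome/Foo.java' (on POSIX).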
+
+
+def _MoveGeneratedJavaFilesToGenDir(classes_dir, generated_java_dir):
+ # Move any Annotation Processor-generated .java files into $out/gen
+ # so that codesearch can find them.
+ javac_generated_sources = []
+ for src_path in build_utils.FindInDirectory(classes_dir, '*.java'):
+ dst_path = os.path.join(generated_java_dir,
+ os.path.relpath(src_path, classes_dir))
+ build_utils.MakeDirectory(os.path.dirname(dst_path))
+ shutil.move(src_path, dst_path)
+ javac_generated_sources.append(dst_path)
+ return javac_generated_sources
+
+
+def _ProcessJavaFileForInfo(java_file):
+ package_name, class_names = _ParsePackageAndClassNames(java_file)
+ return java_file, package_name, class_names
+
+
+def _ProcessInfo(java_file, package_name, class_names, source, chromium_code):
+ for class_name in class_names:
+ yield '{}.{}'.format(package_name, class_name)
+ # Skip aidl srcjars since they don't indent code correctly.
+ if '_aidl.srcjar' in source:
+ continue
+ assert not chromium_code or len(class_names) == 1, (
+ 'Chromium java files must only have one class: {}'.format(source))
+ if chromium_code:
+ # This check is not strictly necessary, but it is nice to enforce somewhere.
+ _CheckPathMatchesClassName(java_file, package_name, class_names[0])
+
+
+def _CreateInfoFile(java_files, jar_path, chromium_code, srcjar_files,
+ classes_dir, generated_java_dir):
+ """Writes a .jar.info file.
+
+ This maps fully qualified names for classes to either the java file that they
+ are defined in or the path of the srcjar that they came from.
+ """
+ output_path = jar_path + '.info'
+ logging.info('Start creating info file: %s', output_path)
+ javac_generated_sources = _MoveGeneratedJavaFilesToGenDir(
+ classes_dir, generated_java_dir)
+ logging.info('Finished moving generated java files: %s', output_path)
+ # 2 processes saves ~0.9s, 3 processes saves ~1.2s, 4 processes saves ~1.2s.
+ pool = multiprocessing.Pool(processes=3)
+ results = pool.imap_unordered(
+ _ProcessJavaFileForInfo,
+ itertools.chain(java_files, javac_generated_sources),
+ chunksize=10)
+ pool.close()
+ all_info_data = {}
+ for java_file, package_name, class_names in results:
+ source = srcjar_files.get(java_file, java_file)
+ for fully_qualified_name in _ProcessInfo(
+ java_file, package_name, class_names, source, chromium_code):
+ all_info_data[fully_qualified_name] = java_file
+ logging.info('Writing info file: %s', output_path)
+ with build_utils.AtomicOutput(output_path) as f:
+ jar_info_utils.WriteJarInfoFile(f, all_info_data, srcjar_files)
+ logging.info('Completed info file: %s', output_path)
+
+
+def _CreateJarFile(jar_path, provider_configurations, additional_jar_files,
+ classes_dir):
+ logging.info('Start creating jar file: %s', jar_path)
+ with build_utils.AtomicOutput(jar_path) as f:
+ jar.JarDirectory(
+ classes_dir,
+ f.name,
+ # Avoid putting generated java files into the jar since
+ # _MoveGeneratedJavaFilesToGenDir has not completed yet
+ predicate=lambda name: not name.endswith('.java'),
+ provider_configurations=provider_configurations,
+ additional_files=additional_jar_files)
+ logging.info('Completed jar file: %s', jar_path)
+
+
+def _OnStaleMd5(options, javac_cmd, java_files, classpath):
+ logging.info('Starting _OnStaleMd5')
+
+ # Compiles with Error Prone take about twice as long as pure javac. Thus GN
+ # rules run both in parallel, with Error Prone used only for checks.
+ save_outputs = not options.enable_errorprone
+
+ with build_utils.TempDir() as temp_dir:
+ srcjars = options.java_srcjars
+
+ classes_dir = os.path.join(temp_dir, 'classes')
+ os.makedirs(classes_dir)
+
+ if save_outputs:
+ generated_java_dir = options.generated_dir
+ else:
+ generated_java_dir = os.path.join(temp_dir, 'gen')
+
+ shutil.rmtree(generated_java_dir, True)
+
+ srcjar_files = {}
+ if srcjars:
+ logging.info('Extracting srcjars to %s', generated_java_dir)
+ build_utils.MakeDirectory(generated_java_dir)
+ jar_srcs = []
+ for srcjar in options.java_srcjars:
+ extracted_files = build_utils.ExtractAll(
+ srcjar, no_clobber=True, path=generated_java_dir, pattern='*.java')
+ for path in extracted_files:
+ # We want the path inside the srcjar so the viewer can have a tree
+ # structure.
+ srcjar_files[path] = '{}/{}'.format(
+ srcjar, os.path.relpath(path, generated_java_dir))
+ jar_srcs.extend(extracted_files)
+ logging.info('Done extracting srcjars')
+ java_files.extend(jar_srcs)
+
+ if java_files:
+ # Don't include the output directory in the initial set of args, since it
+ # lives in a temp dir and would make the command unstable (breaks md5
+ # stamping).
+ cmd = javac_cmd + ['-d', classes_dir]
+
+ # Pass classpath and source paths as response files to avoid extremely
+ # long command lines that are tedious to debug.
+ if classpath:
+ cmd += ['-classpath', ':'.join(classpath)]
+
+ java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+ with open(java_files_rsp_path, 'w') as f:
+ f.write(' '.join(java_files))
+ cmd += ['@' + java_files_rsp_path]
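+ # The assembled command now has the shape (paths hypothetical):
+ # javac -g ... -d <tmp>/classes -classpath a.jar:b.jar @<tmp>/files_list.txt
+ # where the @-response-file holds the space-separated .java sources.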
+
+ logging.debug('Build command %s', cmd)
+ build_utils.CheckOutput(
+ cmd,
+ print_stdout=options.chromium_code,
+ stderr_filter=ProcessJavacOutput)
+ logging.info('Finished build command')
+
+ if save_outputs:
+ # Creating the jar file takes the longest, start it first on a separate
+ # process to unblock the rest of the post-processing steps.
+ jar_file_worker = multiprocessing.Process(
+ target=_CreateJarFile,
+ args=(options.jar_path, options.provider_configurations,
+ options.additional_jar_files, classes_dir))
+ jar_file_worker.start()
+ else:
+ jar_file_worker = None
+ build_utils.Touch(options.jar_path)
+
+ if save_outputs:
+ _CreateInfoFile(java_files, options.jar_path, options.chromium_code,
+ srcjar_files, classes_dir, generated_java_dir)
+ else:
+ build_utils.Touch(options.jar_path + '.info')
+
+ if jar_file_worker:
+ jar_file_worker.join()
+ logging.info('Completed all steps in _OnStaleMd5')
+
+
+def _ParseOptions(argv):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option(
+ '--java-srcjars',
+ action='append',
+ default=[],
+ help='List of srcjars to include in compilation.')
+ parser.add_option(
+ '--generated-dir',
+ help='Subdirectory within target_gen_dir to place extracted srcjars and '
+ 'annotation processor output for codesearch to find.')
+ parser.add_option(
+ '--bootclasspath',
+ action='append',
+ default=[],
+ help='Boot classpath for javac. If specified multiple times, all '
+ 'entries are appended to construct the classpath.')
+ parser.add_option(
+ '--java-version',
+ help='Java language version to use in -source and -target args to javac.')
+ parser.add_option(
+ '--full-classpath',
+ action='append',
+ help='Classpath to use when annotation processors are present.')
+ parser.add_option(
+ '--interface-classpath',
+ action='append',
+ help='Classpath to use when no annotation processors are present.')
+ parser.add_option(
+ '--processors',
+ action='append',
+ help='GN list of annotation processor main classes.')
+ parser.add_option(
+ '--processorpath',
+ action='append',
+ help='GN list of jars that comprise the classpath used for Annotation '
+ 'Processors.')
+ parser.add_option(
+ '--processor-arg',
+ dest='processor_args',
+ action='append',
+ help='key=value arguments for the annotation processors.')
+ parser.add_option(
+ '--provider-configuration',
+ dest='provider_configurations',
+ action='append',
+ help='File to specify a service provider. Will be included '
+ 'in the jar under META-INF/services.')
+ parser.add_option(
+ '--additional-jar-file',
+ dest='additional_jar_files',
+ action='append',
+ help='Additional files to package into jar. By default, only Java .class '
+ 'files are packaged into the jar. Files should be specified in '
+ 'format <filename>:<path to be placed in jar>.')
+ parser.add_option(
+ '--chromium-code',
+ type='int',
+ help='Whether code being compiled should be built with stricter '
+ 'warnings for chromium code.')
+ parser.add_option(
+ '--errorprone-path', help='Use the Errorprone compiler at this path.')
+ parser.add_option(
+ '--enable-errorprone',
+ action='store_true',
+ help='Enable errorprone checks')
+ parser.add_option('--jar-path', help='Jar output path.')
+ parser.add_option(
+ '--javac-arg',
+ action='append',
+ default=[],
+ help='Additional arguments to pass to javac.')
+
+ options, args = parser.parse_args(argv)
+ build_utils.CheckOptions(options, parser, required=('jar_path',))
+
+ options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+ options.full_classpath = build_utils.ParseGnList(options.full_classpath)
+ options.interface_classpath = build_utils.ParseGnList(
+ options.interface_classpath)
+ options.processorpath = build_utils.ParseGnList(options.processorpath)
+ options.processors = build_utils.ParseGnList(options.processors)
+ options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+
+ if options.java_version == '1.8' and options.bootclasspath:
+ # Android's boot jar doesn't contain all java 8 classes.
+ # See: https://github.com/evant/gradle-retrolambda/issues/23.
+ # Get the path of the jdk folder by searching for the 'jar' executable. We
+ # cannot search for the 'javac' executable because goma provides a custom
+ # version of 'javac'.
+ jar_path = os.path.realpath(distutils.spawn.find_executable('jar'))
+ jdk_dir = os.path.dirname(os.path.dirname(jar_path))
+ rt_jar = os.path.join(jdk_dir, 'jre', 'lib', 'rt.jar')
+ options.bootclasspath.append(rt_jar)
+
+ additional_jar_files = []
+ for arg in options.additional_jar_files or []:
+ filepath, jar_filepath = arg.split(':')
+ additional_jar_files.append((filepath, jar_filepath))
+ options.additional_jar_files = additional_jar_files
+
+ java_files = []
+ for arg in args:
+ # Interpret a path prefixed with @ as a file containing a list of sources.
+ if arg.startswith('@'):
+ java_files.extend(build_utils.ReadSourcesList(arg[1:]))
+ else:
+ java_files.append(arg)
+
+ return options, java_files
+
+
+def main(argv):
+ logging.basicConfig(
+ level=logging.INFO if os.environ.get('_JAVAC_DEBUG') else logging.WARNING,
+ format='%(levelname).1s %(relativeCreated)6d %(message)s')
+ colorama.init()
+
+ argv = build_utils.ExpandFileArgs(argv)
+ options, java_files = _ParseOptions(argv)
+
+ # Until we add a version of javac via DEPS, use errorprone with all checks
+ # disabled rather than javac. This ensures builds are reproducible.
+ # https://crbug.com/693079
+ # As of Jan 2019, on a z920, compiling chrome_java times:
+ # * With javac: 17 seconds
+ # * With errorprone (checks disabled): 20 seconds
+ # * With errorprone (checks enabled): 30 seconds
+ if options.errorprone_path:
+ javac_path = options.errorprone_path
+ else:
+ javac_path = distutils.spawn.find_executable('javac')
+
+ javac_cmd = [
+ javac_path,
+ '-g',
+ # Chromium only allows UTF8 source files. Being explicit avoids
+ # javac pulling a default encoding from the user's environment.
+ '-encoding',
+ 'UTF-8',
+ # Prevent compiler from compiling .java files not listed as inputs.
+ # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
+ '-sourcepath',
+ ':',
+ ]
+
+ if options.enable_errorprone:
+ for warning in ERRORPRONE_WARNINGS_TO_TURN_OFF:
+ javac_cmd.append('-Xep:{}:OFF'.format(warning))
+ for warning in ERRORPRONE_WARNINGS_TO_ERROR:
+ javac_cmd.append('-Xep:{}:ERROR'.format(warning))
+ elif options.errorprone_path:
+ javac_cmd.append('-XepDisableAllChecks')
+
+ if options.java_version:
+ javac_cmd.extend([
+ '-source', options.java_version,
+ '-target', options.java_version,
+ ])
+
+ if options.chromium_code:
+ javac_cmd.extend(['-Werror'])
+ else:
+ # XDignore.symbol.file makes javac compile against rt.jar instead of
+ # ct.sym. This means that using a java internal package/class will not
+ # trigger a compile warning or error.
+ javac_cmd.extend(['-XDignore.symbol.file'])
+
+ if options.processors:
+ javac_cmd.extend(['-processor', ','.join(options.processors)])
+
+ if options.bootclasspath:
+ javac_cmd.extend(['-bootclasspath', ':'.join(options.bootclasspath)])
+
+ # Annotation processors crash when given interface jars.
+ active_classpath = (
+ options.full_classpath
+ if options.processors else options.interface_classpath)
+ classpath = []
+ if active_classpath:
+ classpath.extend(active_classpath)
+
+ if options.processorpath:
+ javac_cmd.extend(['-processorpath', ':'.join(options.processorpath)])
+ if options.processor_args:
+ for arg in options.processor_args:
+ javac_cmd.extend(['-A%s' % arg])
+
+ javac_cmd.extend(options.javac_arg)
+
+ classpath_inputs = (options.bootclasspath + options.interface_classpath +
+ options.processorpath)
+
+ # GN already knows of java_files, so listing them in the depfile just makes
+ # things worse when they change.
+ depfile_deps = [javac_path] + classpath_inputs + options.java_srcjars
+ input_paths = depfile_deps + java_files
+
+ output_paths = [
+ options.jar_path,
+ options.jar_path + '.info',
+ ]
+
+ # List python deps in input_strings rather than input_paths, since their
+ # contents do not change what gets written to the depfile.
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(options, javac_cmd, java_files, classpath),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=input_paths,
+ input_strings=javac_cmd + classpath,
+ output_paths=output_paths,
+ add_pydeps=False)
+ logging.info('Script complete: %s', __file__)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/javac.pydeps b/deps/v8/build/android/gyp/javac.pydeps
new file mode 100644
index 0000000000..a9d257b95f
--- /dev/null
+++ b/deps/v8/build/android/gyp/javac.pydeps
@@ -0,0 +1,15 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/javac.pydeps build/android/gyp/javac.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../gn_helpers.py
+jar.py
+javac.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/jinja_template.py b/deps/v8/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000000..4d5c403dfe
--- /dev/null
+++ b/deps/v8/build/android/gyp/jinja_template.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import argparse
+import os
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2 # pylint: disable=F0401
+
+
+class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
+ def __init__(self, searchpath):
+ jinja2.FileSystemLoader.__init__(self, searchpath)
+ self.loaded_templates = set()
+
+ def get_source(self, environment, template):
+ contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+ self, environment, template)
+ self.loaded_templates.add(os.path.relpath(filename))
+ return contents, filename, uptodate
+
+
+class JinjaProcessor(object):
+ """Allows easy rendering of jinja templates with input file tracking."""
+ def __init__(self, loader_base_dir, variables=None):
+ self.loader_base_dir = loader_base_dir
+ self.variables = variables or {}
+ self.loader = _RecordingFileSystemLoader(loader_base_dir)
+ self.env = jinja2.Environment(loader=self.loader)
+ self.env.undefined = jinja2.StrictUndefined
+ self.env.line_comment_prefix = '##'
+ self.env.trim_blocks = True
+ self.env.lstrip_blocks = True
+ self._template_cache = {} # Map of path -> Template
+
+ def Render(self, input_filename, variables=None):
+ input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
+ template = self._template_cache.get(input_rel_path)
+ if not template:
+ template = self.env.get_template(input_rel_path)
+ self._template_cache[input_rel_path] = template
+ return template.render(variables or self.variables)
+
+ def GetLoadedTemplates(self):
+ return list(self.loader.loaded_templates)
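+
+
+# Illustrative usage of JinjaProcessor (paths and variables hypothetical):
+#   processor = JinjaProcessor('/src', variables={'channel': 'beta'})
+#   text = processor.Render('/src/templates/AndroidManifest.xml')
+#   inputs = processor.GetLoadedTemplates()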
+
+
+def _ProcessFile(processor, input_filename, output_filename):
+ output = processor.Render(input_filename)
+
+ # If |output| is the same as the existing file content, skip the update so
+ # ninja's restat can avoid rebuilding things that depend on it.
+ if os.path.isfile(output_filename):
+ with codecs.open(output_filename, 'r', 'utf-8') as f:
+ if f.read() == output:
+ return
+
+ with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+ output_file.write(output)
+
+
+def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
+ with build_utils.TempDir() as temp_dir:
+ files_to_zip = dict()
+ for input_filename in input_filenames:
+ relpath = os.path.relpath(os.path.abspath(input_filename),
+ os.path.abspath(inputs_base_dir))
+ if relpath.startswith(os.pardir):
+ raise Exception('input file %s is not contained in inputs base dir %s'
+ % (input_filename, inputs_base_dir))
+
+ output_filename = os.path.join(temp_dir, relpath)
+ parent_dir = os.path.dirname(output_filename)
+ build_utils.MakeDirectory(parent_dir)
+ _ProcessFile(processor, input_filename, output_filename)
+ files_to_zip[relpath] = input_filename
+
+ resource_utils.CreateResourceInfoFile(files_to_zip, outputs_zip)
+ build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def _ParseVariables(variables_arg, error_func):
+ variables = {}
+ for v in build_utils.ParseGnList(variables_arg):
+ if '=' not in v:
+ error_func('--variables argument must contain "=": ' + v)
+ name, _, value = v.partition('=')
+ variables[name] = value
+ return variables
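+
+
+# Illustrative example, assuming build_utils.ParseGnList splits the value on
+# whitespace: _ParseVariables('channel=beta mstone=39', parser.error)
+# returns {'channel': 'beta', 'mstone': '39'}.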
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--inputs', required=True,
+ help='GN-list of template files to process.')
+ parser.add_argument('--includes', default='',
+ help="GN-list of files that get {% include %}'ed.")
+ parser.add_argument('--output', help='The output file to generate. Valid '
+ 'only if there is a single input.')
+ parser.add_argument('--outputs-zip', help='A zip file for the processed '
+ 'templates. Required if there are multiple inputs.')
+ parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
+ 'of the inputs. Each output\'s path in the output zip '
+ 'will match the relative path from INPUTS_BASE_DIR to '
+ 'the input. Required if --outputs-zip is given.')
+ parser.add_argument('--loader-base-dir', help='Base path used by the '
+ 'template loader. Must be a common ancestor directory of '
+ 'the inputs. Defaults to DIR_SOURCE_ROOT.',
+ default=host_paths.DIR_SOURCE_ROOT)
+ parser.add_argument('--variables', help='Variables to be made available in '
+ 'the template processing environment, as a GYP list '
+ '(e.g. --variables "channel=beta mstone=39")', default='')
+ parser.add_argument('--check-includes', action='store_true',
+ help='Enable inputs and includes checks.')
+ options = parser.parse_args()
+
+ inputs = build_utils.ParseGnList(options.inputs)
+ includes = build_utils.ParseGnList(options.includes)
+
+ if (options.output is None) == (options.outputs_zip is None):
+ parser.error('Exactly one of --output and --outputs-zip must be given')
+ if options.output and len(inputs) != 1:
+ parser.error('--output cannot be used with multiple inputs')
+ if options.outputs_zip and not options.inputs_base_dir:
+ parser.error('--inputs-base-dir must be given when --outputs-zip is used')
+
+ variables = _ParseVariables(options.variables, parser.error)
+ processor = JinjaProcessor(options.loader_base_dir, variables=variables)
+
+ if options.output:
+ _ProcessFile(processor, inputs[0], options.output)
+ else:
+ _ProcessFiles(processor, inputs, options.inputs_base_dir,
+ options.outputs_zip)
+
+ if options.check_includes:
+ all_inputs = set(processor.GetLoadedTemplates())
+ all_inputs.difference_update(inputs)
+ all_inputs.difference_update(includes)
+ if all_inputs:
+ raise Exception('Found files not listed via --includes:\n' +
+ '\n'.join(sorted(all_inputs)))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/jinja_template.pydeps b/deps/v8/build/android/gyp/jinja_template.pydeps
new file mode 100644
index 0000000000..a2a38176bf
--- /dev/null
+++ b/deps/v8/build/android/gyp/jinja_template.pydeps
@@ -0,0 +1,41 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+jinja_template.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/lint.py b/deps/v8/build/android/gyp/lint.py
new file mode 100755
index 0000000000..b2f90c7e9e
--- /dev/null
+++ b/deps/v8/build/android/gyp/lint.py
@@ -0,0 +1,399 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs Android's lint tool."""
+
+
+import argparse
+import os
+import re
+import sys
+import traceback
+from xml.dom import minidom
+
+from util import build_utils
+
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
+
+
+def _OnStaleMd5(lint_path, config_path, processed_config_path,
+ manifest_path, result_path, product_dir, sources, jar_path,
+ cache_dir, android_sdk_version, srcjars, resource_sources,
+ disable=None, classpath=None, can_fail_build=False,
+ include_unexpected=False, silent=False):
+ def _RebasePath(path):
+ """Returns relative path to top-level src dir.
+
+ Args:
+ path: A path relative to cwd.
+ """
+ ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT)
+ # If it's outside of src/, just use abspath.
+ if ret.startswith('..'):
+ ret = os.path.abspath(path)
+ return ret
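+
+ # E.g. (hypothetical): if DIR_SOURCE_ROOT is /abs/src, then
+ # _RebasePath('/abs/src/out/Debug/lint') -> 'out/Debug/lint', while a path
+ # outside src/ is returned as an absolute path.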
+
+ def _ProcessConfigFile():
+ if not config_path or not processed_config_path:
+ return
+ if not build_utils.IsTimeStale(processed_config_path, [config_path]):
+ return
+
+ with open(config_path, 'rb') as f:
+ content = f.read().replace(
+ 'PRODUCT_DIR', _RebasePath(product_dir))
+
+ with open(processed_config_path, 'wb') as f:
+ f.write(content)
+
+ def _ProcessResultFile():
+ with open(result_path, 'rb') as f:
+ content = f.read().replace(
+ _RebasePath(product_dir), 'PRODUCT_DIR')
+
+ with open(result_path, 'wb') as f:
+ f.write(content)
+
+ def _ParseAndShowResultFile():
+ dom = minidom.parse(result_path)
+ issues = dom.getElementsByTagName('issue')
+ if not silent:
+ print >> sys.stderr
+ for issue in issues:
+ issue_id = issue.attributes['id'].value
+ message = issue.attributes['message'].value
+ location_elem = issue.getElementsByTagName('location')[0]
+ path = location_elem.attributes['file'].value
+ line = location_elem.getAttribute('line')
+ if line:
+ error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
+ else:
+ # Issues in class files don't have a line number.
+ error = '%s %s: %s [warning]' % (path, message, issue_id)
+ print >> sys.stderr, error.encode('utf-8')
+ for attr in ['errorLine1', 'errorLine2']:
+ error_line = issue.getAttribute(attr)
+ if error_line:
+ print >> sys.stderr, error_line.encode('utf-8')
+ return len(issues)
+
+ with build_utils.TempDir() as temp_dir:
+ _ProcessConfigFile()
+
+ cmd = [
+ _RebasePath(lint_path), '-Werror', '--exitcode', '--showall',
+ '--xml', _RebasePath(result_path),
+ ]
+ if jar_path:
+ # --classpath is just for .class files for this one target.
+ cmd.extend(['--classpath', _RebasePath(jar_path)])
+ if processed_config_path:
+ cmd.extend(['--config', _RebasePath(processed_config_path)])
+
+ tmp_dir_counter = [0]
+ def _NewTempSubdir(prefix, append_digit=True):
+ # Helper function to create a new subdirectory based on the number of
+ # subdirs created earlier.
+ if append_digit:
+ tmp_dir_counter[0] += 1
+ prefix += str(tmp_dir_counter[0])
+ new_dir = os.path.join(temp_dir, prefix)
+ os.makedirs(new_dir)
+ return new_dir
+
+ resource_dirs = []
+ for resource_source in resource_sources:
+ if os.path.isdir(resource_source):
+ resource_dirs.append(resource_source)
+ else:
+ # This is a zip file with generated resources (e.g. strings from GRD).
+ # Extract it to a temporary folder.
+ resource_dir = _NewTempSubdir(resource_source, append_digit=False)
+ resource_dirs.append(resource_dir)
+ build_utils.ExtractAll(resource_source, path=resource_dir)
+
+ for resource_dir in resource_dirs:
+ cmd.extend(['--resources', _RebasePath(resource_dir)])
+
+ if classpath:
+ # --libraries is the classpath (excluding active target).
+ cp = ':'.join(_RebasePath(p) for p in classpath)
+ cmd.extend(['--libraries', cp])
+
+ # There may be multiple source files with the same basename (but in
+ # different directories). It is difficult to determine what part of the path
+ # corresponds to the java package, so instead the source files are symlinked
+ # into temporary directories (creating a new one whenever there is a name
+ # conflict).
+ def PathInDir(d, src):
+ subpath = os.path.join(d, _RebasePath(src))
+ subdir = os.path.dirname(subpath)
+ if not os.path.exists(subdir):
+ os.makedirs(subdir)
+ return subpath
+
+ src_dirs = []
+ for src in sources:
+ src_dir = None
+ for d in src_dirs:
+ if not os.path.exists(PathInDir(d, src)):
+ src_dir = d
+ break
+ if not src_dir:
+ src_dir = _NewTempSubdir('SRC_ROOT')
+ src_dirs.append(src_dir)
+ cmd.extend(['--sources', _RebasePath(src_dir)])
+ os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+ if srcjars:
+ srcjar_paths = build_utils.ParseGnList(srcjars)
+ if srcjar_paths:
+ srcjar_dir = _NewTempSubdir('SRC_ROOT')
+ cmd.extend(['--sources', _RebasePath(srcjar_dir)])
+ for srcjar in srcjar_paths:
+ build_utils.ExtractAll(srcjar, path=srcjar_dir)
+
+ if disable:
+ cmd.extend(['--disable', ','.join(disable)])
+
+ project_dir = _NewTempSubdir('SRC_ROOT')
+ if android_sdk_version:
+ # Create a dummy project.properties file in a temporary "project" directory.
+ # It is the only way to add the Android SDK to lint's classpath. A proper
+ # classpath is necessary for most source-level checks.
+ with open(os.path.join(project_dir, 'project.properties'), 'w') \
+ as propfile:
+ print >> propfile, 'target=android-{}'.format(android_sdk_version)
+
+ # Put the manifest in a temporary directory in order to avoid lint detecting
+ # sibling res/ and src/ directories (which should be passed explicitly if
+ # they are to be included).
+ if not manifest_path:
+ manifest_path = os.path.join(
+ build_utils.DIR_SOURCE_ROOT, 'build', 'android',
+ 'AndroidManifest.xml')
+ os.symlink(os.path.abspath(manifest_path),
+ os.path.join(project_dir, 'AndroidManifest.xml'))
+ cmd.append(project_dir)
+
+ if os.path.exists(result_path):
+ os.remove(result_path)
+
+ env = os.environ.copy()
+ stderr_filter = None
+ if cache_dir:
+ env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
+ # When _JAVA_OPTIONS is set, java prints to stderr:
+ # Picked up _JAVA_OPTIONS: ...
+ #
+ # We drop all lines that contain _JAVA_OPTIONS from the output
+ stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
+
+ def fail_func(returncode, stderr):
+ if returncode != 0:
+ return True
+ if (include_unexpected and
+ 'Unexpected failure during lint analysis' in stderr):
+ return True
+ return False
+
+ try:
+ build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT,
+ env=env or None, stderr_filter=stderr_filter,
+ fail_func=fail_func)
+ except build_utils.CalledProcessError:
+ # There is a problem with lint usage
+ if not os.path.exists(result_path):
+ raise
+
+ # Lint sometimes produces (almost) empty result files:
+ if os.path.getsize(result_path) < 10:
+ if can_fail_build:
+ raise
+ elif not silent:
+ traceback.print_exc()
+ return
+
+ # There are actual lint issues
+ try:
+ num_issues = _ParseAndShowResultFile()
+ except Exception: # pylint: disable=broad-except
+ if not silent:
+ print 'Lint created unparseable xml file...'
+ print 'File contents:'
+ with open(result_path) as f:
+ print f.read()
+ if can_fail_build:
+ traceback.print_exc()
+ if can_fail_build:
+ raise
+ else:
+ return
+
+ _ProcessResultFile()
+ if num_issues == 0 and include_unexpected:
+ msg = 'Please refer to output above for unexpected lint failures.\n'
+ else:
+ msg = ('\nLint found %d new issues.\n'
+ ' - For full explanation, please refer to %s\n'
+ ' - For more information about lint and how to fix lint issues,'
+ ' please refer to %s\n' %
+ (num_issues, _RebasePath(result_path), _LINT_MD_URL))
+ if not silent:
+ print >> sys.stderr, msg
+ if can_fail_build:
+ raise Exception('Lint failed.')
+
+
+def _FindInDirectories(directories, filename_filter):
+ all_files = []
+ for directory in directories:
+ all_files.extend(build_utils.FindInDirectory(directory, filename_filter))
+ return all_files
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_argument('--lint-path', required=True,
+ help='Path to lint executable.')
+ parser.add_argument('--product-dir', required=True,
+ help='Path to product dir.')
+ parser.add_argument('--result-path', required=True,
+ help='Path to XML lint result file.')
+ parser.add_argument('--cache-dir', required=True,
+ help='Path to the directory in which the android cache '
+ 'directory tree should be stored.')
+ parser.add_argument('--platform-xml-path', required=True,
+ help='Path to api-platforms.xml')
+ parser.add_argument('--android-sdk-version',
+ help='Version (API level) of the Android SDK used for '
+ 'building.')
+ parser.add_argument('--create-cache', action='store_true',
+ help='Mark the lint cache file as an output rather than '
+ 'an input.')
+ parser.add_argument('--can-fail-build', action='store_true',
+ help='If set, script will exit with nonzero exit status'
+ ' if lint errors are present')
+ parser.add_argument('--include-unexpected-failures', action='store_true',
+ help='If set, script will exit with nonzero exit status'
+ ' if lint itself crashes with unexpected failures.')
+ parser.add_argument('--config-path',
+ help='Path to lint suppressions file.')
+ parser.add_argument('--disable',
+ help='List of checks to disable.')
+ parser.add_argument('--jar-path',
+ help='Jar file containing class files.')
+ parser.add_argument('--java-sources-file',
+ help='File containing a list of java files.')
+ parser.add_argument('--manifest-path',
+ help='Path to AndroidManifest.xml')
+ parser.add_argument('--classpath', default=[], action='append',
+ help='GYP-list of classpath .jar files')
+ parser.add_argument('--processed-config-path',
+ help='Path to processed lint suppressions file.')
+ parser.add_argument('--resource-dir',
+ help='Path to resource dir.')
+ parser.add_argument('--resource-sources', default=[], action='append',
+ help='GYP-list of resource sources (directories with '
+ 'resources or archives created by resource-generating '
+ 'tasks).')
+ parser.add_argument('--silent', action='store_true',
+ help='If set, script will not log anything.')
+ parser.add_argument('--src-dirs',
+ help='Directories containing java files.')
+ parser.add_argument('--srcjars',
+ help='GN list of included srcjars.')
+
+ args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+ sources = []
+ if args.src_dirs:
+ src_dirs = build_utils.ParseGnList(args.src_dirs)
+ sources = _FindInDirectories(src_dirs, '*.java')
+ elif args.java_sources_file:
+ sources.extend(build_utils.ReadSourcesList(args.java_sources_file))
+
+ if args.config_path and not args.processed_config_path:
+ parser.error('--config-path specified without --processed-config-path')
+ elif args.processed_config_path and not args.config_path:
+ parser.error('--processed-config-path specified without --config-path')
+
+ input_paths = [
+ args.lint_path,
+ args.platform_xml_path,
+ ]
+ if args.config_path:
+ input_paths.append(args.config_path)
+ if args.jar_path:
+ input_paths.append(args.jar_path)
+ if args.manifest_path:
+ input_paths.append(args.manifest_path)
+ if sources:
+ input_paths.extend(sources)
+ classpath = []
+ for gyp_list in args.classpath:
+ classpath.extend(build_utils.ParseGnList(gyp_list))
+ input_paths.extend(classpath)
+
+ resource_sources = []
+ if args.resource_dir:
+ # Backward compatibility with GYP
+ resource_sources += [args.resource_dir]
+
+ for gyp_list in args.resource_sources:
+ resource_sources += build_utils.ParseGnList(gyp_list)
+
+ for resource_source in resource_sources:
+ if os.path.isdir(resource_source):
+ input_paths.extend(build_utils.FindInDirectory(resource_source, '*'))
+ else:
+ input_paths.append(resource_source)
+
+ input_strings = [
+ args.can_fail_build,
+ args.include_unexpected_failures,
+ args.silent,
+ ]
+ if args.android_sdk_version:
+ input_strings.append(args.android_sdk_version)
+ if args.processed_config_path:
+ input_strings.append(args.processed_config_path)
+
+ disable = []
+ if args.disable:
+ disable = build_utils.ParseGnList(args.disable)
+ input_strings.extend(disable)
+
+ output_paths = [args.result_path, args.processed_config_path]
+
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(args.lint_path,
+ args.config_path,
+ args.processed_config_path,
+ args.manifest_path, args.result_path,
+ args.product_dir, sources,
+ args.jar_path,
+ args.cache_dir,
+ args.android_sdk_version,
+ args.srcjars,
+ resource_sources,
+ disable=disable,
+ classpath=classpath,
+ can_fail_build=args.can_fail_build,
+ include_unexpected=args.include_unexpected_failures,
+ silent=args.silent),
+ args,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ depfile_deps=classpath,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/lint.pydeps b/deps/v8/build/android/gyp/lint.pydeps
new file mode 100644
index 0000000000..a8616e4d37
--- /dev/null
+++ b/deps/v8/build/android/gyp/lint.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../gn_helpers.py
+lint.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/main_dex_list.py b/deps/v8/build/android/gyp/main_dex_list.py
new file mode 100755
index 0000000000..2435859099
--- /dev/null
+++ b/deps/v8/build/android/gyp/main_dex_list.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+from util import proguard_util
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--shrinked-android-path', required=True,
+ help='Path to shrinkedAndroid.jar')
+ parser.add_argument('--dx-path', required=True,
+ help='Path to dx.jar')
+ parser.add_argument('--main-dex-rules-path', action='append', default=[],
+ dest='main_dex_rules_paths',
+ help='A file containing a list of proguard rules to use '
+ 'in determining the classes to include in the '
+ 'main dex.')
+ parser.add_argument('--main-dex-list-path', required=True,
+ help='The main dex list file to generate.')
+ parser.add_argument('--inputs',
+ help='JARs for which a main dex list should be '
+ 'generated.')
+ parser.add_argument('--proguard-path', required=True,
+ help='Path to the proguard executable.')
+ parser.add_argument('--negative-main-dex-globs',
+ help='GN-list of globs of .class names (e.g. org/chromium/foo/Bar.class) '
+ 'that will fail the build if they match files in the main dex.')
+
+ parser.add_argument('paths', nargs='*', default=[],
+ help='JARs for which a main dex list should be '
+ 'generated.')
+
+ args = parser.parse_args(build_utils.ExpandFileArgs(args))
+
+ depfile_deps = []
+ if args.inputs:
+ args.inputs = build_utils.ParseGnList(args.inputs)
+ depfile_deps = args.inputs
+ args.paths.extend(args.inputs)
+
+ if args.negative_main_dex_globs:
+ args.negative_main_dex_globs = build_utils.ParseGnList(
+ args.negative_main_dex_globs)
+
+ proguard_cmd = [
+ 'java', '-jar', args.proguard_path,
+ '-forceprocessing',
+ '-dontwarn', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+ '-libraryjars', args.shrinked_android_path,
+ ]
+ for m in args.main_dex_rules_paths:
+ proguard_cmd.extend(['-include', m])
+
+ main_dex_list_cmd = [
+ 'java', '-cp', args.dx_path,
+ 'com.android.multidex.MainDexListBuilder',
+ # This workaround significantly increases main dex size and doesn't seem to
+ # be needed by Chrome. See comment in the source:
+ # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
+ '--disable-annotation-resolution-workaround',
+ ]
+
+ input_paths = list(args.paths)
+ input_paths += [
+ args.shrinked_android_path,
+ args.dx_path,
+ ]
+ input_paths += args.main_dex_rules_paths
+
+ input_strings = [
+ proguard_cmd,
+ main_dex_list_cmd,
+ ]
+ if args.negative_main_dex_globs:
+ input_strings += args.negative_main_dex_globs
+
+ output_paths = [
+ args.main_dex_list_path,
+ ]
+
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(proguard_cmd, main_dex_list_cmd, args.paths,
+ args.main_dex_list_path,
+ args.negative_main_dex_globs),
+ args,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ depfile_deps=depfile_deps,
+ add_pydeps=False)
+
+ return 0
+
+
+def _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs):
+ # Check if ProGuard kept any unwanted classes.
+ found_unwanted_classes = sorted(
+ p for p in kept_classes
+ if build_utils.MatchesGlob(p, negative_main_dex_globs))
+
+ if found_unwanted_classes:
+ first_class = found_unwanted_classes[0].replace(
+ '.class', '').replace('/', '.')
+ proguard_cmd += ['-whyareyoukeeping', 'class', first_class, '{}']
+ output = build_utils.CheckOutput(
+ proguard_cmd, print_stderr=False,
+ stdout_filter=proguard_util.ProguardOutputFilter())
+ raise Exception(
+ ('Found classes that should not be in the main dex:\n {}\n\n'
+ 'Here is the -whyareyoukeeping output for {}: \n{}').format(
+ '\n '.join(found_unwanted_classes), first_class, output))
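+
+
+# Illustrative example (hypothetical glob): a kept class
+# 'org/chromium/foo/Bar.class' checked against negative_main_dex_globs of
+# ['org/chromium/foo/*'] matches, so the exception above fires and the
+# -whyareyoukeeping output explains what pulled org.chromium.foo.Bar in.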
+
+
+def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path,
+ negative_main_dex_globs):
+ paths_arg = ':'.join(paths)
+ main_dex_list = ''
+ try:
+ with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
+ # Step 1: Use ProGuard to find all @MainDex code, and all code reachable
+ # from @MainDex code (recursive).
+ proguard_cmd += [
+ '-injars', paths_arg,
+ '-outjars', temp_jar.name
+ ]
+ build_utils.CheckOutput(proguard_cmd, print_stderr=False)
+
+ # Record the classes kept by ProGuard. Not used by the build, but useful
+ # for debugging what classes are kept by ProGuard vs. MainDexListBuilder.
+ with zipfile.ZipFile(temp_jar.name) as z:
+ kept_classes = [p for p in z.namelist() if p.endswith('.class')]
+ with open(main_dex_list_path + '.partial', 'w') as f:
+ f.write('\n'.join(kept_classes) + '\n')
+
+ if negative_main_dex_globs:
+ # Perform assertions before MainDexListBuilder because:
+ # a) MainDexListBuilder is not recursive, so being included by it isn't
+ # a huge deal.
+ # b) Errors are much more actionable.
+ _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs)
+
+ # Step 2: Expand inclusion list to all classes referenced by the .class
+ # files of kept classes (non-recursive).
+ main_dex_list_cmd += [
+ temp_jar.name, paths_arg
+ ]
+ main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
+
+ except build_utils.CalledProcessError as e:
+ if 'output jar is empty' in e.output:
+ pass
+ elif "input doesn't contain any classes" in e.output:
+ pass
+ else:
+ raise
+
+ with open(main_dex_list_path, 'w') as main_dex_list_file:
+ main_dex_list_file.write(main_dex_list)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/main_dex_list.pydeps b/deps/v8/build/android/gyp/main_dex_list.pydeps
new file mode 100644
index 0000000000..8c482dfa52
--- /dev/null
+++ b/deps/v8/build/android/gyp/main_dex_list.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/main_dex_list.pydeps build/android/gyp/main_dex_list.py
+../../gn_helpers.py
+main_dex_list.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/proguard_util.py
diff --git a/deps/v8/build/android/gyp/merge_manifest.py b/deps/v8/build/android/gyp/merge_manifest.py
new file mode 100755
index 0000000000..0637d43492
--- /dev/null
+++ b/deps/v8/build/android/gyp/merge_manifest.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges dependency Android manifests into a root manifest."""
+
+import argparse
+import contextlib
+import os
+import shlex
+import sys
+import tempfile
+import xml.dom.minidom as minidom
+import xml.etree.ElementTree as ElementTree
+
+from util import build_utils
+from util import diff_utils
+
+# Tools library directory - relative to Android SDK root
+_SDK_TOOLS_LIB_DIR = os.path.join('tools', 'lib')
+
+_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger'
+_MANIFEST_MERGER_JARS = [
+ 'common{suffix}.jar',
+ 'manifest-merger{suffix}.jar',
+ 'sdk-common{suffix}.jar',
+ 'sdklib{suffix}.jar',
+]
+
+_TOOLS_NAMESPACE_PREFIX = 'tools'
+_TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+_ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+
+# Without registering namespaces, ElementTree converts them to "ns0" and "ns1".
+ElementTree.register_namespace('tools', _TOOLS_NAMESPACE)
+ElementTree.register_namespace('android', _ANDROID_NAMESPACE)
+
+
+@contextlib.contextmanager
+def _ProcessManifest(manifest_path):
+ """Patches an Android manifest to always include the 'tools' namespace
+ declaration, as it is not propagated by the manifest merger from the SDK.
+
+ See https://issuetracker.google.com/issues/63411481
+ """
+ doc = minidom.parse(manifest_path)
+ manifests = doc.getElementsByTagName('manifest')
+ assert len(manifests) == 1
+ manifest = manifests[0]
+ package = manifest.getAttribute('package')
+
+ manifest.setAttribute('xmlns:%s' % _TOOLS_NAMESPACE_PREFIX, _TOOLS_NAMESPACE)
+
+ tmp_prefix = os.path.basename(manifest_path)
+ with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
+ doc.writexml(patched_manifest)
+ patched_manifest.flush()
+ yield patched_manifest.name, package
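+
+
+# Illustrative usage (mirrors the call in main() below):
+#   with _ProcessManifest('path/to/AndroidManifest.xml') as (patched, package):
+#     ... # |patched| is a temp copy with xmlns:tools added; |package| is
+#         # the manifest's package attribute.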
+
+
+def _BuildManifestMergerClasspath(build_vars):
+ return ':'.join([
+ os.path.join(
+ build_vars['android_sdk_root'], _SDK_TOOLS_LIB_DIR,
+ jar.format(suffix=build_vars['android_sdk_tools_version_suffix']))
+ for jar in _MANIFEST_MERGER_JARS
+ ])
+
+
+def _SortAndStripElementTree(tree, reverse_toplevel=False):
+ for node in tree:
+ if node.text and node.text.isspace():
+ node.text = None
+ _SortAndStripElementTree(node)
+ tree[:] = sorted(tree, key=ElementTree.tostring, reverse=reverse_toplevel)
+
+
+def _NormalizeManifest(path):
+ with open(path) as f:
+ # This also strips comments and sorts node attributes alphabetically.
+ root = ElementTree.fromstring(f.read())
+
+ # Sort nodes alphabetically, recursively.
+ _SortAndStripElementTree(root, reverse_toplevel=True)
+
+ # Fix up whitespace/indentation.
+ dom = minidom.parseString(ElementTree.tostring(root))
+ lines = []
+ for l in dom.toprettyxml(indent=' ').splitlines():
+ if l.strip():
+ if len(l) > 100:
+ indent = ' ' * l.find('<')
+ attributes = shlex.split(l, posix=False)
+ lines.append('{}{}'.format(indent, attributes[0]))
+ for attribute in attributes[1:]:
+ lines.append('{} {}'.format(indent, attribute))
+ else:
+ lines.append(l)
+
+ return '\n'.join(lines)
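+
+
+# Illustrative example of the reflow above: a hypothetical over-100-character
+# line
+#   <activity android:name="Foo" android:exported="false">
+# becomes
+#   <activity
+#       android:name="Foo"
+#       android:exported="false">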
+
+
+def main(argv):
+ argv = build_utils.ExpandFileArgs(argv)
+ parser = argparse.ArgumentParser(description=__doc__)
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--build-vars',
+ help='Path to GN build vars file',
+ required=True)
+ parser.add_argument('--root-manifest',
+ help='Root manifest which to merge into',
+ required=True)
+ parser.add_argument(
+ '--expected-manifest', help='Expected contents for the merged manifest.')
+ parser.add_argument('--normalized-output', help='Normalized merged manifest.')
+ parser.add_argument(
+ '--verify-expected-manifest',
+ action='store_true',
+ help='Fail if expected contents do not match merged manifest contents.')
+ parser.add_argument('--output', help='Output manifest path', required=True)
+ parser.add_argument('--extras',
+ help='GN list of additional manifests to merge.')
+ args = parser.parse_args(argv)
+
+ classpath = _BuildManifestMergerClasspath(
+ build_utils.ReadBuildVars(args.build_vars))
+
+ with build_utils.AtomicOutput(args.output) as output:
+ cmd = [
+ 'java',
+ '-cp',
+ classpath,
+ _MANIFEST_MERGER_MAIN_CLASS,
+ '--out',
+ output.name,
+ ]
+
+ extras = build_utils.ParseGnList(args.extras)
+ if extras:
+ cmd += ['--libs', ':'.join(extras)]
+
+ with _ProcessManifest(args.root_manifest) as tup:
+ root_manifest, package = tup
+ cmd += ['--main', root_manifest, '--property', 'PACKAGE=' + package]
+ build_utils.CheckOutput(cmd,
+ # https://issuetracker.google.com/issues/63514300:
+ # The merger doesn't set a nonzero exit code for failures.
+ fail_func=lambda returncode, stderr: returncode != 0 or
+ build_utils.IsTimeStale(output.name, [root_manifest] + extras))
+
+ if args.expected_manifest:
+ with build_utils.AtomicOutput(args.normalized_output) as normalized_output:
+ normalized_output.write(_NormalizeManifest(args.output))
+ msg = diff_utils.DiffFileContents(args.expected_manifest,
+ args.normalized_output)
+ if msg:
+ sys.stderr.write("""\
+AndroidManifest.xml expectations file needs updating. For details see:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md
+""")
+ sys.stderr.write(msg)
+ if args.verify_expected_manifest:
+ sys.exit(1)
+
+ if args.depfile:
+ inputs = extras + classpath.split(':')
+ build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/merge_manifest.pydeps b/deps/v8/build/android/gyp/merge_manifest.pydeps
new file mode 100644
index 0000000000..797cd5fbd6
--- /dev/null
+++ b/deps/v8/build/android/gyp/merge_manifest.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py
+../../gn_helpers.py
+merge_manifest.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/prepare_resources.py b/deps/v8/build/android/gyp/prepare_resources.py
new file mode 100755
index 0000000000..a463f29645
--- /dev/null
+++ b/deps/v8/build/android/gyp/prepare_resources.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resource directories to generate .resources.zip, R.txt and
+.srcjar files."""
+
+import argparse
+import collections
+import os
+import re
+import shutil
+import sys
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+from util import resource_utils
+
+_AAPT_IGNORE_PATTERN = ':'.join([
+ 'OWNERS', # Allow OWNERS files within res/
+    '*.py',  # PRESUBMIT.py files sometimes exist.
+ '*.pyc',
+ '*~', # Some editors create these as temp files.
+    '.*',  # Never makes sense to include dot files or dirs.
+ '*.d.stamp', # Ignore stamp files
+ ])
+
+def _ParseArgs(args):
+ """Parses command line options.
+
+ Returns:
+ An options object as from argparse.ArgumentParser.parse_args()
+ """
+ parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+ input_opts.add_argument(
+ '--aapt-path', required=True, help='Path to the Android aapt tool')
+
+ input_opts.add_argument('--resource-dirs',
+ default='[]',
+ help='A list of input directories containing resources '
+ 'for this target.')
+
+ input_opts.add_argument(
+ '--shared-resources',
+ action='store_true',
+ help='Make resources shareable by generating an onResourcesLoaded() '
+ 'method in the R.java source file.')
+
+ input_opts.add_argument('--custom-package',
+ help='Optional Java package for main R.java.')
+
+ input_opts.add_argument(
+ '--android-manifest',
+ help='Optional AndroidManifest.xml path. Only used to extract a package '
+ 'name for R.java if a --custom-package is not provided.')
+
+ output_opts.add_argument(
+ '--resource-zip-out',
+ help='Path to a zip archive containing all resources from '
+ '--resource-dirs, merged into a single directory tree. This will '
+ 'also include auto-generated v14-compatible resources unless '
+ '--v14-skip is used.')
+
+ output_opts.add_argument('--srcjar-out',
+ help='Path to .srcjar to contain the generated R.java.')
+
+ output_opts.add_argument('--r-text-out',
+ help='Path to store the generated R.txt file.')
+
+ input_opts.add_argument(
+ '--v14-skip',
+ action="store_true",
+ help='Do not generate nor verify v14 resources.')
+
+ input_opts.add_argument(
+ '--strip-drawables',
+ action="store_true",
+ help='Remove drawables from the resources.')
+
+ options = parser.parse_args(args)
+
+ resource_utils.HandleCommonOptions(options)
+
+ options.resource_dirs = build_utils.ParseGnList(options.resource_dirs)
+
+ return options
+
+
+def _GenerateGlobs(pattern):
+  # This function processes the aapt ignore assets pattern into a list of globs
+  # to be used to exclude files on the python side. It removes the '!', which
+  # aapt uses to mean 'not chatty', so that aapt does not print a message when
+  # a file is ignored (we don't print anyway, so it is not required). This
+  # function does not handle the <dir> and <file> prefixes used by aapt, which
+  # are assumed not to be included in the pattern string.
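+  # For example, '!OWNERS:!*.py:*~' becomes ['OWNERS', '*.py', '*~'].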
+ return pattern.replace('!', '').split(':')
+
+
+def _ZipResources(resource_dirs, zip_path, ignore_pattern):
+ # Python zipfile does not provide a way to replace a file (it just writes
+ # another file with the same name). So, first collect all the files to put
+ # in the zip (with proper overriding), and then zip them.
+  # ignore_pattern is a ':'-delimited list of globs matching files that should
+  # not be part of the final resource zip.
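+  # E.g. if both dir1/values/strings.xml and dir2/values/strings.xml exist,
+  # the copy from the directory listed later in |resource_dirs| wins.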
+ files_to_zip = dict()
+ files_to_zip_without_generated = dict()
+ globs = _GenerateGlobs(ignore_pattern)
+ for d in resource_dirs:
+ for root, _, files in os.walk(d):
+ for f in files:
+ archive_path = f
+ parent_dir = os.path.relpath(root, d)
+ if parent_dir != '.':
+ archive_path = os.path.join(parent_dir, f)
+ path = os.path.join(root, f)
+ if build_utils.MatchesGlob(archive_path, globs):
+ continue
+ # We want the original resource dirs in the .info file rather than the
+ # generated overridden path.
+ if not path.startswith('/tmp'):
+ files_to_zip_without_generated[archive_path] = path
+ files_to_zip[archive_path] = path
+ resource_utils.CreateResourceInfoFile(files_to_zip_without_generated,
+ zip_path)
+ build_utils.DoZip(files_to_zip.iteritems(), zip_path)
+
+
+def _GenerateRTxt(options, dep_subdirs, gen_dir):
+ """Generate R.txt file.
+
+ Args:
+ options: The command-line options tuple.
+ dep_subdirs: List of directories containing extracted dependency resources.
+    gen_dir: Directory where the aapt-generated files will go. In particular,
+      the output file is always generated as |{gen_dir}/R.txt|.
+ """
+ # NOTE: This uses aapt rather than aapt2 because 'aapt2 compile' does not
+ # support the --output-text-symbols option yet (https://crbug.com/820460).
+ package_command = [options.aapt_path,
+ 'package',
+ '-m',
+ '-M', resource_utils.EMPTY_ANDROID_MANIFEST_PATH,
+ '--no-crunch',
+ '--auto-add-overlay',
+ '--no-version-vectors',
+ ]
+ for j in options.include_resources:
+ package_command += ['-I', j]
+
+ ignore_pattern = _AAPT_IGNORE_PATTERN
+ if options.strip_drawables:
+ ignore_pattern += ':*drawable*'
+ package_command += [
+ '--output-text-symbols',
+ gen_dir,
+ '-J',
+ gen_dir, # Required for R.txt generation.
+ '--ignore-assets',
+ ignore_pattern
+ ]
+
+ # Adding all dependencies as sources is necessary for @type/foo references
+ # to symbols within dependencies to resolve. However, it has the side-effect
+ # that all Java symbols from dependencies are copied into the new R.java.
+ # E.g.: It enables an arguably incorrect usage of
+ # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
+ # more correct. This is just how Android works.
+ for d in dep_subdirs:
+ package_command += ['-S', d]
+
+ for d in options.resource_dirs:
+ package_command += ['-S', d]
+
+ # Only creates an R.txt
+ build_utils.CheckOutput(
+ package_command, print_stdout=False, print_stderr=False)
+
+
+def _GenerateResourcesZip(output_resource_zip, input_resource_dirs, v14_skip,
+ strip_drawables, temp_dir):
+  """Generate a .resources.zip file from a list of input resource dirs.
+
+ Args:
+ output_resource_zip: Path to the output .resources.zip file.
+ input_resource_dirs: A list of input resource directories.
+    v14_skip: If False, v14-compatible resources will also be generated in
+      |{temp_dir}/v14| and added to the final zip.
+    strip_drawables: If True, drawable resources are excluded from the final
+      zip.
+    temp_dir: Path to temporary directory.
+ """
+ if not v14_skip:
+ # Generate v14-compatible resources in temp_dir.
+ v14_dir = os.path.join(temp_dir, 'v14')
+ build_utils.MakeDirectory(v14_dir)
+
+ for resource_dir in input_resource_dirs:
+ generate_v14_compatible_resources.GenerateV14Resources(
+ resource_dir,
+ v14_dir)
+
+ input_resource_dirs.append(v14_dir)
+
+ ignore_pattern = _AAPT_IGNORE_PATTERN
+ if strip_drawables:
+ ignore_pattern += ':*drawable*'
+ _ZipResources(input_resource_dirs, output_resource_zip, ignore_pattern)
+
+
+def _OnStaleMd5(options):
+ with resource_utils.BuildContext() as build:
+ if options.r_text_in:
+ r_txt_path = options.r_text_in
+ else:
+ # Extract dependencies to resolve @foo/type references into
+ # dependent packages.
+ dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
+ build.deps_dir)
+
+ _GenerateRTxt(options, dep_subdirs, build.gen_dir)
+ r_txt_path = build.r_txt_path
+
+ # 'aapt' doesn't generate any R.txt file if res/ was empty.
+ if not os.path.exists(r_txt_path):
+ build_utils.Touch(r_txt_path)
+
+ if options.r_text_out:
+ shutil.copyfile(r_txt_path, options.r_text_out)
+
+ if options.srcjar_out:
+ package = options.custom_package
+ if not package and options.android_manifest:
+ package = resource_utils.ExtractPackageFromManifest(
+ options.android_manifest)
+
+ # Don't create a .java file for the current resource target when no
+ # package name was provided (either by manifest or build rules).
+ if package:
+ # All resource IDs should be non-final here, but the
+ # onResourcesLoaded() method should only be generated if
+ # --shared-resources is used.
+ rjava_build_options = resource_utils.RJavaBuildOptions()
+ rjava_build_options.ExportAllResources()
+ rjava_build_options.ExportAllStyleables()
+ if options.shared_resources:
+ rjava_build_options.GenerateOnResourcesLoaded()
+
+ resource_utils.CreateRJavaFiles(
+ build.srcjar_dir, package, r_txt_path,
+ options.extra_res_packages,
+ options.extra_r_text_files,
+ rjava_build_options)
+
+ build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)
+
+ if options.resource_zip_out:
+ _GenerateResourcesZip(options.resource_zip_out, options.resource_dirs,
+ options.v14_skip, options.strip_drawables,
+ build.temp_dir)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ # Order of these must match order specified in GN so that the correct one
+ # appears first in the depfile.
+ possible_output_paths = [
+ options.resource_zip_out,
+ options.r_text_out,
+ options.srcjar_out,
+ ]
+ output_paths = [x for x in possible_output_paths if x]
+
+  # List python deps in input_strings rather than input_paths since their
+  # contents do not change what gets written to the depfile.
+ input_strings = options.extra_res_packages + [
+ options.custom_package,
+ options.shared_resources,
+ options.v14_skip,
+ options.strip_drawables,
+ ]
+
+ possible_input_paths = [
+ options.aapt_path,
+ options.android_manifest,
+ ]
+ possible_input_paths += options.include_resources
+ input_paths = [x for x in possible_input_paths if x]
+ input_paths.extend(options.dependencies_res_zips)
+ input_paths.extend(options.extra_r_text_files)
+
+ # Resource files aren't explicitly listed in GN. Listing them in the depfile
+ # ensures the target will be marked stale when resource files are removed.
+ depfile_deps = []
+ resource_names = []
+ for resource_dir in options.resource_dirs:
+ for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+ # Don't list the empty .keep file in depfile. Since it doesn't end up
+ # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
+ # if ever moved.
+ if not resource_file.endswith(os.path.join('empty', '.keep')):
+ input_paths.append(resource_file)
+ depfile_deps.append(resource_file)
+ resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+ # Resource filenames matter to the output, so add them to strings as well.
+ # This matters if a file is renamed but not changed (http://crbug.com/597126).
+ input_strings.extend(sorted(resource_names))
+
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(options),
+ options,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ depfile_deps=depfile_deps,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/prepare_resources.pydeps b/deps/v8/build/android/gyp/prepare_resources.pydeps
new file mode 100644
index 0000000000..0e9ccfbe5e
--- /dev/null
+++ b/deps/v8/build/android/gyp/prepare_resources.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+generate_v14_compatible_resources.py
+prepare_resources.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/proguard.py b/deps/v8/build/android/gyp/proguard.py
new file mode 100755
index 0000000000..bb86b2dca6
--- /dev/null
+++ b/deps/v8/build/android/gyp/proguard.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import cStringIO
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+from util import diff_utils
+from util import proguard_util
+
+_GENERATED_PROGUARD_HEADER = """
+################################################################################
+# Dynamically generated from build/android/gyp/proguard.py
+################################################################################
+"""
+
+# Example:
+# android.arch.core.internal.SafeIterableMap$Entry -> b:
+# 1:1:java.lang.Object getKey():353:353 -> getKey
+# 2:2:java.lang.Object getValue():359:359 -> getValue
+def _RemoveMethodMappings(orig_path, out_fd):
+ with open(orig_path) as in_fd:
+ for line in in_fd:
+ if line[:1] != ' ':
+ out_fd.write(line)
+ out_fd.flush()
+
+
+def _ParseOptions(args):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_option('--proguard-path',
+ help='Path to the proguard.jar to use.')
+ parser.add_option('--r8-path',
+ help='Path to the R8.jar to use.')
+ parser.add_option('--input-paths',
+ help='Paths to the .jar files proguard should run on.')
+ parser.add_option('--output-path', help='Path to the generated .jar file.')
+ parser.add_option('--proguard-configs', action='append',
+ help='Paths to proguard configuration files.')
+ parser.add_option('--proguard-config-exclusions',
+ default='',
+ help='GN list of paths to proguard configuration files '
+ 'included by --proguard-configs, but that should '
+ 'not actually be included.')
+ parser.add_option(
+ '--apply-mapping', help='Path to proguard mapping to apply.')
+ parser.add_option('--mapping-output',
+ help='Path for proguard to output mapping file to.')
+ parser.add_option(
+ '--output-config',
+ help='Path to write the merged proguard config file to.')
+ parser.add_option(
+ '--expected-configs-file',
+ help='Path to a file containing the expected merged proguard configs')
+ parser.add_option(
+ '--verify-expected-configs',
+ action='store_true',
+ help='Fail if the expected merged proguard configs differ from the '
+ 'generated merged proguard configs.')
+ parser.add_option('--classpath', action='append',
+ help='Classpath for proguard.')
+ parser.add_option('--main-dex-rules-path', action='append',
+                    help='Paths to main dex rules for multidex '
+                    '- only works with R8.')
+ parser.add_option('--min-api', default='',
+ help='Minimum Android API level compatibility.')
+ parser.add_option('--verbose', '-v', action='store_true',
+ help='Print all proguard output')
+ parser.add_option(
+ '--repackage-classes',
+ help='Unique package name given to an asynchronously proguarded module')
+
+ options, _ = parser.parse_args(args)
+
+ assert not options.main_dex_rules_path or options.r8_path, \
+ 'R8 must be enabled to pass main dex rules.'
+
+ classpath = []
+ for arg in options.classpath:
+ classpath += build_utils.ParseGnList(arg)
+ options.classpath = classpath
+
+ configs = []
+ for arg in options.proguard_configs:
+ configs += build_utils.ParseGnList(arg)
+ options.proguard_configs = configs
+ options.proguard_config_exclusions = (
+ build_utils.ParseGnList(options.proguard_config_exclusions))
+
+ options.input_paths = build_utils.ParseGnList(options.input_paths)
+
+ if not options.mapping_output:
+ options.mapping_output = options.output_path + '.mapping'
+
+ if options.apply_mapping:
+ options.apply_mapping = os.path.abspath(options.apply_mapping)
+
+ return options
+
+
+def _VerifyExpectedConfigs(expected_path, actual_path, fail_on_exit):
+ msg = diff_utils.DiffFileContents(expected_path, actual_path)
+ if not msg:
+ return
+
+ sys.stderr.write("""\
+Proguard flag expectations file needs updating. For details see:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md
+""")
+ sys.stderr.write(msg)
+ if fail_on_exit:
+ sys.exit(1)
+
+
+def _MoveTempDexFile(tmp_dex_dir, dex_path):
+ """Move the temp dex file out of |tmp_dex_dir|.
+
+ Args:
+ tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp().
+ The directory should have just a single file.
+ dex_path: Target path to move dex file to.
+
+ Raises:
+ Exception if there are multiple files in |tmp_dex_dir|.
+ """
+ tempfiles = os.listdir(tmp_dex_dir)
+ if len(tempfiles) > 1:
+ raise Exception('%d files created, expected 1' % len(tempfiles))
+
+ tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0])
+ shutil.move(tmp_dex_path, dex_path)
+
+
+def _CreateR8Command(options, map_output_path, output_dir, tmp_config_path,
+ libraries):
+ cmd = [
+ 'java', '-jar', options.r8_path,
+ '--no-desugaring',
+ '--no-data-resources',
+ '--output', output_dir,
+ '--pg-map-output', map_output_path,
+ ]
+
+ for lib in libraries:
+ cmd += ['--lib', lib]
+
+ for config_file in options.proguard_configs:
+ cmd += ['--pg-conf', config_file]
+
+ temp_config_string = ''
+ if options.apply_mapping or options.repackage_classes or options.min_api:
+ with open(tmp_config_path, 'w') as f:
+ if options.apply_mapping:
+ temp_config_string += '-applymapping \'%s\'\n' % (options.apply_mapping)
+ if options.repackage_classes:
+ temp_config_string += '-repackageclasses \'%s\'\n' % (
+ options.repackage_classes)
+ if options.min_api:
+ temp_config_string += (
+ '-assumevalues class android.os.Build$VERSION {\n' +
+ ' public static final int SDK_INT return ' + options.min_api +
+ '..9999;\n}\n')
+ f.write(temp_config_string)
+ cmd += ['--pg-conf', tmp_config_path]
+
+ if options.main_dex_rules_path:
+ for main_dex_rule in options.main_dex_rules_path:
+ cmd += ['--main-dex-rules', main_dex_rule]
+
+ cmd += options.input_paths
+ return cmd, temp_config_string
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseOptions(args)
+
+ libraries = []
+ for p in options.classpath:
+    # If a jar is among the inputs, don't also include it as a library jar.
+ if p not in libraries and p not in options.input_paths:
+ libraries.append(p)
+
+ # TODO(agrieve): Remove proguard usages.
+ if options.r8_path:
+ temp_config_string = ''
+ with build_utils.TempDir() as tmp_dir:
+ tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
+ tmp_proguard_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
+      # If there is no output (i.e. no classes are kept), touching the mapping
+      # file up front prevents this script from failing.
+ build_utils.Touch(tmp_mapping_path)
+
+ f = cStringIO.StringIO()
+ proguard_util.WriteFlagsFile(
+ options.proguard_configs, f, exclude_generated=True)
+ merged_configs = f.getvalue()
+ # Fix up line endings (third_party configs can have windows endings)
+ merged_configs = merged_configs.replace('\r', '')
+ f.close()
+ print_stdout = '-whyareyoukeeping' in merged_configs
+
+ if options.output_path.endswith('.dex'):
+ with build_utils.TempDir() as tmp_dex_dir:
+ cmd, temp_config_string = _CreateR8Command(
+ options, tmp_mapping_path, tmp_dex_dir, tmp_proguard_config_path,
+ libraries)
+ build_utils.CheckOutput(cmd, print_stdout=print_stdout)
+ _MoveTempDexFile(tmp_dex_dir, options.output_path)
+ else:
+ cmd, temp_config_string = _CreateR8Command(
+ options, tmp_mapping_path, options.output_path,
+ tmp_proguard_config_path, libraries)
+ build_utils.CheckOutput(cmd, print_stdout=print_stdout)
+
+ # Copy output files to correct locations.
+ with build_utils.AtomicOutput(options.mapping_output) as mapping:
+ # Mapping files generated by R8 include comments that may break
+ # some of our tooling so remove those.
+ with open(tmp_mapping_path) as tmp:
+ mapping.writelines(l for l in tmp if not l.startswith('#'))
+
+ with build_utils.AtomicOutput(options.output_config) as f:
+ f.write(merged_configs)
+ if temp_config_string:
+ f.write(_GENERATED_PROGUARD_HEADER)
+ f.write(temp_config_string)
+
+ if options.expected_configs_file:
+ _VerifyExpectedConfigs(options.expected_configs_file,
+ options.output_config,
+ options.verify_expected_configs)
+
+ other_inputs = []
+ if options.apply_mapping:
+        other_inputs.append(options.apply_mapping)
+
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.output_path,
+ inputs=options.proguard_configs + options.input_paths + libraries +
+ other_inputs,
+ add_pydeps=False)
+ else:
+ proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
+ proguard.injars(options.input_paths)
+ proguard.configs(options.proguard_configs)
+ proguard.config_exclusions(options.proguard_config_exclusions)
+ proguard.outjar(options.output_path)
+ proguard.mapping_output(options.mapping_output)
+ proguard.libraryjars(libraries)
+ proguard.verbose(options.verbose)
+ proguard.min_api(options.min_api)
+ # Do not consider the temp file as an input since its name is random.
+ input_paths = proguard.GetInputs()
+
+ with tempfile.NamedTemporaryFile() as f:
+ if options.apply_mapping:
+ input_paths.append(options.apply_mapping)
+ # Maintain only class name mappings in the .mapping file in order to
+ # work around what appears to be a ProGuard bug in -applymapping:
+ # method 'int close()' is not being kept as 'a', but remapped to 'c'
+ _RemoveMethodMappings(options.apply_mapping, f)
+ proguard.mapping(f.name)
+
+ input_strings = proguard.build()
+ if f.name in input_strings:
+ input_strings[input_strings.index(f.name)] = '$M'
+
+ build_utils.CallAndWriteDepfileIfStale(
+ proguard.CheckOutput,
+ options,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=proguard.GetOutputs(),
+ depfile_deps=proguard.GetDepfileDeps(),
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/proguard.pydeps b/deps/v8/build/android/gyp/proguard.pydeps
new file mode 100644
index 0000000000..fd870a0e4b
--- /dev/null
+++ b/deps/v8/build/android/gyp/proguard.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../gn_helpers.py
+proguard.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
+util/proguard_util.py
diff --git a/deps/v8/build/android/gyp/test/BUILD.gn b/deps/v8/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000000..2deac1d56f
--- /dev/null
+++ b/deps/v8/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,13 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+ java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+ deps = [
+ ":hello_world_java",
+ ]
+ java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+ main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000000..10860d8332
--- /dev/null
+++ b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+ public static void main(String[] args) {
+ if (args.length > 0) {
+ System.exit(Integer.parseInt(args[0]));
+ }
+ HelloWorldPrinter.print();
+ }
+}
+
diff --git a/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000000..b09673e21f
--- /dev/null
+++ b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+ public static void print() {
+ System.out.println("Hello, world!");
+ }
+}
+
diff --git a/deps/v8/build/android/gyp/util/__init__.py b/deps/v8/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/gyp/util/build_utils.py b/deps/v8/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000000..e4d7cc6128
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/build_utils.py
@@ -0,0 +1,650 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for GN action()s."""
+
+import collections
+import contextlib
+import filecmp
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+# Any new non-system import must be added to:
+# //build/config/android/internal_rules.gni
+
+from util import md5_check
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
+# Definition copied from pylib/constants/__init__.py to avoid adding
+# a dependency on pylib.
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir, os.pardir)))
+
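+# Fixed timestamp and file attributes applied to zip entries by
+# AddToZipHermetic() below, so that output zips are deterministic.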
+HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
+_HERMETIC_FILE_ATTR = (0o644 << 16)
+
+
+@contextlib.contextmanager
+def TempDir():
+ dirname = tempfile.mkdtemp()
+ try:
+ yield dirname
+ finally:
+ shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+ try:
+ os.makedirs(dir_path)
+ except OSError:
+ pass
+
+
+def DeleteDirectory(dir_path):
+ if os.path.exists(dir_path):
+ shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+ if fail_if_missing and not os.path.exists(path):
+ raise Exception(path + ' doesn\'t exist.')
+
+ MakeDirectory(os.path.dirname(path))
+ with open(path, 'a'):
+ os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter):
+ files = []
+ for root, _dirnames, filenames in os.walk(directory):
+ matched_files = fnmatch.filter(filenames, filename_filter)
+ files.extend((os.path.join(root, f) for f in matched_files))
+ return files
+
+
+def ReadBuildVars(path):
+ """Parses a build_vars.txt into a dict."""
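+  # Each line has the form key=value, e.g. (illustrative):
+  #   android_sdk_root=/path/to/sdk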
+ with open(path) as f:
+ return dict(l.rstrip().split('=', 1) for l in f)
+
+
+def ParseGnList(value):
+ """Converts a "GN-list" command-line parameter into a list.
+
+ Conversions handled:
+ * None -> []
+ * '' -> []
+ * 'asdf' -> ['asdf']
+ * '["a", "b"]' -> ['a', 'b']
+ * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (flattened list)
+
+ The common use for this behavior is in the Android build where things can
+ take lists of @FileArg references that are expanded via ExpandFileArgs.
+ """
+ # Convert None to [].
+ if not value:
+ return []
+ # Convert a list of GN lists to a flattened list.
+ if isinstance(value, list):
+ ret = []
+ for arg in value:
+ ret.extend(ParseGnList(arg))
+ return ret
+ # Convert normal GN list.
+ if value.startswith('['):
+ return gn_helpers.GNValueParser(value).ParseList()
+ # Convert a single string value to a list.
+ return [value]
+
+
+def CheckOptions(options, parser, required=None):
+ if not required:
+ return
+ for option_name in required:
+ if getattr(options, option_name) is None:
+ parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+ old_dump = None
+ if os.path.exists(path):
+ with open(path, 'r') as oldfile:
+ old_dump = oldfile.read()
+
+ new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+ if not only_if_changed or old_dump != new_dump:
+ with open(path, 'w') as outfile:
+ outfile.write(new_dump)
+
+
+@contextlib.contextmanager
+def AtomicOutput(path, only_if_changed=True):
+ """Helper to prevent half-written outputs.
+
+ Args:
+ path: Path to the final output file, which will be written atomically.
+ only_if_changed: If True (the default), do not touch the filesystem
+ if the content has not changed.
+ Returns:
+    A python context manager that yields a NamedTemporaryFile instance
+ that must be used by clients to write the data to. On exit, the
+ manager will try to replace the final output file with the
+ temporary one if necessary. The temporary file is always destroyed
+ on exit.
+ Example:
+ with build_utils.AtomicOutput(output_path) as tmp_file:
+ subprocess.check_call(['prog', '--output', tmp_file.name])
+ """
+ # Create in same directory to ensure same filesystem when moving.
+ with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
+ dir=os.path.dirname(path),
+ delete=False) as f:
+ try:
+ yield f
+
+ # file should be closed before comparison/move.
+ f.close()
+ if not (only_if_changed and os.path.exists(path) and
+ filecmp.cmp(f.name, path)):
+ shutil.move(f.name, path)
+ finally:
+ if os.path.exists(f.name):
+ os.unlink(f.name)
+
+
+class CalledProcessError(Exception):
+ """This exception is raised when the process run by CheckOutput
+ exits with a non-zero exit code."""
+
+ def __init__(self, cwd, args, output):
+ super(CalledProcessError, self).__init__()
+ self.cwd = cwd
+ self.args = args
+ self.output = output
+
+ def __str__(self):
+ # A user should be able to simply copy and paste the command that failed
+ # into their shell.
+ copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+ ' '.join(map(pipes.quote, self.args)))
+ return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+def FilterLines(output, filter_string):
+ """Output filter from build_utils.CheckOutput.
+
+ Args:
+ output: Executable output as from build_utils.CheckOutput.
+ filter_string: An RE string that will filter (remove) matching
+ lines from |output|.
+
+ Returns:
+ The filtered output, as a single string.
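+
+  Example:
+    FilterLines(output, r'^warning:') removes every line of |output| that
+    starts with 'warning:'.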
+ """
+ re_filter = re.compile(filter_string)
+ return '\n'.join(
+ line for line in output.splitlines() if not re_filter.search(line))
+
+
+# This can be used in most cases like subprocess.check_output(). When the
+# command fails, the output more clearly highlights the command's failure.
+# If the command fails, raises a build_utils.CalledProcessError.
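+#
+# Example (illustrative; |classpath| and |main_class| are hypothetical):
+#   output = CheckOutput(['java', '-cp', classpath, main_class])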
+def CheckOutput(args, cwd=None, env=None,
+ print_stdout=False, print_stderr=True,
+ stdout_filter=None,
+ stderr_filter=None,
+ fail_func=lambda returncode, stderr: returncode != 0):
+ if not cwd:
+ cwd = os.getcwd()
+
+ child = subprocess.Popen(args,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
+ stdout, stderr = child.communicate()
+
+ if stdout_filter is not None:
+ stdout = stdout_filter(stdout)
+
+ if stderr_filter is not None:
+ stderr = stderr_filter(stderr)
+
+ if fail_func(child.returncode, stderr):
+ raise CalledProcessError(cwd, args, stdout + stderr)
+
+ if print_stdout:
+ sys.stdout.write(stdout)
+ if print_stderr:
+ sys.stderr.write(stderr)
+
+ return stdout
+
+
+def GetModifiedTime(path):
+ # For a symlink, the modified time should be the greater of the link's
+ # modified time and the modified time of the target.
+ return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+ if not os.path.exists(output):
+ return True
+
+ output_time = GetModifiedTime(output)
+ for i in inputs:
+ if GetModifiedTime(i) > output_time:
+ return True
+ return False
+
+
+def _CheckZipPath(name):
+ if os.path.normpath(name) != name:
+ raise Exception('Non-canonical zip path: %s' % name)
+ if os.path.isabs(name):
+ raise Exception('Absolute zip path: %s' % name)
+
+
+def _IsSymlink(zip_file, name):
+ zi = zip_file.getinfo(name)
+
+ # The two high-order bytes of ZipInfo.external_attr represent
+ # UNIX permissions and file type bits.
+ return stat.S_ISLNK(zi.external_attr >> 16)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
+ predicate=None):
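+  """Extracts files from |zip_path| into |path| and returns their paths.
+
+  If |no_clobber| is True, raises if an extracted file already exists. The
+  optional |pattern| (fnmatch glob) and |predicate| (callable) filter which
+  entries get extracted.
+  """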
+ if path is None:
+ path = os.getcwd()
+ elif not os.path.exists(path):
+ MakeDirectory(path)
+
+ if not zipfile.is_zipfile(zip_path):
+ raise Exception('Invalid zip file: %s' % zip_path)
+
+ extracted = []
+ with zipfile.ZipFile(zip_path) as z:
+ for name in z.namelist():
+ if name.endswith('/'):
+ MakeDirectory(os.path.join(path, name))
+ continue
+ if pattern is not None:
+ if not fnmatch.fnmatch(name, pattern):
+ continue
+ if predicate and not predicate(name):
+ continue
+ _CheckZipPath(name)
+ if no_clobber:
+ output_path = os.path.join(path, name)
+ if os.path.exists(output_path):
+ raise Exception(
+ 'Path already exists from zip: %s %s %s'
+ % (zip_path, name, output_path))
+ if _IsSymlink(z, name):
+ dest = os.path.join(path, name)
+ MakeDirectory(os.path.dirname(dest))
+ os.symlink(z.read(name), dest)
+ extracted.append(dest)
+ else:
+ z.extract(name, path)
+ extracted.append(os.path.join(path, name))
+
+ return extracted
+
+
+def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
+ compress=None):
+ """Adds a file to the given ZipFile with a hard-coded modified time.
+
+ Args:
+ zip_file: ZipFile instance to add the file to.
+ zip_path: Destination path within the zip file.
+ src_path: Path of the source file. Mutually exclusive with |data|.
+ data: File data as a string.
+ compress: Whether to enable compression. Default is taken from ZipFile
+ constructor.
+ """
+ assert (src_path is None) != (data is None), (
+ '|src_path| and |data| are mutually exclusive.')
+ _CheckZipPath(zip_path)
+ zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
+ zipinfo.external_attr = _HERMETIC_FILE_ATTR
+
+ if src_path and os.path.islink(src_path):
+ zipinfo.filename = zip_path
+ zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink
+ zip_file.writestr(zipinfo, os.readlink(src_path))
+ return
+
+ # zipfile.write() does
+ # external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
+ # but we want to use _HERMETIC_FILE_ATTR, so manually set
+ # the few attr bits we care about.
+ if src_path:
+ st = os.stat(src_path)
+ for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+ if st.st_mode & mode:
+ zipinfo.external_attr |= mode << 16
+
+ if src_path:
+ with open(src_path, 'rb') as f:
+ data = f.read()
+
+ # zipfile will deflate even when it makes the file bigger. To avoid
+ # growing files, disable compression at an arbitrary cut off point.
+ if len(data) < 16:
+ compress = False
+
+  # zipfile converts an explicit compress_type of None to ZIP_STORED, so fall
+  # back to the compression type from the ZipFile constructor instead.
+ compress_type = zip_file.compression
+ if compress is not None:
+ compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ zip_file.writestr(zipinfo, data, compress_type)
+
+
+def DoZip(inputs, output, base_dir=None, compress_fn=None,
+ zip_prefix_path=None):
+ """Creates a zip file from a list of files.
+
+ Args:
+ inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+ output: Path, fileobj, or ZipFile instance to add files to.
+ base_dir: Prefix to strip from inputs.
+ compress_fn: Applied to each input to determine whether or not to compress.
+ By default, items will be |zipfile.ZIP_STORED|.
+ zip_prefix_path: Path prepended to file path in zip file.
+ """
+ if base_dir is None:
+ base_dir = '.'
+ input_tuples = []
+ for tup in inputs:
+ if isinstance(tup, basestring):
+ tup = (os.path.relpath(tup, base_dir), tup)
+ input_tuples.append(tup)
+
+ # Sort by zip path to ensure stable zip ordering.
+ input_tuples.sort(key=lambda tup: tup[0])
+
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ try:
+ for zip_path, fs_path in input_tuples:
+ if zip_prefix_path:
+ zip_path = os.path.join(zip_prefix_path, zip_path)
+ compress = compress_fn(zip_path) if compress_fn else None
+ AddToZipHermetic(out_zip, zip_path, src_path=fs_path, compress=compress)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
+
+
+def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
+ """Creates a zip file from a directory."""
+ inputs = []
+ for root, _, files in os.walk(base_dir):
+ for f in files:
+ inputs.append(os.path.join(root, f))
+
+ with AtomicOutput(output) as f:
+ DoZip(inputs, f, base_dir, compress_fn=compress_fn,
+ zip_prefix_path=zip_prefix_path)
+
+
+def MatchesGlob(path, filters):
+ """Returns whether the given path matches any of the given glob patterns."""
+ return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def MergeZips(output, input_zips, path_transform=None, compress=None):
+ """Combines all files from |input_zips| into |output|.
+
+ Args:
+ output: Path, fileobj, or ZipFile instance to add files to.
+ input_zips: Iterable of paths to zip files to merge.
+ path_transform: Called for each entry path. Returns a new path, or None to
+ skip the file.
+ compress: Overrides compression setting from origin zip entries.
+ """
+ path_transform = path_transform or (lambda p: p)
+ added_names = set()
+
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ try:
+ for in_file in input_zips:
+ with zipfile.ZipFile(in_file, 'r') as in_zip:
+ # ijar creates zips with null CRCs.
+ in_zip._expected_crc = None
+ for info in in_zip.infolist():
+ # Ignore directories.
+ if info.filename[-1] == '/':
+ continue
+ dst_name = path_transform(info.filename)
+ if not dst_name:
+ continue
+ already_added = dst_name in added_names
+ if not already_added:
+ if compress is not None:
+ compress_entry = compress
+ else:
+ compress_entry = info.compress_type != zipfile.ZIP_STORED
+ AddToZipHermetic(
+ out_zip,
+ dst_name,
+ data=in_zip.read(info),
+ compress=compress_entry)
+ added_names.add(dst_name)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+ """Gets the list of all transitive dependencies in sorted order.
+
+ There should be no cycles in the dependency graph (crashes if cycles exist).
+
+ Args:
+ top: A list of the top level nodes
+ deps_func: A function that takes a node and returns a list of its direct
+ dependencies.
+ Returns:
+ A list of all transitive dependencies of nodes in top, in order (a node will
+ appear in the list at a higher index than all of its dependencies).
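+
+  Example:
+    GetSortedTransitiveDependencies(['b'], {'b': ['a'], 'a': []}.get)
+    returns ['a', 'b'].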
+ """
+ # Find all deps depth-first, maintaining original order in the case of ties.
+ deps_map = collections.OrderedDict()
+ def discover(nodes):
+ for node in nodes:
+ if node in deps_map:
+ continue
+ deps = deps_func(node)
+ discover(deps)
+ deps_map[node] = deps
+
+ discover(top)
+ return list(deps_map)
+
+
+def _ComputePythonDependencies():
+ """Gets the paths of imported non-system python modules.
+
+ A path is assumed to be a "system" import if it is outside of chromium's
+ src/. The paths will be relative to the current directory.
+ """
+ _ForceLazyModulesToLoad()
+ module_paths = (m.__file__ for m in sys.modules.itervalues()
+ if m is not None and hasattr(m, '__file__'))
+ abs_module_paths = map(os.path.abspath, module_paths)
+
+ assert os.path.isabs(DIR_SOURCE_ROOT)
+ non_system_module_paths = [
+ p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)]
+ def ConvertPycToPy(s):
+ if s.endswith('.pyc'):
+ return s[:-1]
+ return s
+
+ non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
+ non_system_module_paths = map(os.path.relpath, non_system_module_paths)
+ return sorted(set(non_system_module_paths))
+
+
+def _ForceLazyModulesToLoad():
+ """Forces any lazily imported modules to fully load themselves.
+
+ Inspecting the modules' __file__ attribute causes lazily imported modules
+ (e.g. from email) to get fully imported and update sys.modules. Iterate
+ over the values until sys.modules stabilizes so that no modules are missed.
+ """
+ while True:
+ num_modules_before = len(sys.modules.keys())
+ for m in sys.modules.values():
+ if m is not None and hasattr(m, '__file__'):
+ _ = m.__file__
+ num_modules_after = len(sys.modules.keys())
+ if num_modules_before == num_modules_after:
+ break
+
+
+def AddDepfileOption(parser):
+ # TODO(agrieve): Get rid of this once we've moved to argparse.
+ if hasattr(parser, 'add_option'):
+ func = parser.add_option
+ else:
+ func = parser.add_argument
+ func('--depfile',
+ help='Path to depfile (refer to `gn help depfile`)')
+
+
+def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
+ assert depfile_path != first_gn_output # http://crbug.com/646165
+ assert not isinstance(inputs, basestring) # Easy mistake to make
+ inputs = inputs or []
+ if add_pydeps:
+ inputs = _ComputePythonDependencies() + inputs
+ MakeDirectory(os.path.dirname(depfile_path))
+ # Ninja does not support multiple outputs in depfiles.
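+  # The depfile uses make syntax, e.g. (illustrative):
+  #   gen/foo.srcjar: input/a.txt input/b.txt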
+ with open(depfile_path, 'w') as depfile:
+ depfile.write(first_gn_output.replace(' ', '\\ '))
+ depfile.write(': ')
+ depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
+ depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+ """Replaces file-arg placeholders in args.
+
+ These placeholders have the form:
+ @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json
+  and then extracting the value at [key1][key2]...[keyn].
+
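+  For example, given a hypothetical deps.json containing
+  {"deps_info": {"jar_path": "obj/foo.jar"}}, the argument
+  '--jar=@FileArg(deps.json:deps_info:jar_path)' expands to
+  '--jar=obj/foo.jar'.
+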
+ Note: This intentionally does not return the list of files that appear in such
+ placeholders. An action that uses file-args *must* know the paths of those
+ files prior to the parsing of the arguments (typically by explicitly listing
+ them in the action's inputs in build files).
+ """
+ new_args = list(args)
+ file_jsons = dict()
+  r = re.compile(r'@FileArg\((.*?)\)')
+ for i, arg in enumerate(args):
+ match = r.search(arg)
+ if not match:
+ continue
+
+ lookup_path = match.group(1).split(':')
+ file_path = lookup_path[0]
+    if file_path not in file_jsons:
+ with open(file_path) as f:
+ file_jsons[file_path] = json.load(f)
+
+ expansion = file_jsons[file_path]
+ for k in lookup_path[1:]:
+ expansion = expansion[k]
+
+ # This should match ParseGnList. The output is either a GN-formatted list
+ # or a literal (with no quotes).
+ if isinstance(expansion, list):
+ new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
+ arg[match.end():])
+ else:
+ new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]
+
+ return new_args
+
+
+def ReadSourcesList(sources_list_file_name):
+ """Reads a GN-written file containing list of file names and returns a list.
+
+ Note that this function should not be used to parse response files.
+ """
+ with open(sources_list_file_name) as f:
+ return [file_name.strip() for file_name in f]
+
+
+def CallAndWriteDepfileIfStale(function, options, record_path=None,
+ input_paths=None, input_strings=None,
+ output_paths=None, force=False,
+ pass_changes=False, depfile_deps=None,
+ add_pydeps=True):
+ """Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.
+
+ Depfiles are automatically added to output_paths when present in the |options|
+ argument. They are then created after |function| is called.
+
+ By default, only python dependencies are added to the depfile. If there are
+ other input paths that are not captured by GN deps, then they should be listed
+ in depfile_deps. It's important to write paths to the depfile that are already
+ captured by GN deps since GN args can cause GN deps to change, and such
+ changes are not immediately reflected in depfiles (http://crbug.com/589311).
+ """
+ if not output_paths:
+ raise Exception('At least one output_path must be specified.')
+ input_paths = list(input_paths or [])
+ input_strings = list(input_strings or [])
+ output_paths = list(output_paths or [])
+
+ python_deps = None
+ if hasattr(options, 'depfile') and options.depfile:
+ python_deps = _ComputePythonDependencies()
+ input_paths += python_deps
+ output_paths += [options.depfile]
+
+ def on_stale_md5(changes):
+ args = (changes,) if pass_changes else ()
+ function(*args)
+ if python_deps is not None:
+ all_depfile_deps = list(python_deps) if add_pydeps else []
+ if depfile_deps:
+ all_depfile_deps.extend(depfile_deps)
+ WriteDepfile(options.depfile, output_paths[0], all_depfile_deps,
+ add_pydeps=False)
+
+ md5_check.CallAndRecordIfStale(
+ on_stale_md5,
+ record_path=record_path,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ force=force,
+ pass_changes=True)
diff --git a/deps/v8/build/android/gyp/util/build_utils_test.py b/deps/v8/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 0000000000..d462f0c676
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+_DEPS = collections.OrderedDict()
+_DEPS['a'] = []
+_DEPS['b'] = []
+_DEPS['c'] = ['a']
+_DEPS['d'] = ['a']
+_DEPS['e'] = ['f']
+_DEPS['f'] = ['a', 'd']
+_DEPS['g'] = []
+_DEPS['h'] = ['d', 'b', 'f']
+_DEPS['i'] = ['f']
+
+
+class BuildUtilsTest(unittest.TestCase):
+ def testGetSortedTransitiveDependencies_all(self):
+ TOP = _DEPS.keys()
+ EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+ def testGetSortedTransitiveDependencies_leaves(self):
+ TOP = ['c', 'e', 'g', 'h', 'i']
+ EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+ def testGetSortedTransitiveDependencies_leavesReverse(self):
+ TOP = ['i', 'h', 'g', 'e', 'c']
+ EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/util/diff_utils.py b/deps/v8/build/android/gyp/util/diff_utils.py
new file mode 100755
index 0000000000..b20dc27df2
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/diff_utils.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+import difflib
+from util import build_utils
+
+
+def DiffFileContents(expected_path, actual_path):
+ """Check file contents for equality and return the diff or None."""
+ with open(expected_path) as f_expected, open(actual_path) as f_actual:
+ expected_lines = f_expected.readlines()
+ actual_lines = f_actual.readlines()
+
+ if expected_lines == actual_lines:
+ return None
+
+ expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)
+ actual_path = os.path.relpath(actual_path, build_utils.DIR_SOURCE_ROOT)
+
+ diff = difflib.unified_diff(
+ expected_lines,
+ actual_lines,
+ fromfile=os.path.join('before', expected_path),
+ tofile=os.path.join('after', expected_path),
+ n=0)
+
+ # Space added before "patch" so that giant command is not put in bash history.
+ return """\
+Files Compared:
+ * {}
+ * {}
+
+To update the file, run:
+########### START ###########
+ patch -p1 <<'END_DIFF'
+{}
+END_DIFF
+############ END ############
+""".format(expected_path, actual_path, ''.join(diff).rstrip())
diff --git a/deps/v8/build/android/gyp/util/jar_info_utils.py b/deps/v8/build/android/gyp/util/jar_info_utils.py
new file mode 100644
index 0000000000..677e4e4261
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/jar_info_utils.py
@@ -0,0 +1,51 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+# Utilities to read and write .jar.info files.
+#
+# A .jar.info file contains a simple mapping from fully-qualified Java class
+# names to the source file that actually defines it.
+#
+# For APKs, the .jar.info instead maps each class name to the .jar file that
+# contains its .class definition.
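+#
+# Example line (illustrative):
+#   org.chromium.base.Foo,base/android/java/src/org/chromium/base/Foo.java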
+
+
+def ParseJarInfoFile(info_path):
+ """Parse a given .jar.info file as a dictionary.
+
+ Args:
+ info_path: input .jar.info file path.
+ Returns:
+ A new dictionary mapping fully-qualified Java class names to file paths.
+ """
+ info_data = dict()
+ if os.path.exists(info_path):
+ with open(info_path, 'r') as info_file:
+ for line in info_file:
+ line = line.strip()
+ if line:
+ fully_qualified_name, path = line.split(',', 1)
+ info_data[fully_qualified_name] = path
+ return info_data
+
+
+def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
+ """Generate a .jar.info file from a given dictionary.
+
+ Args:
+ output_obj: output file object.
+ info_data: a mapping of fully qualified Java class names to filepaths.
+ source_file_map: an optional mapping from java source file paths to the
+ corresponding source .srcjar. This is because info_data may contain the
+      path of Java source files that were extracted from an .srcjar into a
+ temporary location.
+ """
+ for fully_qualified_name, path in sorted(info_data.iteritems()):
+ if source_file_map and path in source_file_map:
+ path = source_file_map[path]
+ assert not path.startswith('/tmp'), (
+ 'Java file path should not be in temp dir: {}'.format(path))
+ output_obj.write('{},{}\n'.format(fully_qualified_name, path))
diff --git a/deps/v8/build/android/gyp/util/java_cpp_utils.py b/deps/v8/build/android/gyp/util/java_cpp_utils.py
new file mode 100755
index 0000000000..0b9748657e
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/java_cpp_utils.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+
+def GetScriptName():
+ return os.path.basename(os.path.abspath(sys.argv[0]))
+
+
+def KCamelToShouty(s):
+ """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE.
+
+ kFooBar -> FOO_BAR
+ FooBar -> FOO_BAR
+ FooBAR9 -> FOO_BAR9
+ FooBARBaz -> FOO_BAR_BAZ
+ """
+ if not re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s):
+ return s
+ # Strip the leading k.
+ s = re.sub(r'^k', '', s)
+ # Add _ between title words and anything else.
+ s = re.sub(r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2', s)
+ # Add _ between lower -> upper transitions.
+ s = re.sub(r'([^A-Z_0-9])([A-Z])', r'\1_\2', s)
+ return s.upper()
diff --git a/deps/v8/build/android/gyp/util/md5_check.py b/deps/v8/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000000..9a15ee6e75
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/md5_check.py
@@ -0,0 +1,420 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def CallAndRecordIfStale(
+ function, record_path=None, input_paths=None, input_strings=None,
+ output_paths=None, force=False, pass_changes=False):
+ """Calls function if outputs are stale.
+
+ Outputs are considered stale if:
+ - any output_paths are missing, or
+ - the contents of any file within input_paths has changed, or
+ - the contents of input_strings has changed.
+
+  To debug which files are out-of-date, set the environment variable:
+    PRINT_BUILD_EXPLANATIONS=1
+
+ Args:
+ function: The function to call.
+ record_path: Path to record metadata.
+ Defaults to output_paths[0] + '.md5.stamp'
+    input_paths: List of paths to calculate an md5 sum on.
+ input_strings: List of strings to record verbatim.
+ output_paths: List of output paths.
+ force: Whether to treat outputs as missing regardless of whether they
+ actually are.
+ pass_changes: Whether to pass a Changes instance to |function|.
+ """
+ assert record_path or output_paths
+ input_paths = input_paths or []
+ input_strings = input_strings or []
+ output_paths = output_paths or []
+ record_path = record_path or output_paths[0] + '.md5.stamp'
+
+ assert record_path.endswith('.stamp'), (
+ 'record paths must end in \'.stamp\' so that they are easy to find '
+ 'and delete')
+
+ new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
+ new_metadata.AddStrings(input_strings)
+
+ for path in input_paths:
+ if _IsZipFile(path):
+ entries = _ExtractZipEntries(path)
+ new_metadata.AddZipFile(path, entries)
+ else:
+ new_metadata.AddFile(path, _Md5ForPath(path))
+
+ old_metadata = None
+ force = force or _FORCE_REBUILD
+ missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+ # When outputs are missing, don't bother gathering change information.
+ if not missing_outputs and os.path.exists(record_path):
+ with open(record_path, 'r') as jsonfile:
+ try:
+ old_metadata = _Metadata.FromFile(jsonfile)
+ except: # pylint: disable=bare-except
+ pass # Not yet using new file format.
+
+ changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+ if not changes.HasChanges():
+ return
+
+ if PRINT_EXPLANATIONS:
+ print('=' * 80)
+ print('Target is stale: %s' % record_path)
+ print(changes.DescribeDifference())
+ print('=' * 80)
+
+ args = (changes,) if pass_changes else ()
+ function(*args)
+
+ with open(record_path, 'w') as f:
+ new_metadata.ToFile(f)
+
+
+class Changes(object):
+  """Provides an API for querying what changed between runs."""
+
+ def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+ self.old_metadata = old_metadata
+ self.new_metadata = new_metadata
+ self.force = force
+ self.missing_outputs = missing_outputs
+
+ def _GetOldTag(self, path, subpath=None):
+ return self.old_metadata and self.old_metadata.GetTag(path, subpath)
+
+ def HasChanges(self):
+ """Returns whether any changes exist."""
+ return (self.force or
+ not self.old_metadata or
+ self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5() or
+ self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+ def AddedOrModifiedOnly(self):
+ """Returns whether the only changes were from added or modified (sub)files.
+
+ No missing outputs, no removed paths/subpaths.
+ """
+ if (self.force or
+ not self.old_metadata or
+ self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()):
+ return False
+ if any(self.IterRemovedPaths()):
+ return False
+ for path in self.IterModifiedPaths():
+ if any(self.IterRemovedSubpaths(path)):
+ return False
+ return True
+
+ def IterAllPaths(self):
+ """Generator for paths."""
+    return self.new_metadata.IterPaths()
+
+ def IterAllSubpaths(self, path):
+ """Generator for subpaths."""
+    return self.new_metadata.IterSubpaths(path)
+
+ def IterAddedPaths(self):
+ """Generator for paths that were added."""
+ for path in self.new_metadata.IterPaths():
+ if self._GetOldTag(path) is None:
+ yield path
+
+ def IterAddedSubpaths(self, path):
+ """Generator for paths that were added within the given zip file."""
+ for subpath in self.new_metadata.IterSubpaths(path):
+ if self._GetOldTag(path, subpath) is None:
+ yield subpath
+
+ def IterRemovedPaths(self):
+ """Generator for paths that were removed."""
+ if self.old_metadata:
+ for path in self.old_metadata.IterPaths():
+ if self.new_metadata.GetTag(path) is None:
+ yield path
+
+ def IterRemovedSubpaths(self, path):
+ """Generator for paths that were removed within the given zip file."""
+ if self.old_metadata:
+ for subpath in self.old_metadata.IterSubpaths(path):
+ if self.new_metadata.GetTag(path, subpath) is None:
+ yield subpath
+
+ def IterModifiedPaths(self):
+ """Generator for paths whose contents have changed."""
+ for path in self.new_metadata.IterPaths():
+ old_tag = self._GetOldTag(path)
+ new_tag = self.new_metadata.GetTag(path)
+ if old_tag is not None and old_tag != new_tag:
+ yield path
+
+ def IterModifiedSubpaths(self, path):
+ """Generator for paths within a zip file whose contents have changed."""
+ for subpath in self.new_metadata.IterSubpaths(path):
+ old_tag = self._GetOldTag(path, subpath)
+ new_tag = self.new_metadata.GetTag(path, subpath)
+ if old_tag is not None and old_tag != new_tag:
+ yield subpath
+
+ def IterChangedPaths(self):
+ """Generator for all changed paths (added/removed/modified)."""
+ return itertools.chain(self.IterRemovedPaths(),
+ self.IterModifiedPaths(),
+ self.IterAddedPaths())
+
+ def IterChangedSubpaths(self, path):
+ """Generator for paths within a zip that were added/removed/modified."""
+ return itertools.chain(self.IterRemovedSubpaths(path),
+ self.IterModifiedSubpaths(path),
+ self.IterAddedSubpaths(path))
+
+ def DescribeDifference(self):
+ """Returns a human-readable description of what changed."""
+ if self.force:
+ return 'force=True'
+ elif self.missing_outputs:
+ return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs)
+ elif self.old_metadata is None:
+ return 'Previous stamp file not found.'
+
+ if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
+ ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
+ self.new_metadata.GetStrings())
+ changed = [s for s in ndiff if not s.startswith(' ')]
+ return 'Input strings changed:\n ' + '\n '.join(changed)
+
+ if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
+ return "There's no difference."
+
+ lines = []
+ lines.extend('Added: ' + p for p in self.IterAddedPaths())
+ lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
+ for path in self.IterModifiedPaths():
+ lines.append('Modified: ' + path)
+ lines.extend(' -> Subpath added: ' + p
+ for p in self.IterAddedSubpaths(path))
+ lines.extend(' -> Subpath removed: ' + p
+ for p in self.IterRemovedSubpaths(path))
+ lines.extend(' -> Subpath modified: ' + p
+ for p in self.IterModifiedSubpaths(path))
+ if lines:
+ return 'Input files changed:\n ' + '\n '.join(lines)
+ return 'I have no idea what changed (there is a bug).'
+
+
+class _Metadata(object):
+ """Data model for tracking change metadata.
+
+ Args:
+ track_entries: Enables per-file change tracking. Slower, but required for
+ Changes functionality.
+ """
+ # Schema:
+ # {
+ # "files-md5": "VALUE",
+ # "strings-md5": "VALUE",
+ # "input-files": [
+ # {
+ # "path": "path.jar",
+ # "tag": "{MD5 of entries}",
+ # "entries": [
+ # { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
+ # ]
+ # }, {
+ # "path": "path.txt",
+ # "tag": "{MD5}",
+ # }
+ # ],
+ # "input-strings": ["a", "b", ...],
+ # }
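+  #
+  # Illustrative stamp-file instance (hypothetical values, not part of the
+  # original change):
+  # {
+  #   "files-md5": "1d229271928d3f9e2bb0375bd6ce5db6",
+  #   "strings-md5": "9e107d9d372bb6826bd81d3542a419d6",
+  #   "input-files": [
+  #     {
+  #       "path": "lib.jar",
+  #       "tag": "f6bc9c02c0cd2e49c6f0c103c5f4caf3",
+  #       "entries": [
+  #         { "path": "org/example/Foo.class", "tag": "2768207469" }
+  #       ]
+  #     }
+  #   ],
+  #   "input-strings": ["--enable-some-flag"]
+  # }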
+ def __init__(self, track_entries=False):
+ self._track_entries = track_entries
+ self._files_md5 = None
+ self._strings_md5 = None
+ self._files = []
+ self._strings = []
+ # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
+ self._file_map = None
+
+ @classmethod
+ def FromFile(cls, fileobj):
+ """Returns a _Metadata initialized from a file object."""
+ ret = cls()
+ obj = json.load(fileobj)
+ ret._files_md5 = obj['files-md5']
+ ret._strings_md5 = obj['strings-md5']
+ ret._files = obj.get('input-files', [])
+ ret._strings = obj.get('input-strings', [])
+ return ret
+
+ def ToFile(self, fileobj):
+ """Serializes metadata to the given file object."""
+ obj = {
+ 'files-md5': self.FilesMd5(),
+ 'strings-md5': self.StringsMd5(),
+ }
+ if self._track_entries:
+ obj['input-files'] = sorted(self._files, key=lambda e: e['path'])
+ obj['input-strings'] = self._strings
+
+ json.dump(obj, fileobj, indent=2)
+
+ def _AssertNotQueried(self):
+ assert self._files_md5 is None
+ assert self._strings_md5 is None
+ assert self._file_map is None
+
+ def AddStrings(self, values):
+ self._AssertNotQueried()
+ self._strings.extend(str(v) for v in values)
+
+ def AddFile(self, path, tag):
+ """Adds metadata for a non-zip file.
+
+ Args:
+ path: Path to the file.
+ tag: A short string representative of the file contents.
+ """
+ self._AssertNotQueried()
+ self._files.append({
+ 'path': path,
+ 'tag': tag,
+ })
+
+ def AddZipFile(self, path, entries):
+ """Adds metadata for a zip file.
+
+ Args:
+ path: Path to the file.
+ entries: List of (subpath, tag) tuples for entries within the zip.
+ """
+ self._AssertNotQueried()
+ tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
+ (e[1] for e in entries)))
+ self._files.append({
+ 'path': path,
+ 'tag': tag,
+ 'entries': [{"path": e[0], "tag": e[1]} for e in entries],
+ })
+
+ def GetStrings(self):
+ """Returns the list of input strings."""
+ return self._strings
+
+ def FilesMd5(self):
+ """Lazily computes and returns the aggregate md5 of input files."""
+ if self._files_md5 is None:
+ # Omit paths from md5 since temporary files have random names.
+ self._files_md5 = _ComputeInlineMd5(
+ self.GetTag(p) for p in sorted(self.IterPaths()))
+ return self._files_md5
+
+ def StringsMd5(self):
+ """Lazily computes and returns the aggregate md5 of input strings."""
+ if self._strings_md5 is None:
+ self._strings_md5 = _ComputeInlineMd5(self._strings)
+ return self._strings_md5
+
+ def _GetEntry(self, path, subpath=None):
+ """Returns the JSON entry for the given path / subpath."""
+ if self._file_map is None:
+ self._file_map = {}
+ for entry in self._files:
+ self._file_map[(entry['path'], None)] = entry
+ for subentry in entry.get('entries', ()):
+ self._file_map[(entry['path'], subentry['path'])] = subentry
+ return self._file_map.get((path, subpath))
+
+ def GetTag(self, path, subpath=None):
+ """Returns the tag for the given path / subpath."""
+ ret = self._GetEntry(path, subpath)
+ return ret and ret['tag']
+
+ def IterPaths(self):
+ """Returns a generator for all top-level paths."""
+ return (e['path'] for e in self._files)
+
+ def IterSubpaths(self, path):
+ """Returns a generator for all subpaths in the given zip.
+
+ If the given path is not a zip file or doesn't exist, returns an empty
+ iterable.
+ """
+ outer_entry = self._GetEntry(path)
+ if not outer_entry:
+ return ()
+ subentries = outer_entry.get('entries', [])
+ return (entry['path'] for entry in subentries)
+
+
+def _UpdateMd5ForFile(md5, path, block_size=2**16):
+ with open(path, 'rb') as infile:
+ while True:
+ data = infile.read(block_size)
+ if not data:
+ break
+ md5.update(data)
+
+
+def _UpdateMd5ForDirectory(md5, dir_path):
+ for root, _, files in os.walk(dir_path):
+ for f in files:
+ _UpdateMd5ForFile(md5, os.path.join(root, f))
+
+
+def _Md5ForPath(path):
+ md5 = hashlib.md5()
+ if os.path.isdir(path):
+ _UpdateMd5ForDirectory(md5, path)
+ else:
+ _UpdateMd5ForFile(md5, path)
+ return md5.hexdigest()
+
+
+def _ComputeInlineMd5(iterable):
+ """Computes the md5 of the concatenated parameters."""
+ md5 = hashlib.md5()
+ for item in iterable:
+ md5.update(str(item))
+ return md5.hexdigest()
+
+
+def _IsZipFile(path):
+ """Returns whether to treat the given file as a zip file."""
+ # ijar doesn't set the CRC32 field.
+ if path.endswith('.interface.jar'):
+ return False
+ return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar')
+
+
+def _ExtractZipEntries(path):
+ """Returns a list of (path, CRC32) of all files within |path|."""
+ entries = []
+ with zipfile.ZipFile(path) as zip_file:
+ for zip_info in zip_file.infolist():
+ # Skip directories and empty files.
+ if zip_info.CRC:
+ entries.append(
+ (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+ return entries
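+
+
+# Illustrative usage sketch (not part of the original change; the callback,
+# paths and flags below are hypothetical). CallAndRecordIfStale() invokes the
+# callback only when the recorded md5s no longer match the current inputs:
+#
+#   def _OnStaleMd5(changes):
+#     if changes.AddedOrModifiedOnly():
+#       pass  # An incremental rebuild would be safe here.
+#     else:
+#       pass  # Fall back to a full rebuild.
+#
+#   CallAndRecordIfStale(
+#       _OnStaleMd5,
+#       record_path='out/foo.build.stamp',
+#       input_paths=['foo.jar'],
+#       input_strings=['--some-flag'],
+#       output_paths=['out/foo.zip'],
+#       pass_changes=True)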
diff --git a/deps/v8/build/android/gyp/util/md5_check_test.py b/deps/v8/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000000..41e9d3c248
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import os
+import sys
+import tempfile
+import unittest
+import zipfile
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import md5_check
+
+
+def _WriteZipFile(path, entries):
+ with zipfile.ZipFile(path, 'w') as zip_file:
+ for subpath, data in entries:
+ zip_file.writestr(subpath, data)
+
+
+class TestMd5Check(unittest.TestCase):
+ def setUp(self):
+ self.called = False
+ self.changes = None
+
+ def testCallAndRecordIfStale(self):
+ input_strings = ['string1', 'string2']
+ input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
+ input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
+ file1_contents = b'input file 1'
+ input_file1.write(file1_contents)
+ input_file1.flush()
+ # Test out empty zip file to start.
+ _WriteZipFile(input_file2.name, [])
+ input_files = [input_file1.name, input_file2.name]
+
+ record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+ def CheckCallAndRecord(should_call, message, force=False,
+ outputs_specified=False, outputs_missing=False,
+ expected_changes=None, added_or_modified_only=None):
+ output_paths = None
+ if outputs_specified:
+ output_file1 = tempfile.NamedTemporaryFile()
+ if outputs_missing:
+ output_file1.close() # Gets deleted on close().
+ output_paths = [output_file1.name]
+
+ self.called = False
+ self.changes = None
+ if expected_changes or added_or_modified_only is not None:
+ def MarkCalled(changes):
+ self.called = True
+ self.changes = changes
+ else:
+ def MarkCalled():
+ self.called = True
+
+ md5_check.CallAndRecordIfStale(
+ MarkCalled,
+ record_path=record_path.name,
+ input_paths=input_files,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ force=force,
+ pass_changes=(expected_changes or added_or_modified_only) is not None)
+ self.assertEqual(should_call, self.called, message)
+ if expected_changes:
+ description = self.changes.DescribeDifference()
+ self.assertTrue(fnmatch.fnmatch(description, expected_changes),
+ 'Expected %s to match %s' % (
+ repr(description), repr(expected_changes)))
+ if should_call and added_or_modified_only is not None:
+ self.assertEqual(added_or_modified_only,
+ self.changes.AddedOrModifiedOnly())
+
+ CheckCallAndRecord(True, 'should call when record doesn\'t exist',
+ expected_changes='Previous stamp file not found.',
+ added_or_modified_only=False)
+ CheckCallAndRecord(False, 'should not call when nothing changed')
+ CheckCallAndRecord(False, 'should not call when nothing changed #2',
+ outputs_specified=True, outputs_missing=False)
+ CheckCallAndRecord(True, 'should call when output missing',
+ outputs_specified=True, outputs_missing=True,
+ expected_changes='Outputs do not exist:*',
+ added_or_modified_only=False)
+ CheckCallAndRecord(True, force=True, message='should call when forced',
+ expected_changes='force=True',
+ added_or_modified_only=False)
+
+    input_file1.write(b'some more input')
+ input_file1.flush()
+ CheckCallAndRecord(True, 'changed input file should trigger call',
+ expected_changes='*Modified: %s' % input_file1.name,
+ added_or_modified_only=True)
+
+ input_files = input_files[::-1]
+ CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+ input_files = input_files[:1]
+ CheckCallAndRecord(True, 'removing file should trigger call',
+ expected_changes='*Removed: %s' % input_file1.name,
+ added_or_modified_only=False)
+
+ input_files.append(input_file1.name)
+ CheckCallAndRecord(True, 'added input file should trigger call',
+ expected_changes='*Added: %s' % input_file1.name,
+ added_or_modified_only=True)
+
+ input_strings[0] = input_strings[0] + ' a bit longer'
+ CheckCallAndRecord(True, 'changed input string should trigger call',
+ expected_changes='*Input strings changed*',
+ added_or_modified_only=False)
+
+ input_strings = input_strings[::-1]
+ CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
+ expected_changes='*Input strings changed*')
+
+ input_strings = input_strings[:1]
+ CheckCallAndRecord(True, 'removing a string should trigger call')
+
+ input_strings.append('a brand new string')
+ CheckCallAndRecord(
+ True,
+ 'added input string should trigger call',
+ added_or_modified_only=False)
+
+ _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
+ CheckCallAndRecord(True, 'added subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath added: %s' % (
+ input_file2.name, 'path/1.txt'),
+ added_or_modified_only=True)
+ _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
+ CheckCallAndRecord(True, 'changed subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath modified: %s' % (
+ input_file2.name, 'path/1.txt'),
+ added_or_modified_only=True)
+ CheckCallAndRecord(False, 'should not call when nothing changed')
+
+ _WriteZipFile(input_file2.name, [])
+ CheckCallAndRecord(True, 'removed subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath removed: %s' % (
+ input_file2.name, 'path/1.txt'),
+ added_or_modified_only=False)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/util/proguard_util.py b/deps/v8/build/android/gyp/util/proguard_util.py
new file mode 100644
index 0000000000..c0fba206dc
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/proguard_util.py
@@ -0,0 +1,236 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+from util import build_utils
+
+
+class ProguardOutputFilter(object):
+ """ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
+ as well as interesting stuff (notes, warnings, etc). If stdout is entirely
+ boring, this class suppresses the output.
+ """
+
+ IGNORE_RE = re.compile(
+ r'Pro.*version|Note:|Reading|Preparing|Printing|ProgramClass:|Searching|'
+ r'jar \[|\d+ class path entries checked')
+
+ def __init__(self):
+ self._last_line_ignored = False
+ self._ignore_next_line = False
+
+ def __call__(self, output):
+ ret = []
+ for line in output.splitlines(True):
+ if self._ignore_next_line:
+ self._ignore_next_line = False
+ continue
+
+ if '***BINARY RUN STATS***' in line:
+ self._last_line_ignored = True
+ self._ignore_next_line = True
+ elif not line.startswith(' '):
+ self._last_line_ignored = bool(self.IGNORE_RE.match(line))
+ elif 'You should check if you need to specify' in line:
+ self._last_line_ignored = True
+
+ if not self._last_line_ignored:
+ ret.append(line)
+ return ''.join(ret)
+
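+# Illustrative example (not part of the original change): a filter instance is
+# fed chunks of ProGuard output and echoes only the interesting lines.
+#
+#   log_filter = ProguardOutputFilter()
+#   log_filter('ProGuard, version 5.3\n')       # -> ''
+#   log_filter('Warning: missing class Foo\n')  # -> the line, unchanged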
+
+class ProguardCmdBuilder(object):
+ def __init__(self, proguard_jar):
+ assert os.path.exists(proguard_jar)
+ self._proguard_jar_path = proguard_jar
+ self._mapping = None
+ self._libraries = None
+ self._injars = None
+ self._configs = None
+ self._config_exclusions = None
+ self._outjar = None
+ self._mapping_output = None
+ self._verbose = False
+ self._min_api = None
+ self._disabled_optimizations = []
+
+ def outjar(self, path):
+ assert self._outjar is None
+ self._outjar = path
+
+ def mapping_output(self, path):
+ assert self._mapping_output is None
+ self._mapping_output = path
+
+ def mapping(self, path):
+ assert self._mapping is None
+ assert os.path.exists(path), path
+ self._mapping = path
+
+ def libraryjars(self, paths):
+ assert self._libraries is None
+ for p in paths:
+ assert os.path.exists(p), p
+ self._libraries = paths
+
+ def injars(self, paths):
+ assert self._injars is None
+ for p in paths:
+ assert os.path.exists(p), p
+ self._injars = paths
+
+ def configs(self, paths):
+ assert self._configs is None
+ self._configs = paths
+ for p in self._configs:
+ assert os.path.exists(p), p
+
+ def config_exclusions(self, paths):
+ assert self._config_exclusions is None
+ self._config_exclusions = paths
+
+ def verbose(self, verbose):
+ self._verbose = verbose
+
+ def min_api(self, min_api):
+ assert self._min_api is None
+ self._min_api = min_api
+
+ def disable_optimizations(self, optimizations):
+ self._disabled_optimizations += optimizations
+
+ def build(self):
+ assert self._injars is not None
+ assert self._outjar is not None
+ assert self._configs is not None
+ cmd = [
+ 'java', '-jar', self._proguard_jar_path,
+ '-forceprocessing',
+ ]
+
+ if self._mapping:
+ cmd += ['-applymapping', self._mapping]
+
+ if self._libraries:
+ cmd += ['-libraryjars', ':'.join(self._libraries)]
+
+ if self._min_api:
+ cmd += [
+ '-assumevalues class android.os.Build$VERSION {' +
+ ' public static final int SDK_INT return ' + self._min_api +
+ '..9999; }'
+ ]
+
+ for optimization in self._disabled_optimizations:
+      cmd += ['-optimizations', '!' + optimization]
+
+    # Filter to just .class files to avoid warnings about multiple inputs
+    # having the same files in META-INF/.
+ cmd += [
+ '-injars',
+ ':'.join('{}(**.class)'.format(x) for x in self._injars)
+ ]
+
+ for config_file in self.GetConfigs():
+ cmd += ['-include', config_file]
+
+ # The output jar must be specified after inputs.
+ cmd += [
+ '-outjars', self._outjar,
+ '-printseeds', self._outjar + '.seeds',
+ '-printusage', self._outjar + '.usage',
+ '-printmapping', self._mapping_output,
+ ]
+
+ if self._verbose:
+ cmd.append('-verbose')
+
+ return cmd
+
+ def GetDepfileDeps(self):
+ # The list of inputs that the GN target does not directly know about.
+ inputs = self._configs + self._injars
+ if self._libraries:
+ inputs += self._libraries
+ return inputs
+
+ def GetConfigs(self):
+ ret = list(self._configs)
+ for path in self._config_exclusions:
+ ret.remove(path)
+ return ret
+
+ def GetInputs(self):
+ inputs = self.GetDepfileDeps()
+ inputs += [self._proguard_jar_path]
+ if self._mapping:
+ inputs.append(self._mapping)
+ return inputs
+
+ def GetOutputs(self):
+ return [
+ self._outjar,
+ self._outjar + '.flags',
+ self._mapping_output,
+ self._outjar + '.seeds',
+ self._outjar + '.usage',
+ ]
+
+ def _WriteFlagsFile(self, cmd, out):
+ # Quite useful for auditing proguard flags.
+ WriteFlagsFile(self._configs, out)
+ out.write('#' * 80 + '\n')
+ out.write('# Command-line\n')
+ out.write('#' * 80 + '\n')
+ out.write('# ' + ' '.join(cmd) + '\n')
+
+ def CheckOutput(self):
+ cmd = self.build()
+
+ # There are a couple scenarios (.mapping files and switching from no
+ # proguard -> proguard) where GN's copy() target is used on output
+ # paths. These create hardlinks, so we explicitly unlink here to avoid
+ # updating files with multiple links.
+ for path in self.GetOutputs():
+ if os.path.exists(path):
+ os.unlink(path)
+
+ with open(self._outjar + '.flags', 'w') as out:
+ self._WriteFlagsFile(cmd, out)
+
+ # Warning: and Error: are sent to stderr, but messages and Note: are sent
+ # to stdout.
+ stdout_filter = None
+ stderr_filter = None
+ if not self._verbose:
+ stdout_filter = ProguardOutputFilter()
+ stderr_filter = ProguardOutputFilter()
+ build_utils.CheckOutput(cmd, print_stdout=True,
+ print_stderr=True,
+ stdout_filter=stdout_filter,
+ stderr_filter=stderr_filter)
+
+ # Proguard will skip writing -printseeds / -printusage / -printmapping if
+ # the files would be empty, but ninja needs all outputs to exist.
+ open(self._outjar + '.seeds', 'a').close()
+ open(self._outjar + '.usage', 'a').close()
+    open(self._mapping_output, 'a').close()
+
+
+def WriteFlagsFile(configs, out, exclude_generated=False):
+ for config in sorted(configs):
+ if exclude_generated and config.endswith('.resources.proguard.txt'):
+ continue
+
+ out.write('#' * 80 + '\n')
+ out.write('# ' + config + '\n')
+ out.write('#' * 80 + '\n')
+ with open(config) as config_file:
+ contents = config_file.read().rstrip()
+ # Remove numbers from generated rule comments to make file more
+ # diff'able.
+ contents = re.sub(r' #generated:\d+', '', contents)
+ out.write(contents)
+ out.write('\n\n')
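+
+
+# Illustrative usage sketch (not part of the original change; all paths are
+# hypothetical). The builder is configured and then run via CheckOutput():
+#
+#   proguard = ProguardCmdBuilder('third_party/proguard/proguard.jar')
+#   proguard.injars(['obj/foo.jar'])
+#   proguard.configs(['foo.flags'])
+#   proguard.config_exclusions([])
+#   proguard.outjar('obj/foo.proguard.jar')
+#   proguard.mapping_output('obj/foo.mapping')
+#   proguard.CheckOutput()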
diff --git a/deps/v8/build/android/gyp/util/resource_utils.py b/deps/v8/build/android/gyp/util/resource_utils.py
new file mode 100644
index 0000000000..61a4f3c238
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,834 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
+EMPTY_ANDROID_MANIFEST_PATH = os.path.join(
+ _SOURCE_ROOT, 'build', 'android', 'AndroidManifest.xml')
+
+ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+
+# A variation of these maps also exists in:
+# //base/android/java/src/org/chromium/base/LocaleUtils.java
+# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+ 'es-419': 'es-rUS',
+ 'fil': 'tl',
+ 'he': 'iw',
+ 'id': 'in',
+ 'yi': 'ji',
+}
+_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
+ 'tl': 'fil',
+ 'iw': 'he',
+ 'in': 'id',
+ 'ji': 'yi',
+ 'no': 'nb', # 'no' is not a real language. http://crbug.com/920960
+}
+
+
+_xml_namespace_initialized = False
+
+
+def ToAndroidLocaleName(chromium_locale):
+  """Convert a Chromium locale name into a corresponding Android one."""
+ # First handle the special cases, these are needed to deal with Android
+ # releases *before* 5.0/Lollipop.
+ android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
+ if android_locale:
+ return android_locale
+
+ # Format of Chromium locale name is '<lang>' or '<lang>-<region>'
+ # where <lang> is a 2 or 3 letter language code (ISO 639-1 or 639-2)
+ # and region is a capitalized locale region name.
+ lang, _, region = chromium_locale.partition('-')
+ if not region:
+ return lang
+
+ # Translate newer language tags into obsolete ones. Only necessary if
+ # region is not None (e.g. 'he-IL' -> 'iw-rIL')
+ lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)
+
+ # Using '<lang>-r<region>' is now acceptable as a locale name for all
+ # versions of Android.
+ return '%s-r%s' % (lang, region)
+
+
+# ISO 639 language code + optional ("-r" + capitalized region code).
+# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
+# are supported.
+_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')
+
+# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
+# be prefixed with 'b+', and may include optional tags. e.g. 'b+en+US',
+# 'b+ja+Latn', 'b+ja+JP+Latn'
+_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+# Matches an all-uppercase region name.
+_RE_ALL_UPPERCASE = re.compile(r'^[A-Z]+$')
+
+
+def ToChromiumLocaleName(android_locale):
+ """Convert an Android locale name into a Chromium one."""
+ lang = None
+ region = None
+ m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
+ if m:
+ lang = m.group(1)
+ if m.group(2):
+ region = m.group(3)
+ else:
+ m = _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale)
+ if m:
+ lang = m.group(1)
+ if m.group(2):
+ tags = m.group(2).split('+')
+ # First all-uppercase tag is a region. This deals with cases where
+ # a special tag is placed before it (e.g. 'cmn+Hant-TW')
+ for tag in tags:
+ if _RE_ALL_UPPERCASE.match(tag):
+ region = tag
+ break
+
+ if not lang:
+ return None
+
+ # Special case for es-rUS -> es-419
+ if lang == 'es' and region == 'US':
+ return 'es-419'
+
+ lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
+ if not region:
+ return lang
+
+ return '%s-%s' % (lang, region)
+
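+# Illustrative examples (not part of the original change):
+#   ToChromiumLocaleName('en-rUS')  -> 'en-US'
+#   ToChromiumLocaleName('b+ja+JP') -> 'ja-JP'
+#   ToChromiumLocaleName('iw-rIL')  -> 'he-IL'
+#   ToChromiumLocaleName('es-rUS')  -> 'es-419'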
+
+def IsAndroidLocaleQualifier(string):
+ """Returns true if |string| is a valid Android resource locale qualifier."""
+ return (_RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
+ or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string))
+
+
+def FindLocaleInStringResourceFilePath(file_path):
+ """Return Android locale name of a string resource file path.
+
+ Args:
+ file_path: A file path.
+ Returns:
+    If |file_path| is of the format '.../values-<locale>/<name>.xml', return
+    the value of <locale> (an Android locale qualifier). Otherwise return None.
+ """
+ if not file_path.endswith('.xml'):
+ return None
+ prefix = 'values-'
+ dir_name = os.path.basename(os.path.dirname(file_path))
+ if not dir_name.startswith(prefix):
+ return None
+ qualifier = dir_name[len(prefix):]
+ return qualifier if IsAndroidLocaleQualifier(qualifier) else None
+
+
+def ToAndroidLocaleList(locale_list):
+ """Convert a list of Chromium locales into the corresponding Android list."""
+ return sorted(ToAndroidLocaleName(locale) for locale in locale_list)
+
+# Represents a line from a R.txt file.
+_TextSymbolEntry = collections.namedtuple('RTextEntry',
+ ('java_type', 'resource_type', 'name', 'value'))
+
+
+def CreateResourceInfoFile(files_to_zip, zip_path):
+ """Given a mapping of archive paths to their source, write an info file.
+
+ The info file contains lines of '{archive_path},{source_path}' for ease of
+ parsing. Assumes that there is no comma in the file names.
+
+ Args:
+ files_to_zip: Dict mapping path in the zip archive to original source.
+ zip_path: Path where the zip file ends up, this is where the info file goes.
+ """
+ info_file_path = zip_path + '.info'
+ with open(info_file_path, 'w') as info_file:
+ for archive_path, source_path in files_to_zip.iteritems():
+ info_file.write('{},{}\n'.format(archive_path, source_path))
+
+
+def _ParseTextSymbolsFile(path, fix_package_ids=False):
+ """Given an R.txt file, returns a list of _TextSymbolEntry.
+
+ Args:
+ path: Input file path.
+ fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file
+ will be fixed to 0x7f.
+ Returns:
+ A list of _TextSymbolEntry instances.
+ Raises:
+ Exception: An unexpected line was detected in the input.
+ """
+ ret = []
+ with open(path) as f:
+ for line in f:
+ m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+ if not m:
+ raise Exception('Unexpected line in R.txt: %s' % line)
+ java_type, resource_type, name, value = m.groups()
+ if fix_package_ids:
+ value = _FixPackageIds(value)
+ ret.append(_TextSymbolEntry(java_type, resource_type, name, value))
+ return ret
+
+
+def _FixPackageIds(resource_value):
+ # Resource IDs for resources belonging to regular APKs have their first byte
+ # as 0x7f (package id). However with webview, since it is not a regular apk
+ # but used as a shared library, aapt is passed the --shared-resources flag
+ # which changes some of the package ids to 0x00 and 0x02. This function
+ # normalises these (0x00 and 0x02) package ids to 0x7f, which the generated
+ # code in R.java changes to the correct package id at runtime.
+  # resource_value is a string with either a single value '0x12345678', or an
+  # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }'.
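+  # Illustrative example (not part of the original change):
+  #   '0x02010203' -> '0x7f010203'
+  #   '{ 0x00010203, 0x7f010204 }' -> '{ 0x7f010203, 0x7f010204 }'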
+ return re.sub(r'0x(?:00|02)', r'0x7f', resource_value)
+
+
+def _GetRTxtResourceNames(r_txt_path):
+ """Parse an R.txt file and extract the set of resource names from it."""
+ return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)}
+
+
+def GetRTxtStringResourceNames(r_txt_path):
+  """Parse an R.txt file and return the list of its string resource names."""
+ return sorted({
+ entry.name
+ for entry in _ParseTextSymbolsFile(r_txt_path)
+ if entry.resource_type == 'string'
+ })
+
+
+def GenerateStringResourcesWhitelist(module_r_txt_path, whitelist_r_txt_path):
+ """Generate a whitelist of string resource IDs.
+
+ Args:
+ module_r_txt_path: Input base module R.txt path.
+ whitelist_r_txt_path: Input whitelist R.txt path.
+ Returns:
+ A dictionary mapping numerical resource IDs to the corresponding
+ string resource names. The ID values are taken from string resources in
+ |module_r_txt_path| that are also listed by name in |whitelist_r_txt_path|.
+ """
+ whitelisted_names = {
+ entry.name
+ for entry in _ParseTextSymbolsFile(whitelist_r_txt_path)
+ if entry.resource_type == 'string'
+ }
+ return {
+ int(entry.value, 0): entry.name
+ for entry in _ParseTextSymbolsFile(module_r_txt_path)
+ if entry.resource_type == 'string' and entry.name in whitelisted_names
+ }
+
+
+class RJavaBuildOptions:
+ """A class used to model the various ways to build an R.java file.
+
+ This is used to control which resource ID variables will be final or
+ non-final, and whether an onResourcesLoaded() method will be generated
+ to adjust the non-final ones, when the corresponding library is loaded
+ at runtime.
+
+ Note that by default, all resources are final, and there is no
+ method generated, which corresponds to calling ExportNoResources().
+ """
+ def __init__(self):
+ self.has_constant_ids = True
+ self.resources_whitelist = None
+ self.has_on_resources_loaded = False
+ self.export_const_styleable = False
+
+ def ExportNoResources(self):
+ """Make all resource IDs final, and don't generate a method."""
+ self.has_constant_ids = True
+ self.resources_whitelist = None
+ self.has_on_resources_loaded = False
+ self.export_const_styleable = False
+
+ def ExportAllResources(self):
+ """Make all resource IDs non-final in the R.java file."""
+ self.has_constant_ids = False
+ self.resources_whitelist = None
+
+ def ExportSomeResources(self, r_txt_file_path):
+ """Only select specific resource IDs to be non-final.
+
+ Args:
+      r_txt_file_path: The path to an R.txt file. All resources named
+        in it will be non-final in the generated R.java file, all others
+        will be final.
+ """
+ self.has_constant_ids = True
+ self.resources_whitelist = _GetRTxtResourceNames(r_txt_file_path)
+
+ def ExportAllStyleables(self):
+    """Make all styleable constants non-final, even non-resource ones.
+
+ Resources that are styleable but not of int[] type are not actually
+ resource IDs but constants. By default they are always final. Call this
+ method to make them non-final anyway in the final R.java file.
+ """
+ self.export_const_styleable = True
+
+ def GenerateOnResourcesLoaded(self):
+ """Generate an onResourcesLoaded() method.
+
+ This Java method will be called at runtime by the framework when
+ the corresponding library (which includes the R.java source file)
+ will be loaded at runtime. This corresponds to the --shared-resources
+ or --app-as-shared-lib flags of 'aapt package'.
+ """
+ self.has_on_resources_loaded = True
+
+ def _IsResourceFinal(self, entry):
+ """Determines whether a resource should be final or not.
+
+ Args:
+ entry: A _TextSymbolEntry instance.
+ Returns:
+ True iff the corresponding entry should be final.
+ """
+ if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
+ # A styleable constant may be exported as non-final after all.
+ return not self.export_const_styleable
+ elif not self.has_constant_ids:
+ # Every resource is non-final
+ return False
+    elif not self.resources_whitelist:
+      # No whitelist means all IDs are final.
+      return True
+    else:
+      # Otherwise, only those listed in the whitelist are non-final.
+      return entry.name not in self.resources_whitelist
+
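+# Illustrative usage sketch (not part of the original change): make every
+# resource ID non-final and emit an onResourcesLoaded() method, as done for
+# targets built with --shared-resources:
+#
+#   rjava_build_options = RJavaBuildOptions()
+#   rjava_build_options.ExportAllResources()
+#   rjava_build_options.GenerateOnResourcesLoaded()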
+
+def CreateRJavaFiles(srcjar_dir, package, main_r_txt_file, extra_res_packages,
+ extra_r_txt_files, rjava_build_options):
+ """Create all R.java files for a set of packages and R.txt files.
+
+ Args:
+ srcjar_dir: The top-level output directory for the generated files.
+ package: Top-level package name.
+ main_r_txt_file: The main R.txt file containing the valid values
+ of _all_ resource IDs.
+ extra_res_packages: A list of extra package names.
+    extra_r_txt_files: A list of extra R.txt files. One per item in
+      |extra_res_packages|. Note that all resource IDs in them will be ignored
+      and replaced by the values extracted from |main_r_txt_file|.
+ rjava_build_options: An RJavaBuildOptions instance that controls how
+ exactly the R.java file is generated.
+ Raises:
+    Exception: If a package name appears several times in |extra_res_packages|.
+ """
+ assert len(extra_res_packages) == len(extra_r_txt_files), \
+ 'Need one R.txt file per package'
+
+ packages = list(extra_res_packages)
+ r_txt_files = list(extra_r_txt_files)
+
+ if package and package not in packages:
+ # Sometimes, an apk target and a resources target share the same
+ # AndroidManifest.xml and thus |package| will already be in |packages|.
+ packages.append(package)
+ r_txt_files.append(main_r_txt_file)
+
+ # Map of (resource_type, name) -> Entry.
+ # Contains the correct values for resources.
+ all_resources = {}
+ for entry in _ParseTextSymbolsFile(main_r_txt_file, fix_package_ids=True):
+ all_resources[(entry.resource_type, entry.name)] = entry
+
+ # Map of package_name->resource_type->entry
+ resources_by_package = (
+ collections.defaultdict(lambda: collections.defaultdict(list)))
+ # Build the R.java files using each package's R.txt file, but replacing
+ # each entry's placeholder value with correct values from all_resources.
+ for package, r_txt_file in zip(packages, r_txt_files):
+ if package in resources_by_package:
+ raise Exception(('Package name "%s" appeared twice. All '
+ 'android_resources() targets must use unique package '
+ 'names, or no package name at all.') % package)
+ resources_by_type = resources_by_package[package]
+ # The sub-R.txt files have the wrong values at this point. Read them to
+ # figure out which entries belong to them, but use the values from the
+ # main R.txt file.
+ for entry in _ParseTextSymbolsFile(r_txt_file):
+ entry = all_resources.get((entry.resource_type, entry.name))
+ # For most cases missing entry here is an error. It means that some
+ # library claims to have or depend on a resource that isn't included into
+ # the APK. There is one notable exception: Google Play Services (GMS).
+ # GMS is shipped as a bunch of AARs. One of them - basement - contains
+ # R.txt with ids of all resources, but most of the resources are in the
+ # other AARs. However, all other AARs reference their resources via
+ # basement's R.java so the latter must contain all ids that are in its
+ # R.txt. Most targets depend on only a subset of GMS AARs so some
+ # resources are missing, which is okay because the code that references
+ # them is missing too. We can't get an id for a resource that isn't here
+ # so the only solution is to skip the resource entry entirely.
+ #
+ # We can verify that all entries referenced in the code were generated
+ # correctly by running Proguard on the APK: it will report missing
+ # fields.
+ if entry:
+ resources_by_type[entry.resource_type].append(entry)
+
+ for package, resources_by_type in resources_by_package.iteritems():
+ _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type,
+ rjava_build_options)
+
+
+def _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type,
+ rjava_build_options):
+ """Generates an R.java source file."""
+ package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
+ build_utils.MakeDirectory(package_r_java_dir)
+ package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+ java_file_contents = _RenderRJavaSource(package, resources_by_type,
+ rjava_build_options)
+ with open(package_r_java_path, 'w') as f:
+ f.write(java_file_contents)
+
+
+# Resource IDs inside resource arrays are sorted. Application resource IDs start
+# with 0x7f but system resource IDs start with 0x01 thus system resource ids are
+# always at the start of the array. This function finds the index of the first
+# non system resource id to be used for package ID rewriting (we should not
+# rewrite system resource ids).
+def _GetNonSystemIndex(entry):
+ """Get the index of the first application resource ID within a resource
+ array."""
+ res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
+ for i, res_id in enumerate(res_ids):
+ if res_id.startswith('0x7f'):
+ return i
+ return len(res_ids)
+
+
+def _RenderRJavaSource(package, resources_by_type, rjava_build_options):
+  """Render an R.java source file. See _CreateRJavaSourceFile for args info."""
+ final_resources_by_type = collections.defaultdict(list)
+ non_final_resources_by_type = collections.defaultdict(list)
+ for res_type, resources in resources_by_type.iteritems():
+ for entry in resources:
+      # Entries in styleable that are not int[] are not actually resource ids
+      # but constants.
+ if rjava_build_options._IsResourceFinal(entry):
+ final_resources_by_type[res_type].append(entry)
+ else:
+ non_final_resources_by_type[res_type].append(entry)
+
+ # Keep these assignments all on one line to make diffing against regular
+ # aapt-generated files easier.
+ create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
+ create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
+ ' packageIdTransform;')
+ for_loop_condition = ('int i = {{ startIndex(e) }}; i < '
+ '{{ e.resource_type }}.{{ e.name }}.length; ++i')
+
+ # Here we diverge from what aapt does. Because we have so many
+ # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+ # Java imposes. For this reason we split onResourcesLoaded into different
+ # methods for each resource type.
+ template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+ private static boolean sResourcesDidLoad;
+ {% for resource_type in resource_types %}
+ public static final class {{ resource_type }} {
+ {% for e in final_resources[resource_type] %}
+ public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ {% endfor %}
+ {% for e in non_final_resources[resource_type] %}
+ {% if e.value != '0' %}
+ public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ {% else %}
+ public static {{ e.java_type }} {{ e.name }};
+ {% endif %}
+ {% endfor %}
+ }
+ {% endfor %}
+ {% if has_on_resources_loaded %}
+ public static void onResourcesLoaded(int packageId) {
+ assert !sResourcesDidLoad;
+ sResourcesDidLoad = true;
+ int packageIdTransform = (packageId ^ 0x7f) << 24;
+ {% for resource_type in resource_types %}
+ onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
+ {% for e in non_final_resources[resource_type] %}
+ {% if e.java_type == 'int[]' %}
+ for(""" + for_loop_condition + """) {
+ """ + create_id_arr + """
+ }
+ {% endif %}
+ {% endfor %}
+ {% endfor %}
+ }
+ {% for res_type in resource_types %}
+ private static void onResourcesLoaded{{ res_type|title }} (
+ int packageIdTransform) {
+ {% for e in non_final_resources[res_type] %}
+ {% if res_type != 'styleable' and e.java_type != 'int[]' %}
+ """ + create_id + """
+ {% endif %}
+ {% endfor %}
+ }
+ {% endfor %}
+ {% endif %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+ return template.render(
+ package=package,
+ resource_types=sorted(resources_by_type),
+ has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
+ final_resources=final_resources_by_type,
+ non_final_resources=non_final_resources_by_type,
+ startIndex=_GetNonSystemIndex)
+
+
+def ExtractPackageFromManifest(manifest_path):
+ """Extract package name from Android manifest file."""
+ return ParseAndroidManifest(manifest_path)[1].get('package')
+
+
+def ExtractBinaryManifestValues(aapt2_path, apk_path):
+ """Returns (version_code, version_name, package_name) for the given apk."""
+ output = subprocess.check_output([
+ aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml'
+ ])
+ version_code = re.search(r'versionCode.*?=(\d*)', output).group(1)
+ version_name = re.search(r'versionName.*?="(.*?)"', output).group(1)
+ package_name = re.search(r'package.*?="(.*?)"', output).group(1)
+ return version_code, version_name, package_name
+
+
+def ExtractArscPackage(aapt2_path, apk_path):
+ """Returns (package_name, package_id) of resources.arsc from apk_path."""
+ proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ for line in proc.stdout:
+ # Package name=org.chromium.webview_shell id=7f
+ if line.startswith('Package'):
+ proc.kill()
+ parts = line.split()
+ package_name = parts[1].split('=')[1]
+ package_id = parts[2][3:]
+ return package_name, int(package_id, 16)
+
+ # aapt2 currently crashes when dumping webview resources, but not until after
+ # it prints the "Package" line (b/130553900).
+ sys.stderr.write(proc.stderr.read())
+ raise Exception('Failed to find arsc package name')
+
+
+def ExtractDeps(dep_zips, deps_dir):
+ """Extract a list of resource dependency zip files.
+
+ Args:
+ dep_zips: A list of zip file paths, each one will be extracted to
+ a subdirectory of |deps_dir|, named after the zip file's path (e.g.
+ '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
+ deps_dir: Top-level extraction directory.
+ Returns:
+    The list of all extracted sub-directory paths.
+ Raises:
+ Exception: If a sub-directory already exists with the same name before
+ extraction.
+ """
+ dep_subdirs = []
+ for z in dep_zips:
+ subdirname = z.replace(os.path.sep, '_')
+ subdir = os.path.join(deps_dir, subdirname)
+ if os.path.exists(subdir):
+ raise Exception('Resource zip name conflict: ' + subdirname)
+ build_utils.ExtractAll(z, path=subdir)
+ dep_subdirs.append(subdir)
+ return dep_subdirs
+
+
+class _ResourceBuildContext(object):
+ """A temporary directory for packaging and compiling Android resources.
+
+ Args:
+ temp_dir: Optional root build directory path. If None, a temporary
+ directory will be created, and removed in Close().
+ """
+ def __init__(self, temp_dir=None):
+    """Initializes the context."""
+ # The top-level temporary directory.
+ if temp_dir:
+ self.temp_dir = temp_dir
+ self.remove_on_exit = False
+ else:
+ self.temp_dir = tempfile.mkdtemp()
+ self.remove_on_exit = True
+
+    # A location to store resources extracted from dependency zip files.
+ self.deps_dir = os.path.join(self.temp_dir, 'deps')
+ os.mkdir(self.deps_dir)
+ # A location to place aapt-generated files.
+ self.gen_dir = os.path.join(self.temp_dir, 'gen')
+ os.mkdir(self.gen_dir)
+ # Location of the generated R.txt file.
+ self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
+ # A location to place generated R.java files.
+ self.srcjar_dir = os.path.join(self.temp_dir, 'java')
+ os.mkdir(self.srcjar_dir)
+
+ def Close(self):
+ """Close the context and destroy all temporary files."""
+ if self.remove_on_exit:
+ shutil.rmtree(self.temp_dir)
+
+
+@contextlib.contextmanager
+def BuildContext(temp_dir=None):
+ """Generator for a _ResourceBuildContext instance."""
+ try:
+ context = _ResourceBuildContext(temp_dir)
+ yield context
+ finally:
+ context.Close()
+
+
+def ResourceArgsParser():
+ """Create an argparse.ArgumentParser instance with common argument groups.
+
+ Returns:
+ A tuple of (parser, in_group, out_group) corresponding to the parser
+ instance, and the input and output argument groups for it, respectively.
+ """
+ parser = argparse.ArgumentParser(description=__doc__)
+
+ input_opts = parser.add_argument_group('Input options')
+ output_opts = parser.add_argument_group('Output options')
+
+ build_utils.AddDepfileOption(output_opts)
+
+ input_opts.add_argument('--include-resources', required=True, action="append",
+ help='Paths to arsc resource files used to link '
+ 'against. Can be specified multiple times.')
+
+ input_opts.add_argument('--dependencies-res-zips', required=True,
+ help='Resources zip archives from dependents. Required to '
+ 'resolve @type/foo references into dependent '
+ 'libraries.')
+
+ input_opts.add_argument(
+ '--r-text-in',
+ help='Path to pre-existing R.txt. Its resource IDs override those found '
+ 'in the aapt-generated R.txt when generating R.java.')
+
+ input_opts.add_argument(
+ '--extra-res-packages',
+ help='Additional package names to generate R.java files for.')
+
+ input_opts.add_argument(
+ '--extra-r-text-files',
+ help='For each additional package, the R.txt file should contain a '
+ 'list of resources to be included in the R.java file in the format '
+ 'generated by aapt.')
+
+ return (parser, input_opts, output_opts)
+
+
+def HandleCommonOptions(options):
+ """Handle common command-line options after parsing.
+
+ Args:
+ options: the result of parse_args() on the parser returned by
+ ResourceArgsParser(). This function updates a few common fields.
+ """
+ options.include_resources = [build_utils.ParseGnList(r) for r in
+ options.include_resources]
+  # Flatten the list of lists into a single list to make it easier to use.
+ options.include_resources = [r for resources in options.include_resources
+ for r in resources]
+
+ options.dependencies_res_zips = (
+ build_utils.ParseGnList(options.dependencies_res_zips))
+
+  # Don't use [] as default value since some scripts explicitly pass "".
+ if options.extra_res_packages:
+ options.extra_res_packages = (
+ build_utils.ParseGnList(options.extra_res_packages))
+ else:
+ options.extra_res_packages = []
+
+ if options.extra_r_text_files:
+ options.extra_r_text_files = (
+ build_utils.ParseGnList(options.extra_r_text_files))
+ else:
+ options.extra_r_text_files = []
+
+
+def ParseAndroidResourceStringsFromXml(xml_data):
+  """Parse an Android xml resource file and extract strings from it.
+
+ Args:
+ xml_data: XML file data.
+ Returns:
+ A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
+ encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
+ corresponding to namespaces declared in the <resources> element.
+ """
+ # NOTE: This uses regular expression matching because parsing with something
+ # like ElementTree makes it tedious to properly parse some of the structured
+ # text found in string resources, e.g.:
+ # <string msgid="3300176832234831527" \
+ # name="abc_shareactionprovider_share_with_application">\
+ # "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
+ # </string>
+ result = {}
+
+ # Find <resources> start tag and extract namespaces from it.
+ m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
+ if not m:
+ raise Exception('<resources> start tag expected: ' + xml_data)
+ input_data = xml_data[m.end():]
+ resource_attrs = m.group(1)
+  re_namespace = re.compile(r'\s*(xmlns:(\w+)="([^"]+)")')
+ namespaces = {}
+ while resource_attrs:
+ m = re_namespace.match(resource_attrs)
+ if not m:
+ break
+ namespaces[m.group(2)] = m.group(3)
+ resource_attrs = resource_attrs[m.end(1):]
+
+ # Find each string element now.
+ re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>')
+ re_string_element_end = re.compile('</string>')
+ while input_data:
+ m = re_string_element_start.search(input_data)
+ if not m:
+ break
+ name = m.group(2)
+ input_data = input_data[m.end():]
+ m2 = re_string_element_end.search(input_data)
+ if not m2:
+ raise Exception('Expected closing string tag: ' + input_data)
+ text = input_data[:m2.start()]
+ input_data = input_data[m2.end():]
+ if len(text) and text[0] == '"' and text[-1] == '"':
+ text = text[1:-1]
+ result[name] = text
+
+ return result, namespaces
+
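+# Illustrative example (not part of the original change): for the <string>
+# snippet in the NOTE above, the surrounding double quotes are stripped and the
+# result maps 'abc_shareactionprovider_share_with_application' to
+# 'Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>', along with a
+# dict of the namespaces declared on the <resources> element.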
+
+def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
+ """Generate an XML text corresponding to an Android resource strings map.
+
+ Args:
+    names_to_utf8_text: A dictionary mapping resource names to localized
+      text (encoded as UTF-8).
+ namespaces: A map of namespace prefix to URL.
+ Returns:
+ New non-Unicode string containing an XML data structure describing the
+ input as an Android resource .xml file.
+ """
+ result = '<?xml version="1.0" encoding="utf-8"?>\n'
+ result += '<resources'
+ if namespaces:
+ for prefix, url in sorted(namespaces.iteritems()):
+ result += ' xmlns:%s="%s"' % (prefix, url)
+ result += '>\n'
+ if not names_to_utf8_text:
+ result += '<!-- this file intentionally empty -->\n'
+ else:
+ for name, utf8_text in sorted(names_to_utf8_text.iteritems()):
+ result += '<string name="%s">"%s"</string>\n' % (name, utf8_text)
+ result += '</resources>\n'
+ return result
+
+
+def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
+ """Remove unwanted localized strings from an Android resource .xml file.
+
+ This function takes a |string_predicate| callable object that will
+ receive a resource string name, and should return True iff the
+ corresponding <string> element should be kept in the file.
+
+ Args:
+ xml_file_path: Android resource strings xml file path.
+    string_predicate: A predicate function which will receive the string name
+      and shall return True iff the string should be kept.
+ """
+ with open(xml_file_path) as f:
+ xml_data = f.read()
+ strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)
+
+ string_deletion = False
+ for name in strings_map.keys():
+ if not string_predicate(name):
+ del strings_map[name]
+ string_deletion = True
+
+ if string_deletion:
+ new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
+ with open(xml_file_path, 'wb') as f:
+ f.write(new_xml_data)
+
+
+def _RegisterElementTreeNamespaces():
+ global _xml_namespace_initialized
+ if not _xml_namespace_initialized:
+ _xml_namespace_initialized = True
+ ElementTree.register_namespace('android', ANDROID_NAMESPACE)
+ ElementTree.register_namespace('tools', TOOLS_NAMESPACE)
+
+
+def ParseAndroidManifest(path):
+ """Parses an AndroidManifest.xml using ElementTree.
+
+ Registers required namespaces & creates application node if missing.
+
+ Returns tuple of:
+ doc: Root xml document.
+ manifest_node: the <manifest> node.
+ app_node: the <application> node.
+ """
+ _RegisterElementTreeNamespaces()
+ doc = ElementTree.parse(path)
+ # ElementTree.find does not work if the required tag is the root.
+ if doc.getroot().tag == 'manifest':
+ manifest_node = doc.getroot()
+ else:
+ manifest_node = doc.find('manifest')
+
+ app_node = doc.find('application')
+ if app_node is None:
+ app_node = ElementTree.SubElement(manifest_node, 'application')
+
+ return doc, manifest_node, app_node
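+
+
+# Illustrative usage sketch (not part of the original change):
+#
+#   doc, manifest_node, app_node = ParseAndroidManifest('AndroidManifest.xml')
+#   package_name = manifest_node.get('package')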
diff --git a/deps/v8/build/android/gyp/util/resource_utils_test.py b/deps/v8/build/android/gyp/util/resource_utils_test.py
new file mode 100755
index 0000000000..dc1094aca0
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python
+# coding: utf-8
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils # pylint: disable=relative-import
+
+# pylint: disable=line-too-long
+
+_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+# pylint: enable=line-too-long
+
+_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<!-- this file intentionally empty -->
+</resources>
+'''
+
+_TEST_RESOURCES_MAP_1 = {
+ 'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
+ 'opening_file_error': 'Valit. faili avamine ebaõnnestus',
+ 'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
+ 'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>',
+}
+
+_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}
+
+_TEST_RESOURCES_WHITELIST_1 = ['low_memory_error', 'structured_text']
+
+# Extracted from one generated Chromium R.txt file, with string resource
+# names shuffled randomly.
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
+int anim abc_fade_out 0x7f050001
+int anim abc_grow_fade_in_from_bottom 0x7f050002
+int array DefaultCookiesSettingEntries 0x7f120002
+int array DefaultCookiesSettingValues 0x7f120003
+int array DefaultGeolocationSettingEntries 0x7f120004
+int attr actionBarDivider 0x7f0100e7
+int attr actionBarStyle 0x7f0100e2
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
+int string AllowedDomainsForAppsTitle 0x7f0c0104
+int string AlternateErrorPagesEnabledTitle 0x7f0c0106
+int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
+int styleable SnackbarLayout_android_maxWidth 0
+int styleable SnackbarLayout_elevation 2
+'''
+
+# Test whitelist R.txt file. Note that AlternateErrorPagesEnabledTitle is
+# listed as an 'anim' and should thus be skipped. Similarly the string
+# 'ThisStringDoesNotAppear' should not be in the final result.
+_TEST_WHITELIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string ThisStringDoesNotAppear 0x7f0fffff
+'''
+
+_TEST_R_TEXT_RESOURCES_IDS = {
+ 0x7f0c0105: 'AllowedDomainsForAppsDesc',
+ 0x7f0c0107: 'AlternateErrorPagesEnabledDesc',
+}
+
+# Names of string resources in _TEST_R_TXT, should be sorted!
+_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([
+ 'AllowedDomainsForAppsDesc',
+ 'AllowedDomainsForAppsTitle',
+ 'AlternateErrorPagesEnabledDesc',
+ 'AlternateErrorPagesEnabledTitle',
+ 'AuthAndroidNegotiateAccountTypeDesc',
+])
+
+
+def _CreateTestFile(tmp_dir, file_name, file_data):
+ file_path = os.path.join(tmp_dir, file_name)
+ with open(file_path, 'wt') as f:
+ f.write(file_data)
+ return file_path
+
+
+class ResourceUtilsTest(unittest.TestCase):
+
+ def test_GetRTxtStringResourceNames(self):
+ with build_utils.TempDir() as tmp_dir:
+ tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+ self.assertListEqual(
+ resource_utils.GetRTxtStringResourceNames(tmp_file),
+ _TEST_R_TXT_STRING_RESOURCE_NAMES)
+
+ def test_GenerateStringResourcesWhitelist(self):
+ with build_utils.TempDir() as tmp_dir:
+ tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+ tmp_whitelist_rtxt_file = _CreateTestFile(tmp_dir, "test_whitelist_R.txt",
+ _TEST_WHITELIST_R_TXT)
+ self.assertDictEqual(
+ resource_utils.GenerateStringResourcesWhitelist(
+ tmp_module_rtxt_file, tmp_whitelist_rtxt_file),
+ _TEST_R_TEXT_RESOURCES_IDS)
+
+ def test_IsAndroidLocaleQualifier(self):
+ good_locales = [
+ 'en',
+ 'en-rUS',
+ 'fil',
+ 'fil-rPH',
+ 'iw',
+ 'iw-rIL',
+ 'b+en',
+ 'b+en+US',
+ 'b+ja+Latn',
+ 'b+ja+JP+Latn',
+ 'b+cmn+Hant-TW',
+ ]
+ bad_locales = [
+ 'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+'
+ ]
+ for locale in good_locales:
+ self.assertTrue(
+ resource_utils.IsAndroidLocaleQualifier(locale),
+ msg="'%s' should be a good locale!" % locale)
+
+ for locale in bad_locales:
+ self.assertFalse(
+ resource_utils.IsAndroidLocaleQualifier(locale),
+ msg="'%s' should be a bad locale!" % locale)
+
+ def test_ToAndroidLocaleName(self):
+ _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
+ 'en': 'en',
+ 'en-US': 'en-rUS',
+ 'en-FOO': 'en-rFOO',
+ 'fil': 'tl',
+ 'tl': 'tl',
+ 'he': 'iw',
+ 'he-IL': 'iw-rIL',
+ 'id': 'in',
+ 'id-BAR': 'in-rBAR',
+ 'nb': 'nb',
+ 'yi': 'ji'
+ }
+ for chromium_locale, android_locale in \
+ _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.iteritems():
+ result = resource_utils.ToAndroidLocaleName(chromium_locale)
+ self.assertEqual(result, android_locale)
+
+ def test_ToChromiumLocaleName(self):
+ _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
+ 'foo': 'foo',
+ 'foo-rBAR': 'foo-BAR',
+ 'b+foo': 'foo',
+ 'b+foo+BAR': 'foo-BAR',
+ 'b+foo+BAR+Whatever': 'foo-BAR',
+ 'b+foo+Whatever+BAR': 'foo-BAR',
+ 'b+foo+Whatever': 'foo',
+ 'en': 'en',
+ 'en-rUS': 'en-US',
+ 'en-US': None,
+ 'en-FOO': None,
+ 'en-rFOO': 'en-FOO',
+ 'es-rES': 'es-ES',
+ 'es-rUS': 'es-419',
+ 'tl': 'fil',
+ 'fil': 'fil',
+ 'iw': 'he',
+ 'iw-rIL': 'he-IL',
+ 'in': 'id',
+ 'in-rBAR': 'id-BAR',
+ 'id-rBAR': 'id-BAR',
+ 'nb': 'nb',
+ 'no': 'nb', # http://crbug.com/920960
+ }
+ for android_locale, chromium_locale in \
+ _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.iteritems():
+ result = resource_utils.ToChromiumLocaleName(android_locale)
+ self.assertEqual(result, chromium_locale)
+
+ def test_FindLocaleInStringResourceFilePath(self):
+ self.assertEqual(
+ None,
+ resource_utils.FindLocaleInStringResourceFilePath(
+ 'res/values/whatever.xml'))
+ self.assertEqual(
+ 'foo',
+ resource_utils.FindLocaleInStringResourceFilePath(
+ 'res/values-foo/whatever.xml'))
+ self.assertEqual(
+ 'foo-rBAR',
+ resource_utils.FindLocaleInStringResourceFilePath(
+ 'res/values-foo-rBAR/whatever.xml'))
+ self.assertEqual(
+ None,
+ resource_utils.FindLocaleInStringResourceFilePath(
+ 'res/values-foo/ignore-subdirs/whatever.xml'))
+
+ def test_ParseAndroidResourceStringsFromXml(self):
+ ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
+ _TEST_XML_INPUT_1)
+ self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1)
+ self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)
+
+ def test_GenerateAndroidResourceStringsXml(self):
+ # First, an empty strings map, with no namespaces
+ result = resource_utils.GenerateAndroidResourceStringsXml({})
+ self.assertEqual(result, _TEST_XML_OUTPUT_EMPTY)
+
+ result = resource_utils.GenerateAndroidResourceStringsXml(
+ _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+ self.assertEqual(result, _TEST_XML_INPUT_1)
+
+ @staticmethod
+ def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
+ values_dir = os.path.join(output_dir, 'values-' + locale)
+ build_utils.MakeDirectory(values_dir)
+ file_path = os.path.join(values_dir, 'strings.xml')
+ with open(file_path, 'w') as f:
+ file_data = resource_utils.GenerateAndroidResourceStringsXml(
+ string_map, namespaces)
+ f.write(file_data)
+ return file_path
+
+ def _CheckTestResourceFile(self, file_path, expected_data):
+ with open(file_path) as f:
+ file_data = f.read()
+ self.assertEqual(file_data, expected_data)
+
+ def test_FilterAndroidResourceStringsXml(self):
+ with build_utils.TempDir() as tmp_path:
+ test_file = self._CreateTestResourceFile(
+ tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+ resource_utils.FilterAndroidResourceStringsXml(
+ test_file, lambda x: x in _TEST_RESOURCES_WHITELIST_1)
+ self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/write_build_config.py b/deps/v8/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000000..68dfac4bb8
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_build_config.py
@@ -0,0 +1,1643 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like the javac classpath and the list of Android resources
+dependencies. It also includes the information needed to create the
+build_config for other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+ --python-arg=@FileArg(build_config_path:javac:classpath)
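+
+For example (illustrative values): if `foo.build_config` contains the json
+dict `{"deps_info": {"jar_path": "lib/foo.jar"}}`, then the argument
+`@FileArg(foo.build_config:deps_info:jar_path)` expands to `lib/foo.jar`.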
+
+Note: If paths to input files are passed in this way, it is important that:
+ 1. inputs/deps of the action ensure that the files are available the first
+ time the action runs.
+ 2. Either (a) or (b)
+ a. inputs/deps ensure that the action runs whenever one of the files changes
+ b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $CHROMIUM_OUTPUT_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document explains the format of the `.build_config` files generated during
+the Android build of Chromium. For a higher-level explanation of these files,
+please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies. This dictionary has the
+following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+ The following types are recognized by both the internal GN build rules and
+ the build scripts:
+
+ * [java_binary](#target_java_binary)
+ * [java_annotation_processor](#target_java_annotation_processor)
+ * [junit_binary](#target_junit_binary)
+ * [java_library](#target_java_library)
+ * [android_assets](#target_android_assets)
+ * [android_resources](#target_android_resources)
+ * [android_apk](#target_android_apk)
+ * [android_app_bundle_module](#target_android_app_bundle_module)
+ * [android_app_bundle](#target_android_app_bundle)
+ * [dist_jar](#target_dist_jar)
+ * [dist_aar](#target_dist_aar)
+ * [resource_rewriter](#target_resource_rewriter)
+ * [group](#target_group)
+
+ See later sections for more details of some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+ NOTE: Because the `.build_config` of a given target is always generated
+ after the `.build_config` of its dependencies, the `write_build_config.py`
+ script can use chains of `deps_configs` to compute transitive dependencies
+ for each target when needed.
+
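+To make the above concrete, here is an illustrative skeleton (with
+hypothetical paths) of the required part of a `.build_config` file for a
+`java_library` target:
+
+    {
+      "deps_info": {
+        "type": "java_library",
+        "path": "gen/foo/foo_java.build_config",
+        "name": "foo_java.build_config",
+        "deps_configs": [ "gen/base/base_java.build_config" ]
+      }
+    }
+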
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+ NOTE: For `android_resources` targets,
+ this is the package name for the corresponding R class. For `android_apk`
+ targets, this is the corresponding package name. This does *not* appear for
+ other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+# Top-level `resources` dictionary:
+
+This dictionary only appears for a few target types that can contain or
+relate to Android resources (e.g. `android_resources` or `android_apk`):
+
+* `resources['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies for the current target.
+
+* `resources['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies for the current target. Computed automatically by
+`write_build_config.py`.
+
+* `resources['extra_r_text_files']`:
+Always empty for `android_resources` types. Otherwise, the list of
+`deps_info['r_text']` entries for all `android_resources` dependencies for
+the current target. Computed automatically.
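+
+For example (illustrative, with hypothetical paths), an `android_apk` target
+could have a top-level `resources` dictionary such as:
+
+    "resources": {
+      "dependency_zips": [ "resource_zips/ui/ui_java.resources.zip" ],
+      "extra_package_names": [ "org.chromium.ui" ],
+      "extra_r_text_files": [ "gen/ui/ui_java_R.txt" ]
+    }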
+
+
+# `.build_config` target types description:
+
+## <a name="target_group">Target type `group`</a>:
+
+This type corresponds to a simple target that is only used to group
+dependencies. It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## <a name="target_android_resources">Target type `android_resources`</a>:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+* `deps_info['resource_dirs']`:
+List of paths to the source directories containing the resources for this
+target. This key is optional, because some targets can refer to prebuilt
+`.aar` archives.
+
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (but no `R.txt`, `R.java` or `R.class` files).
+
+ If `deps_info['resource_dirs']` is missing, this must point to a prebuilt
+ `.aar` archive containing resources. Otherwise, this will point to a
+ zip archive generated at build time, wrapping the content of
+ `deps_info['resource_dirs']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['r_text']`:
+Provide the path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text']`. This is *always*
+generated from the content of `deps_info['r_text']` by the
+`build/android/gyp/process_resources.py` script.
+
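+An illustrative `deps_info` fragment (hypothetical paths) for an
+`android_resources` target:
+
+    "deps_info": {
+      "type": "android_resources",
+      "package_name": "org.chromium.foo",
+      "resources_zip": "resource_zips/foo/foo_java.resources.zip",
+      "r_text": "gen/foo/foo_java_R.txt",
+      "srcjar_path": "gen/foo/foo_java.srcjar"
+    }
+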
+
+## <a name="target_android_assets">Target type `android_assets`</a>:
+
+This type corresponds to targets used to group Android assets, i.e. arbitrary
+files that will be placed under `//assets/` within the final APK.
+
+These use a `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target, as illustrated at the end of this section.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final `//assets/` sub-directory. When this happens, this contains the list of
+all renamed output file paths.
+
+ NOTE: When not empty, the first items of `assets['sources']` correspond
+ one-to-one with the items of this list; any extra sources are stored under
+ their original names.
+
+ NOTE: This comes from the `asset_renaming_destinations` parameter for the
+ `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional. Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special `BuildConfig.java` source file, listing all supported locales in
+the current build.
+
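+For example, a hypothetical `android_assets` target whose locale `.pak` files
+must be stored uncompressed could have:
+
+    "deps_info": {
+      "type": "android_assets",
+      "assets": {
+        "sources": [ "../locales/en-US.pak" ],
+        "disable_compression": true,
+        "treat_as_locale_paks": true
+      }
+    }
+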
+
+## <a name="target_java_library">Target type `java_library`</a>:
+
+This type is used to describe targets that wrap Java bytecode, either created
+by compiling sources, or provided as a prebuilt jar.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools.
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running the `ijar`
+tool on `deps_info['jar_path']`.
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from `deps_info['jar_path']`
+unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will point to a build-generated file.
+
+* `deps_info['java_sources_file']`:
+Path to a single `.sources` file listing all the Java sources that were used
+to generate the library (simple text format, one source path per line).
+
+* `deps_info['owned_resource_dirs']`:
+List of all resource directories belonging to all resource dependencies for
+this target.
+
+* `deps_info['owned_resource_zips']`:
+List of all resource zip files belonging to all resource dependencies for this
+target.
+
+* `deps_info['owned_resource_srcjars']`:
+List of all .srcjar files belonging to all resource dependencies for this
+target.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Appears also on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when using the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+* `deps_info['extra_classpath_interface_jars']`:
+The interface jars corresponding to extra_classpath_jars.
+
+## <a name="target_java_binary">Target type `java_binary`</a>:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['java_runtime_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['jar_path']` entries for the target and all its
+dependencies.
+
+
+## <a name="target_junit_binary">Target type `junit_binary`</a>:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for its name.
+
+
+## <a name="target_java_annotation_processor">Target type \
+`java_annotation_processor`</a>:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for its name, and that it requires a `deps_info['main_class']` entry.
+
+
+## <a name="target_android_apk">Target type `android_apk`</a>:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['path']`:
+Path to the final classes.dex file (or classes.zip in case of multi-dex)
+for this APK.
+
+* `deps_info['final_dex']['dependency_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all library
+dependencies for this APK.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']` as a string holding a Java source
+fragment, e.g. `"{\"chrome\"}"`, without any `lib` prefix or `.so`
+suffix (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['uncompress_shared_libraries']`
+A boolean indicating whether native libraries are stored uncompressed in the
+APK.
+
+* `native['extra_shared_libraries']`
+A list of native libraries to store within the APK, in addition to those from
+`native['libraries']`. These correspond to things like the Chromium linker
+or instrumentation libraries.
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source-path>:<destination-path>`, where `<source-path>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source-path>:<destination-path>` too.
+
+* `compressed_locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+compressed in the `//assets/` directory. E.g. `"{\"am\",\"ar\",\"en-US\"}"`.
+Note that the files will be stored with the `.pak` extension (e.g.
+`//assets/en-US.pak`).
+
+* `uncompressed_locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed in the `//assets/stored-locales/` directory. These are used for
+the System WebView feature only. Note that the files will be stored with the
+`.pak` extension (e.g. `//assets/stored-locales/en-US.pak`).
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `jni['all_source']`
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java source files. This is used for JNI registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_classpath_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+* `deps_info['proguard_under_test_mapping']`:
+Applicable to apks with proguard enabled that have an apk_under_test. This is
+the path to the apk_under_test's output proguard .mapping file.
+
+## <a name="target_android_app_bundle_module">Target type \
+`android_app_bundle_module`</a>:
+
+Corresponds to an Android app bundle module. Very similar to an APK and
+inherits the same fields, except that it does not generate an installable
+file (see `android_app_bundle`), and omits the following fields:
+
+* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and
+ `deps_info['incremental_install_json_path']` are omitted.
+
+* top-level `dist_jar` is omitted as well.
+
+Compared to `android_apk` targets, it also adds the following new fields:
+
+* `deps_info['proto_resources_path']`:
+The path of a zip archive containing the APK's resources compiled to the
+protocol buffer format (instead of regular binary xml + resources.arsc).
+
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the bundle
+module.
+
+* `deps_info['base_whitelist_rtxt_path']`:
+Optional path to an R.txt file used as a whitelist for base string resources.
+This means that any string resource listed in this file *and* in
+`deps_info['module_rtxt_path']` will end up in the base split APK of any
+`android_app_bundle` target that uses this target as its base module.
+
+This ensures that such localized strings are available to all bundle installs,
+even when language-based splits are enabled (e.g. required for WebView strings
+inside the Monochrome bundle).
+
+
+## <a name="target_android_app_bundle">Target type `android_app_bundle`</a>
+
+This target type corresponds to an Android app bundle (`.aab` file), and is
+built from one or more `android_app_bundle_module` targets listed as
+dependencies.
+
+
+## <a name="target_dist_aar">Target type `dist_aar`</a>:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+This always has the following entries:
+
+ * `deps_info['supports_android']` (always True).
+ * `deps_info['requires_android']` (always True).
+ * `deps_info['proguard_configs']` (optional).
+
+
+## <a name="target_dist_jar">Target type `dist_jar`</a>:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and is used to create a `.jar` file that can later be
+redistributed.
+
+This always has the following entries:
+
+ * `deps_info['proguard_enabled']` (False by default).
+ * `deps_info['proguard_configs']` (optional).
+ * `deps_info['supports_android']` (True by default).
+ * `deps_info['requires_android']` (False by default).
+
+
+
+## <a name="target_resource_rewriter">Target type `resource_rewriter`</a>:
+
+The ResourceRewriter Java class is in charge of rewriting resource IDs at
+runtime, for the benefit of the System WebView feature. This is a special
+target type for it.
+
+Its `.build_config` only keeps a list of dependencies in its
+`deps_info['deps_configs']` key.
+
+## <a name="dict_javac">The `deps_info['javac']` dictionary</a>:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['resource_packages']`
+For `java_library` targets, this is the list of package names for all resource
+dependencies for the current target. Order must match the one from
+`javac['srcjars']`. For other target types, this key does not exist.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These are also always used to know when a target needs to be
+rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor` when
+invoking `javac`.
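+
+An illustrative `javac` dictionary (hypothetical paths) for a `java_library`
+target:
+
+    "javac": {
+      "classpath": [ "obj/base/base_java.javac.jar" ],
+      "interface_classpath": [ "obj/base/base_java.interface.jar" ],
+      "processor_classpath": [],
+      "processor_classes": []
+    }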
+
+--------------- END_MARKDOWN ---------------------------------------------------
+TODO(estevenson): Add docs for static library synchronized proguarding.
+"""
+
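+# Tip: the Markdown documentation above can be dumped to stdout by running
+# this script with the --generate-markdown-format-doc flag (see main() below).
+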
+import collections
+import itertools
+import json
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+
+# Types that should never be used as a dependency of another build config.
+_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
+ 'junit_binary', 'resource_rewriter', 'android_app_bundle')
+# Types that should not allow code deps to pass through.
+_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')
+
+
+def _ExtractMarkdownDocumentation(input_text):
+ """Extract Markdown documentation from a list of input strings lines.
+
+ This generates a list of strings extracted from |input_text|, by looking
+ for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+ in_markdown = False
+ result = []
+ for line in input_text.splitlines():
+ if in_markdown:
+ if '-- END_MARKDOWN --' in line:
+ in_markdown = False
+ else:
+ result.append(line)
+ else:
+ if '-- BEGIN_MARKDOWN --' in line:
+ in_markdown = True
+
+ return result
+
+class AndroidManifest(object):
+ def __init__(self, path):
+ self.path = path
+ dom = xml.dom.minidom.parse(path)
+ manifests = dom.getElementsByTagName('manifest')
+ assert len(manifests) == 1
+ self.manifest = manifests[0]
+
+ def GetInstrumentationElements(self):
+ instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+ if len(instrumentation_els) == 0:
+ return None
+ return instrumentation_els
+
+ def CheckInstrumentationElements(self, expected_package):
+ instrs = self.GetInstrumentationElements()
+ if not instrs:
+ raise Exception('No <instrumentation> elements found in %s' % self.path)
+ for instr in instrs:
+ instrumented_package = instr.getAttributeNS(
+ 'http://schemas.android.com/apk/res/android', 'targetPackage')
+ if instrumented_package != expected_package:
+ raise Exception(
+ 'Wrong instrumented package. Expected %s, got %s'
+ % (expected_package, instrumented_package))
+
+ def GetPackageName(self):
+ return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+ if path not in dep_config_cache:
+ with open(path) as jsonfile:
+ dep_config_cache[path] = json.load(jsonfile)['deps_info']
+ return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+ return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+ def GetDeps(path):
+ return GetDepConfig(path)['deps_configs']
+ return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+class Deps(object):
+ def __init__(self, direct_deps_config_paths):
+ self.all_deps_config_paths = GetAllDepsConfigsInOrder(
+ direct_deps_config_paths)
+ self.direct_deps_configs = [
+ GetDepConfig(p) for p in direct_deps_config_paths]
+ self.all_deps_configs = [
+ GetDepConfig(p) for p in self.all_deps_config_paths]
+ self.direct_deps_config_paths = direct_deps_config_paths
+
+ def All(self, wanted_type=None):
+ if wanted_type is None:
+ return self.all_deps_configs
+ return DepsOfType(wanted_type, self.all_deps_configs)
+
+ def Direct(self, wanted_type=None):
+ if wanted_type is None:
+ return self.direct_deps_configs
+ return DepsOfType(wanted_type, self.direct_deps_configs)
+
+ def AllConfigPaths(self):
+ return self.all_deps_config_paths
+
+ def RemoveNonDirectDep(self, path):
+ if path in self.direct_deps_config_paths:
+ raise Exception('Cannot remove direct dep.')
+ self.all_deps_config_paths.remove(path)
+ self.all_deps_configs.remove(GetDepConfig(path))
+
+ def GradlePrebuiltJarPaths(self):
+ ret = []
+
+ def helper(cur):
+ for config in cur.Direct('java_library'):
+ if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
+ if config['jar_path'] not in ret:
+ ret.append(config['jar_path'])
+
+ helper(self)
+ return ret
+
+ def GradleLibraryProjectDeps(self):
+ ret = []
+
+ def helper(cur):
+ for config in cur.Direct('java_library'):
+ if config['is_prebuilt']:
+ pass
+ elif config['gradle_treat_as_prebuilt']:
+ helper(Deps(config['deps_configs']))
+ elif config not in ret:
+ ret.append(config)
+
+ helper(self)
+ return ret
+
+
+def _MergeAssets(all_assets):
+ """Merges all assets from the given deps.
+
+ Returns:
+ A tuple of: (compressed, uncompressed, locale_paks)
+ |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is
+ the path of the asset to add, and zipPath is the location within the zip
+ (excluding assets/ prefix).
+ |locale_paks| is a set of all zipPaths that have been marked as
+ treat_as_locale_paks=true.
+ """
+ compressed = {}
+ uncompressed = {}
+ locale_paks = set()
+ for asset_dep in all_assets:
+ entry = asset_dep['assets']
+ disable_compression = entry.get('disable_compression')
+ treat_as_locale_paks = entry.get('treat_as_locale_paks')
+ dest_map = uncompressed if disable_compression else compressed
+ other_map = compressed if disable_compression else uncompressed
+ outputs = entry.get('outputs', [])
+ for src, dest in itertools.izip_longest(entry['sources'], outputs):
+ if not dest:
+ dest = os.path.basename(src)
+ # Merge so that each path shows up in only one of the lists, and that
+ # deps of the same target override previous ones.
+ other_map.pop(dest, 0)
+ dest_map[dest] = src
+ if treat_as_locale_paks:
+ locale_paks.add(dest)
+
+ def create_list(asset_map):
+ ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()]
+ # Sort to ensure deterministic ordering.
+ ret.sort()
+ return ret
+
+ return create_list(compressed), create_list(uncompressed), locale_paks
+
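+# Illustrative example of _MergeAssets (hypothetical values): for a single
+# dep whose 'assets' entry is
+#   {'sources': ['../locales/en-US.pak'], 'disable_compression': True,
+#    'treat_as_locale_paks': True}
+# the function returns:
+#   ([], ['../locales/en-US.pak:en-US.pak'], set(['en-US.pak']))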
+
+def _ResolveGroups(configs):
+ """Returns a list of configs with all groups inlined."""
+ ret = list(configs)
+ while True:
+ groups = DepsOfType('group', ret)
+ if not groups:
+ return ret
+ for config in groups:
+ index = ret.index(config)
+ expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
+ ret[index:index + 1] = expanded_configs
+
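+# For example (hypothetical): a 'group' config whose 'deps_configs' lists
+# a.build_config and b.build_config is replaced in-place by the configs of
+# a and b; this repeats until no 'group' configs remain.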
+
+def _DepsFromPaths(dep_paths, target_type, filter_root_targets=True):
+ """Resolves all groups and trims dependency branches that we never want.
+
+ E.g. When a resource or asset depends on an apk target, the intent is to
+ include the .apk as a resource/asset, not to have the apk's classpath added.
+ """
+ configs = [GetDepConfig(p) for p in dep_paths]
+ groups = DepsOfType('group', configs)
+ configs = _ResolveGroups(configs)
+ configs += groups
+ # Don't allow root targets to be considered as a dep.
+ if filter_root_targets:
+ configs = [c for c in configs if c['type'] not in _ROOT_TYPES]
+
+ # Don't allow java libraries to cross through assets/resources.
+ if target_type in _RESOURCE_TYPES:
+ configs = [c for c in configs if c['type'] in _RESOURCE_TYPES]
+
+ return Deps([c['path'] for c in configs])
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
+ ret = []
+ with open(runtime_deps_file) as f:
+ for line in f:
+ line = line.rstrip()
+ if not line.endswith('.so'):
+ continue
+ # Only unstripped .so files are listed in runtime deps.
+ # Convert to the stripped .so by going up one directory.
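+ # E.g. 'lib.unstripped/libfoo.so' -> 'libfoo.so' (hypothetical name).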
+ ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
+ ret.reverse()
+ return ret
+
+
+def _CreateJavaLibrariesList(library_paths):
+ """Returns a java literal array with the "base" library names:
+ e.g. libfoo.so -> foo
+ """
+ return ('{%s}' % ','.join(['"%s"' % s[3:-3] for s in library_paths]))
+
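+# For example (hypothetical): _CreateJavaLibrariesList(['libchrome.so'])
+# returns '{"chrome"}'.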
+
+def _CreateJavaLocaleListFromAssets(assets, locale_paks):
+ """Returns a java literal array from a list of locale assets.
+
+ Args:
+ assets: A list of all APK asset paths in the form 'src:dst'
+ locale_paks: A list of asset paths that correspond to the locale pak
+ files of interest. Each |assets| entry has its 'dst' part matched against
+ this list to determine whether it is part of the result.
+ Returns:
+ A string that is a Java source literal array listing the locale names
+ of the corresponding asset files, without directory or .pak suffix.
+ E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }'
+ """
+ assets_paths = [a.split(':')[1] for a in assets]
+ locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks]
+ return '{%s}' % ','.join(['"%s"' % l for l in sorted(locales)])
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_option('--build-config', help='Path to build_config output.')
+ parser.add_option(
+ '--type',
+ help='Type of this target (e.g. android_library).')
+ parser.add_option(
+ '--deps-configs',
+ help='GN-list of dependent build_config files.')
+ parser.add_option(
+ '--annotation-processor-configs',
+ help='GN-list of build_config files for annotation processors.')
+ parser.add_option(
+ '--classpath-deps-configs',
+ help='GN-list of build_config files for libraries to include as '
+ 'build-time-only classpath.')
+
+ # android_resources options
+ parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+ parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+ parser.add_option('--r-text', help='Path to target\'s R.txt file.')
+ parser.add_option('--package-name',
+ help='Java package name for these resources.')
+ parser.add_option('--android-manifest', help='Path to android manifest.')
+ parser.add_option('--resource-dirs', action='append', default=[],
+ help='GN-list of resource dirs')
+
+ # android_assets options
+ parser.add_option('--asset-sources', help='List of asset sources.')
+ parser.add_option('--asset-renaming-sources',
+ help='List of asset sources with custom destinations.')
+ parser.add_option('--asset-renaming-destinations',
+ help='List of asset custom destinations.')
+ parser.add_option('--disable-asset-compression', action='store_true',
+ help='Whether to disable asset compression.')
+ parser.add_option('--treat-as-locale-paks', action='store_true',
+ help='Consider the assets as locale paks in BuildConfig.java')
+
+ # java library options
+ parser.add_option('--jar-path', help='Path to target\'s jar output.')
+ parser.add_option('--unprocessed-jar-path',
+ help='Path to the .jar to use for javac classpath purposes.')
+ parser.add_option('--interface-jar-path',
+ help='Path to the .interface.jar to use for javac classpath purposes.')
+ parser.add_option('--is-prebuilt', action='store_true',
+ help='Whether the jar was compiled or pre-compiled.')
+ parser.add_option('--java-sources-file', help='Path to .sources file')
+ parser.add_option('--bundled-srcjars',
+ help='GN-list of .srcjars that have been included in this java_library.')
+ parser.add_option('--supports-android', action='store_true',
+ help='Whether this library supports running on the Android platform.')
+ parser.add_option('--requires-android', action='store_true',
+ help='Whether this library requires running on the Android platform.')
+ parser.add_option('--bypass-platform-checks', action='store_true',
+ help='Bypass checks for support/require Android platform.')
+ parser.add_option('--extra-classpath-jars',
+ help='GYP-list of .jar files to include on the classpath when compiling, '
+ 'but not to include in the final binary.')
+ parser.add_option('--gradle-treat-as-prebuilt', action='store_true',
+ help='Whether this library should be treated as a prebuilt library by '
+ 'generate_gradle.py.')
+ parser.add_option('--main-class',
+ help='Main class for java_binary or java_annotation_processor targets.')
+ parser.add_option('--java-resources-jar-path',
+ help='Path to JAR that contains java resources. Everything '
+ 'from this JAR except meta-inf/ content and .class files '
+ 'will be added to the final APK.')
+
+ # android library options
+ parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+ # native library options
+ parser.add_option('--shared-libraries-runtime-deps',
+ help='Path to file containing runtime deps for shared '
+ 'libraries.')
+ parser.add_option('--native-libs',
+ action='append',
+ help='GN-list of native libraries for primary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_option('--secondary-abi-shared-libraries-runtime-deps',
+ help='Path to file containing runtime deps for secondary '
+ 'abi shared libraries.')
+ parser.add_option('--secondary-native-libs',
+ action='append',
+ help='GN-list of native libraries for secondary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_option(
+ '--native-lib-placeholders',
+ action='append',
+ help='GN-list of native library placeholders to add.',
+ default=[])
+ parser.add_option(
+ '--secondary-native-lib-placeholders',
+ action='append',
+ help='GN-list of native library placeholders to add '
+ 'for the secondary android-abi.',
+ default=[])
+ parser.add_option('--uncompress-shared-libraries', default=False,
+ action='store_true',
+ help='Whether to store native libraries uncompressed')
+ # apk options
+ parser.add_option('--apk-path', help='Path to the target\'s apk output.')
+ parser.add_option('--incremental-apk-path',
+ help="Path to the target's incremental apk output.")
+ parser.add_option('--incremental-install-json-path',
+ help="Path to the target's generated incremental install "
+ "json.")
+ parser.add_option(
+ '--static-library-dependent-configs',
+ help='GN list of .build_configs of targets that use this target as a '
+ 'static library.')
+
+ parser.add_option('--tested-apk-config',
+ help='Path to the build config of the tested apk (for an instrumentation '
+ 'test apk).')
+ parser.add_option('--proguard-enabled', action='store_true',
+ help='Whether proguard is enabled for this apk or bundle module.')
+ parser.add_option('--proguard-configs',
+ help='GN-list of proguard flag files to use in final apk.')
+ parser.add_option('--proguard-mapping-path',
+ help='Path to .mapping file created by the ProGuard step')
+ parser.add_option('--fail',
+ help='GN-list of error message lines to fail with.')
+
+ parser.add_option('--final-dex-path',
+ help='Path to final input classes.dex (or classes.zip) to '
+ 'use in final apk.')
+ parser.add_option('--apk-proto-resources',
+ help='Path to resources compiled in protocol buffer format '
+ 'for this apk.')
+ parser.add_option(
+ '--module-rtxt-path',
+ help='Path to R.txt file for resources in a bundle module.')
+ parser.add_option(
+ '--base-whitelist-rtxt-path',
+ help='Path to R.txt file for the base resources whitelist.')
+
+ parser.add_option('--generate-markdown-format-doc', action='store_true',
+ help='Dump the Markdown .build_config format documentation '
+ 'then exit immediately.')
+
+ parser.add_option(
+ '--base-module-build-config',
+ help='Path to the base module\'s build config '
+ 'if this is a feature module.')
+
+ options, args = parser.parse_args(argv)
+
+ if args:
+ parser.error('No positional arguments should be given.')
+
+ if options.generate_markdown_format_doc:
+ doc_lines = _ExtractMarkdownDocumentation(__doc__)
+ for line in doc_lines:
+ print(line)
+ return 0
+
+ if options.fail:
+ parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
+
+ jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path']
+ required_options_map = {
+ 'android_apk': ['build_config', 'dex_path', 'final_dex_path'] + \
+ jar_path_options,
+ 'android_app_bundle_module': ['build_config', 'dex_path',
+ 'final_dex_path'] + jar_path_options,
+ 'android_assets': ['build_config'],
+ 'android_resources': ['build_config', 'resources_zip'],
+ 'dist_aar': ['build_config'],
+ 'dist_jar': ['build_config'],
+ 'group': ['build_config'],
+ 'java_annotation_processor': ['build_config', 'main_class'],
+ 'java_binary': ['build_config'],
+ 'java_library': ['build_config'] + jar_path_options,
+ 'junit_binary': ['build_config'],
+ 'resource_rewriter': ['build_config'],
+ 'system_java_library': ['build_config'],
+ 'android_app_bundle': ['build_config'],
+ }
+ required_options = required_options_map.get(options.type)
+ if not required_options:
+ raise Exception('Unknown type: <%s>' % options.type)
+
+ build_utils.CheckOptions(options, parser, required_options)
+
+ if options.type != 'android_app_bundle_module':
+ if options.apk_proto_resources:
+ raise Exception('--apk-proto-resources can only be used with '
+ '--type=android_app_bundle_module')
+ if options.module_rtxt_path:
+ raise Exception('--module-rtxt-path can only be used with '
+ '--type=android_app_bundle_module')
+ if options.base_whitelist_rtxt_path:
+ raise Exception('--base-whitelist-rtxt-path can only be used with '
+ '--type=android_app_bundle_module')
+
+ is_apk_or_module_target = options.type in ('android_apk',
+ 'android_app_bundle_module')
+
+ if options.uncompress_shared_libraries:
+ if not is_apk_or_module_target:
+ raise Exception('--uncompress-shared-libraries can only be used '
+ 'with --type=android_apk or '
+ '--type=android_app_bundle_module')
+
+ if options.jar_path and options.supports_android and not options.dex_path:
+ raise Exception('java_library that supports Android requires a dex path.')
+ if any(getattr(options, x) for x in jar_path_options):
+ for attr in jar_path_options:
+ if not getattr(options, attr):
+ raise Exception('Expected %s to be set.' % attr)
+
+ if options.requires_android and not options.supports_android:
+ raise Exception(
+ '--supports-android is required when using --requires-android')
+
+ is_java_target = options.type in (
+ 'java_binary', 'junit_binary', 'java_annotation_processor',
+ 'java_library', 'android_apk', 'dist_aar', 'dist_jar',
+ 'system_java_library', 'android_app_bundle_module')
+
+ is_static_library_dex_provider_target = (
+ options.static_library_dependent_configs and options.proguard_enabled)
+ if is_static_library_dex_provider_target and options.type != 'android_apk':
+ raise Exception(
+ '--static-library-dependent-configs only supports --type=android_apk')
+
+ options.static_library_dependent_configs = build_utils.ParseGnList(
+ options.static_library_dependent_configs)
+ static_library_dependent_configs_by_path = {
+ p: GetDepConfig(p)
+ for p in options.static_library_dependent_configs
+ }
+
+ deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.deps_configs), options.type)
+ processor_deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.annotation_processor_configs or ''),
+ options.type, filter_root_targets=False)
+ classpath_deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.classpath_deps_configs or ''),
+ options.type)
+
+ all_inputs = sorted(
+ set(deps.AllConfigPaths() + processor_deps.AllConfigPaths() +
+ classpath_deps.AllConfigPaths() +
+ list(static_library_dependent_configs_by_path)))
+
+ system_library_deps = deps.Direct('system_java_library')
+ direct_library_deps = deps.Direct('java_library')
+ direct_group_deps = deps.Direct('group')
+ all_group_deps = deps.All('group')
+ all_library_deps = deps.All('java_library')
+ all_resources_deps = deps.All('android_resources')
+ all_classpath_library_deps = classpath_deps.All('java_library')
+
+ base_module_build_config = None
+ if options.base_module_build_config:
+ with open(options.base_module_build_config, 'r') as f:
+ base_module_build_config = json.load(f)
+
+ # Initialize some common config.
+ # Any value that needs to be queryable by dependents must go within deps_info.
+ config = {
+ 'deps_info': {
+ 'name': os.path.basename(options.build_config),
+ 'path': options.build_config,
+ 'type': options.type,
+ 'deps_configs': deps.direct_deps_config_paths
+ },
+ # Info needed only by generate_gradle.py.
+ 'gradle': {}
+ }
+ deps_info = config['deps_info']
+ gradle = config['gradle']
+
+ if options.type == 'android_apk' and options.tested_apk_config:
+ tested_apk_deps = Deps([options.tested_apk_config])
+ tested_apk_config = tested_apk_deps.Direct()[0]
+ tested_apk_resources_deps = tested_apk_deps.All('android_resources')
+ gradle['apk_under_test'] = tested_apk_config['name']
+ all_resources_deps = [
+ d for d in all_resources_deps if not d in tested_apk_resources_deps]
+
+ # Required for generating gradle files.
+ if options.type == 'java_library':
+ deps_info['is_prebuilt'] = bool(options.is_prebuilt)
+ deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt
+
+ if options.android_manifest:
+ deps_info['android_manifest'] = options.android_manifest
+
+ if is_java_target:
+ if options.java_sources_file:
+ deps_info['java_sources_file'] = options.java_sources_file
+ if options.bundled_srcjars:
+ gradle['bundled_srcjars'] = (
+ build_utils.ParseGnList(options.bundled_srcjars))
+
+ gradle['dependent_android_projects'] = []
+ gradle['dependent_java_projects'] = []
+ gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths()
+
+ if options.main_class:
+ deps_info['main_class'] = options.main_class
+
+ for c in deps.GradleLibraryProjectDeps():
+ if c['requires_android']:
+ gradle['dependent_android_projects'].append(c['path'])
+ else:
+ gradle['dependent_java_projects'].append(c['path'])
+
+ # TODO(tiborg): Remove creation of JNI info for type group and java_library
+ # once we can generate the JNI registration based on APK / module targets as
+ # opposed to groups and libraries.
+ if is_apk_or_module_target or options.type in (
+ 'group', 'java_library', 'junit_binary'):
+ deps_info['jni'] = {}
+ all_java_sources = [c['java_sources_file'] for c in all_library_deps
+ if 'java_sources_file' in c]
+ if options.java_sources_file:
+ all_java_sources.append(options.java_sources_file)
+
+ if options.apk_proto_resources:
+ deps_info['proto_resources_path'] = options.apk_proto_resources
+
+ if options.module_rtxt_path:
+ deps_info['module_rtxt_path'] = options.module_rtxt_path
+ if options.base_whitelist_rtxt_path:
+ deps_info['base_whitelist_rtxt_path'] = options.base_whitelist_rtxt_path
+ else:
+ # Ensure there is an entry, even if it is empty, for modules
+ # that don't need such a whitelist.
+ deps_info['base_whitelist_rtxt_path'] = ''
+
+ if is_java_target:
+ deps_info['requires_android'] = bool(options.requires_android)
+ deps_info['supports_android'] = bool(options.supports_android)
+
+ if not options.bypass_platform_checks:
+ deps_require_android = (all_resources_deps +
+ [d['name'] for d in all_library_deps if d['requires_android']])
+ deps_not_support_android = (
+ [d['name'] for d in all_library_deps if not d['supports_android']])
+
+ if deps_require_android and not options.requires_android:
+ raise Exception('Some deps require building for the Android platform: '
+ + str(deps_require_android))
+
+ if deps_not_support_android and options.supports_android:
+ raise Exception('Not all deps support the Android platform: '
+ + str(deps_not_support_android))
+
+ if is_java_target:
+ # Classpath values filled in below (after applying tested_apk_config).
+ config['javac'] = {}
+ if options.jar_path:
+ deps_info['jar_path'] = options.jar_path
+ deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
+ deps_info['interface_jar_path'] = options.interface_jar_path
+ if options.dex_path:
+ deps_info['dex_path'] = options.dex_path
+ if options.type == 'android_apk':
+ deps_info['apk_path'] = options.apk_path
+ deps_info['incremental_apk_path'] = options.incremental_apk_path
+ deps_info['incremental_install_json_path'] = (
+ options.incremental_install_json_path)
+
+ if options.type == 'android_assets':
+ all_asset_sources = []
+ if options.asset_renaming_sources:
+ all_asset_sources.extend(
+ build_utils.ParseGnList(options.asset_renaming_sources))
+ if options.asset_sources:
+ all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources))
+
+ deps_info['assets'] = {
+ 'sources': all_asset_sources
+ }
+ if options.asset_renaming_destinations:
+ deps_info['assets']['outputs'] = (
+ build_utils.ParseGnList(options.asset_renaming_destinations))
+ if options.disable_asset_compression:
+ deps_info['assets']['disable_compression'] = True
+ if options.treat_as_locale_paks:
+ deps_info['assets']['treat_as_locale_paks'] = True
+
+ if options.type == 'android_resources':
+ deps_info['resources_zip'] = options.resources_zip
+ if options.srcjar:
+ deps_info['srcjar'] = options.srcjar
+ if options.android_manifest:
+ manifest = AndroidManifest(options.android_manifest)
+ deps_info['package_name'] = manifest.GetPackageName()
+ if options.package_name:
+ deps_info['package_name'] = options.package_name
+ if options.r_text:
+ deps_info['r_text'] = options.r_text
+
+ deps_info['resources_dirs'] = []
+ if options.resource_dirs:
+ for gyp_list in options.resource_dirs:
+ deps_info['resources_dirs'].extend(build_utils.ParseGnList(gyp_list))
+
+ if options.requires_android and is_java_target:
+ # Lint all resources that are not already linted by a dependent library.
+ owned_resource_dirs = set()
+ owned_resource_zips = set()
+ owned_resource_srcjars = set()
+ for c in all_resources_deps:
+ # Always use resources_dirs in favour of resources_zips so that lint error
+ # messages have paths that are closer to reality (and to avoid needing to
+ # extract during lint).
+ if c['resources_dirs']:
+ owned_resource_dirs.update(c['resources_dirs'])
+ else:
+ owned_resource_zips.add(c['resources_zip'])
+ srcjar = c.get('srcjar')
+ if srcjar:
+ owned_resource_srcjars.add(srcjar)
+
+ for c in itertools.chain(all_library_deps, all_classpath_library_deps):
+ if c['requires_android']:
+ owned_resource_dirs.difference_update(c['owned_resources_dirs'])
+ owned_resource_zips.difference_update(c['owned_resources_zips'])
+ # Many .aar files include R.class files in them, as it makes it easier
+ # for IDEs to resolve symbols. However, including them is not required
+ # and not all prebuilts do. Rather than try to detect their presence,
+ # just assume they are not there. The only consequence is redundant
+ # compilation of the R.class.
+ if not c['is_prebuilt']:
+ owned_resource_srcjars.difference_update(c['owned_resource_srcjars'])
+ deps_info['owned_resources_dirs'] = sorted(owned_resource_dirs)
+ deps_info['owned_resources_zips'] = sorted(owned_resource_zips)
+ deps_info['owned_resource_srcjars'] = sorted(owned_resource_srcjars)
+
+ if options.type == 'java_library':
+ # Used to strip out R.class for android_prebuilt()s.
+ config['javac']['resource_packages'] = [
+ c['package_name'] for c in all_resources_deps if 'package_name' in c]
+
+ if options.type in (
+ 'android_resources', 'android_apk', 'junit_binary', 'resource_rewriter',
+ 'dist_aar', 'android_app_bundle_module'):
+ config['resources'] = {}
+
+ dependency_zips = [
+ c['resources_zip'] for c in all_resources_deps if c['resources_zip']
+ ]
+ extra_package_names = []
+ extra_r_text_files = []
+
+ if options.type != 'android_resources':
+ extra_package_names = [
+ c['package_name'] for c in all_resources_deps if 'package_name' in c]
+ extra_r_text_files = [
+ c['r_text'] for c in all_resources_deps if 'r_text' in c]
+
+ # For feature modules, remove any resources that already exist in the base
+ # module.
+ if base_module_build_config:
+ dependency_zips = [
+ c for c in dependency_zips
+ if c not in base_module_build_config['resources']['dependency_zips']
+ ]
+ extra_package_names = [
+ c for c in extra_package_names if c not in
+ base_module_build_config['resources']['extra_package_names']
+ ]
+ extra_r_text_files = [
+ c for c in extra_r_text_files if c not in
+ base_module_build_config['resources']['extra_r_text_files']
+ ]
+
+ config['resources']['dependency_zips'] = dependency_zips
+ config['resources']['extra_package_names'] = extra_package_names
+ config['resources']['extra_r_text_files'] = extra_r_text_files
+ if options.type == 'android_apk' and options.tested_apk_config:
+ config['resources']['arsc_package_name'] = (
+ tested_apk_config['package_name'])
+
+ if is_apk_or_module_target:
+ deps_dex_files = [c['dex_path'] for c in all_library_deps]
+
+ if options.type == 'group':
+ if options.extra_classpath_jars:
+ # These are .jars to add to javac classpath but not to runtime classpath.
+ extra_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+ deps_info['extra_classpath_jars'] = extra_jars
+ deps_info['extra_classpath_interface_jars'] = extra_jars
+
+ if is_java_target:
+ # The classpath used to compile this target when annotation processors are
+ # present.
+ javac_classpath = [
+ c['unprocessed_jar_path'] for c in direct_library_deps]
+ # The classpath used to compile this target when annotation processors are
+ # not present. These are also always used to know when a target needs to be
+ # rebuilt.
+ javac_interface_classpath = [
+ c['interface_jar_path'] for c in direct_library_deps]
+ # The classpath used for error prone.
+ javac_full_interface_classpath = [
+ c['interface_jar_path'] for c in all_library_deps]
+ # The classpath used for bytecode rewriting.
+ javac_full_classpath = [
+ c['unprocessed_jar_path'] for c in all_library_deps]
+
+ for dep in direct_group_deps:
+ javac_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_interface_classpath.extend(
+ dep.get('extra_classpath_interface_jars', []))
+ for dep in all_group_deps:
+ javac_full_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_full_interface_classpath.extend(
+ dep.get('extra_classpath_interface_jars', []))
+
+ # Deps to add to the compile-time classpath (but not the runtime classpath).
+ # TODO(agrieve): Might be less confusing to fold these into bootclasspath.
+ javac_extra_jars = [
+ c['unprocessed_jar_path'] for c in classpath_deps.All('java_library')
+ ]
+ extra_jars = [c['jar_path'] for c in classpath_deps.All('java_library')]
+ interface_extra_jars = [
+ c['interface_jar_path'] for c in classpath_deps.All('java_library')
+ ]
+
+ # These are jars specified by input_jars_paths that almost never change.
+ # Just add them directly to all the *extra_jars.
+ if options.extra_classpath_jars:
+ # These are .jars to add to javac classpath but not to runtime classpath.
+ javac_extra_jars.extend(
+ build_utils.ParseGnList(options.extra_classpath_jars))
+ extra_jars.extend(build_utils.ParseGnList(options.extra_classpath_jars))
+ interface_extra_jars.extend(
+ build_utils.ParseGnList(options.extra_classpath_jars))
+
+ if extra_jars:
+ deps_info['extra_classpath_jars'] = extra_jars
+
+ if interface_extra_jars:
+ deps_info['extra_classpath_interface_jars'] = interface_extra_jars
+
+ javac_extra_jars = [p for p in javac_extra_jars if p not in javac_classpath]
+ javac_classpath.extend(javac_extra_jars)
+ javac_full_classpath.extend(
+ p for p in javac_extra_jars if p not in javac_full_classpath)
+
+ interface_extra_jars = [
+ p for p in interface_extra_jars if p not in javac_interface_classpath
+ ]
+ javac_interface_classpath.extend(interface_extra_jars)
+ javac_full_interface_classpath.extend(
+ p for p in interface_extra_jars
+ if p not in javac_full_interface_classpath)
+
+ if is_java_target or options.type == 'android_app_bundle':
+ # The classpath to use to run this target (or as an input to ProGuard).
+ java_full_classpath = []
+ if is_java_target and options.jar_path:
+ java_full_classpath.append(options.jar_path)
+ java_full_classpath.extend(c['jar_path'] for c in all_library_deps)
+ if options.type == 'android_app_bundle':
+ for d in deps.Direct('android_app_bundle_module'):
+ java_full_classpath.extend(
+ c for c in d.get('java_runtime_classpath', [])
+ if c not in java_full_classpath)
+
+ all_configs = build_utils.ParseGnList(options.proguard_configs)
+ deps_info['proguard_configs'] = list(all_configs)
+ extra_jars = []
+
+ if is_static_library_dex_provider_target:
+ # Map classpath entries to configs that include them in their classpath.
+ configs_by_classpath_entry = collections.defaultdict(list)
+ for config_path, dep_config in (
+ static_library_dependent_configs_by_path.iteritems()):
+ all_configs.extend(dep_config['proguard_all_configs'])
+ extra_jars.extend(dep_config['proguard_classpath_jars'])
+ all_java_sources.extend(dep_config['jni']['all_source'])
+ for cp_entry in dep_config['java_runtime_classpath']:
+ # The APK Java targets for the static library dependent targets will
+ # have some of the same classes (R.java) due to shared resource
+ # dependencies. To avoid Proguard failures due to duplicate classes, we
+ # merge the APK jars into the static library's jar_path as a
+ # preprocessing build step.
+ if cp_entry != dep_config['jar_path']:
+ configs_by_classpath_entry[cp_entry].append(config_path)
+
+ for cp_entry in java_full_classpath:
+ configs_by_classpath_entry[cp_entry].append(options.build_config)
+
+ # Map configs to classpath entries that should be included in their final
+ # dex.
+ classpath_entries_by_owning_config = collections.defaultdict(list)
+ for cp_entry, candidate_configs in configs_by_classpath_entry.iteritems():
+ config_path = (candidate_configs[0]
+ if len(candidate_configs) == 1 else options.build_config)
+ classpath_entries_by_owning_config[config_path].append(cp_entry)
+ java_full_classpath.append(cp_entry)
+
+ classpath_entries_by_owning_config[options.build_config].append(
+ deps_info['jar_path'])
+
+ java_full_classpath = sorted(set(java_full_classpath))
+ deps_info['static_library_dependent_classpath_configs'] = {
+ path: sorted(set(classpath))
+ for path, classpath in classpath_entries_by_owning_config.iteritems()
+ }
+
+ if is_apk_or_module_target or options.type in ('group', 'java_library',
+ 'junit_binary'):
+ deps_info['jni']['all_source'] = sorted(set(all_java_sources))
+
+ system_jars = [c['jar_path'] for c in system_library_deps]
+ system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
+ if system_library_deps:
+ config['android'] = {}
+ config['android']['sdk_interface_jars'] = system_interface_jars
+ config['android']['sdk_jars'] = system_jars
+
+ if options.type in ('android_apk', 'dist_aar',
+ 'dist_jar', 'android_app_bundle_module', 'android_app_bundle'):
+ for c in all_library_deps:
+ all_configs.extend(p for p in c.get('proguard_configs', []))
+ extra_jars.extend(p for p in c.get('extra_classpath_jars', []))
+ for c in all_group_deps:
+ extra_jars.extend(p for p in c.get('extra_classpath_jars', []))
+ if options.type == 'android_app_bundle':
+ for d in deps.Direct('android_app_bundle_module'):
+ all_configs.extend(p for p in d.get('proguard_configs', []))
+ extra_jars.extend(
+ c for c in d.get('proguard_classpath_jars', [])
+ if c not in extra_jars)
+
+ if options.type == 'android_app_bundle':
+ deps_proguard_enabled = []
+ deps_proguard_disabled = []
+ for d in deps.Direct('android_app_bundle_module'):
+ if not d['java_runtime_classpath']:
+ # We don't care about modules that have no Java code for proguarding.
+ continue
+ if d['proguard_enabled']:
+ deps_proguard_enabled.append(d['name'])
+ else:
+ deps_proguard_disabled.append(d['name'])
+ if deps_proguard_enabled and deps_proguard_disabled:
+ raise Exception('Deps %s have proguard enabled while deps %s have '
+ 'proguard disabled' % (deps_proguard_enabled,
+ deps_proguard_disabled))
+ else:
+ deps_info['proguard_enabled'] = bool(options.proguard_enabled)
+ if options.proguard_mapping_path:
+ deps_info['proguard_mapping_path'] = options.proguard_mapping_path
+
+ # The java code for an instrumentation test apk is assembled differently for
+ # ProGuard vs. non-ProGuard.
+ #
+ # Without ProGuard: Each library's jar is dexed separately and then combined
+ # into a single classes.dex. A test apk will include all dex files not already
+ # present in the apk-under-test. At runtime all test code lives in the test
+ # apk, and the program code lives in the apk-under-test.
+ #
+ # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs
+ # a single .jar, which is then dexed into a classes.dex. A test apk includes
+ # all jar files from the program and the tests because having them separate
+ # doesn't work with ProGuard's whole-program optimizations. Although the
+ # apk-under-test still has all of its code in its classes.dex, none of it is
+ # used at runtime because the copy of it within the test apk takes precedence.
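+ # For example (illustrative target names): if the apk-under-test provides
+ # base_java.dex.jar and the test target adds test_support.dex.jar, the
+ # non-ProGuard test apk ships only test_support's dex, while the ProGuard
+ # test apk ships a single proguarded classes.dex built from both jars.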
+ if options.type == 'android_apk' and options.tested_apk_config:
+ tested_apk_config = GetDepConfig(options.tested_apk_config)
+ if tested_apk_config['proguard_enabled']:
+ assert options.proguard_enabled, ('proguard must be enabled for '
+ 'instrumentation apks if it\'s enabled for the tested apk.')
+ # Mutating lists, so no need to explicitly re-assign to dict.
+ all_configs.extend(p for p in tested_apk_config['proguard_all_configs'])
+ extra_jars.extend(p for p in tested_apk_config['proguard_classpath_jars'])
+ deps_info['proguard_under_test_mapping'] = (
+ tested_apk_config['proguard_mapping_path'])
+ elif options.proguard_enabled:
+ # Not sure why you'd want to proguard the test apk when the under-test apk
+ # is not proguarded, but it's easy enough to support.
+ deps_info['proguard_under_test_mapping'] = ''
+
+ expected_tested_package = tested_apk_config['package_name']
+ AndroidManifest(options.android_manifest).CheckInstrumentationElements(
+ expected_tested_package)
+
+ # Add all tested classes to the test's classpath to ensure that the test's
+ # java code is a superset of the tested apk's java code
+ java_full_classpath.extend(
+ p for p in tested_apk_config['java_runtime_classpath']
+ if p not in java_full_classpath)
+ # Include in the classpath classes that are added directly to the apk under
+ # test (those that are not a part of a java_library).
+ javac_classpath.append(tested_apk_config['unprocessed_jar_path'])
+ javac_full_classpath.append(tested_apk_config['unprocessed_jar_path'])
+ javac_interface_classpath.append(tested_apk_config['interface_jar_path'])
+ javac_full_interface_classpath.append(
+ tested_apk_config['interface_jar_path'])
+ javac_full_interface_classpath.extend(
+ p for p in tested_apk_config['javac_full_interface_classpath']
+ if p not in javac_full_interface_classpath)
+ javac_full_classpath.extend(
+ p for p in tested_apk_config['javac_full_classpath']
+ if p not in javac_full_classpath)
+
+ # Exclude dex files from the test apk that exist within the apk under test.
+ # TODO(agrieve): When proguard is enabled, this filtering logic happens
+ # within proguard_util.py. Move the logic for the proguard case into
+ # here as well.
+ tested_apk_library_deps = tested_apk_deps.All('java_library')
+ tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
+ deps_dex_files = [
+ p for p in deps_dex_files if p not in tested_apk_deps_dex_files]
+
+ if options.type in ('android_apk', 'dist_aar', 'dist_jar',
+ 'android_app_bundle_module', 'android_app_bundle'):
+ deps_info['proguard_all_configs'] = sorted(set(all_configs))
+ deps_info['proguard_classpath_jars'] = sorted(set(extra_jars))
+
+ # Dependencies for the final dex file of an apk.
+ if is_apk_or_module_target:
+ config['final_dex'] = {}
+ dex_config = config['final_dex']
+ dex_config['dependency_dex_files'] = deps_dex_files
+ dex_config['path'] = options.final_dex_path
+
+ if is_java_target:
+ config['javac']['classpath'] = javac_classpath
+ config['javac']['interface_classpath'] = javac_interface_classpath
+ # Direct() will be of type 'java_annotation_processor'.
+ config['javac']['processor_classpath'] = [
+ c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [
+ c['jar_path'] for c in processor_deps.All('java_library')]
+ config['javac']['processor_classes'] = [
+ c['main_class'] for c in processor_deps.Direct()]
+ deps_info['javac_full_classpath'] = javac_full_classpath
+ deps_info['javac_full_interface_classpath'] = javac_full_interface_classpath
+ elif options.type == 'android_app_bundle':
+ # bundles require javac_full_classpath to create .aab.jar.info.
+ javac_full_classpath = set()
+ for d in deps.Direct('android_app_bundle_module'):
+ javac_full_classpath.update(p for p in d['javac_full_classpath'])
+ javac_full_classpath.add(d['jar_path'])
+ deps_info['javac_full_classpath'] = sorted(javac_full_classpath)
+
+ if options.type in ('android_apk', 'dist_jar', 'java_binary', 'junit_binary',
+ 'android_app_bundle_module', 'android_app_bundle'):
+ deps_info['java_runtime_classpath'] = java_full_classpath
+
+ if options.type in ('android_apk', 'dist_jar'):
+ all_interface_jars = []
+ if options.jar_path:
+ all_interface_jars.append(options.interface_jar_path)
+ all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
+
+ config['dist_jar'] = {
+ 'all_interface_jars': all_interface_jars,
+ }
+
+ if is_apk_or_module_target:
+ manifest = AndroidManifest(options.android_manifest)
+ deps_info['package_name'] = manifest.GetPackageName()
+ if not options.tested_apk_config and manifest.GetInstrumentationElements():
+ # This must then have instrumentation only for itself.
+ manifest.CheckInstrumentationElements(manifest.GetPackageName())
+
+ library_paths = []
+ java_libraries_list = None
+ if options.shared_libraries_runtime_deps:
+ library_paths = _ExtractSharedLibsFromRuntimeDeps(
+ options.shared_libraries_runtime_deps)
+ java_libraries_list = _CreateJavaLibrariesList(library_paths)
+ all_inputs.append(options.shared_libraries_runtime_deps)
+
+ secondary_abi_library_paths = []
+ if options.secondary_abi_shared_libraries_runtime_deps:
+ secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps(
+ options.secondary_abi_shared_libraries_runtime_deps)
+ all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
+
+ secondary_abi_library_paths.extend(
+ build_utils.ParseGnList(options.secondary_native_libs))
+
+ native_library_placeholder_paths = build_utils.ParseGnList(
+ options.native_lib_placeholders)
+
+ secondary_native_library_placeholder_paths = build_utils.ParseGnList(
+ options.secondary_native_lib_placeholders)
+
+ extra_shared_libraries = build_utils.ParseGnList(options.native_libs)
+
+ config['native'] = {
+ 'libraries':
+ library_paths,
+ 'native_library_placeholders':
+ native_library_placeholder_paths,
+ 'secondary_abi_libraries':
+ secondary_abi_library_paths,
+ 'secondary_native_library_placeholders':
+ secondary_native_library_placeholder_paths,
+ 'java_libraries_list':
+ java_libraries_list,
+ 'uncompress_shared_libraries':
+ options.uncompress_shared_libraries,
+ 'extra_shared_libraries':
+ extra_shared_libraries,
+ }
+ config['assets'], config['uncompressed_assets'], locale_paks = (
+ _MergeAssets(deps.All('android_assets')))
+ config['compressed_locales_java_list'] = _CreateJavaLocaleListFromAssets(
+ config['assets'], locale_paks)
+ config['uncompressed_locales_java_list'] = _CreateJavaLocaleListFromAssets(
+ config['uncompressed_assets'], locale_paks)
+
+ config['extra_android_manifests'] = filter(None, (
+ d.get('android_manifest') for d in all_resources_deps))
+
+ # Collect java resources
+ java_resources_jars = [d['java_resources_jar'] for d in all_library_deps
+ if 'java_resources_jar' in d]
+ if options.tested_apk_config:
+ tested_apk_resource_jars = [d['java_resources_jar']
+ for d in tested_apk_library_deps
+ if 'java_resources_jar' in d]
+ java_resources_jars = [jar for jar in java_resources_jars
+ if jar not in tested_apk_resource_jars]
+ config['java_resources_jars'] = java_resources_jars
+
+ if options.java_resources_jar_path:
+ deps_info['java_resources_jar'] = options.java_resources_jar_path
+
+ build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+ if options.depfile:
+ build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs,
+ add_pydeps=False) # pydeps listed in GN.
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/write_build_config.pydeps b/deps/v8/build/android/gyp/write_build_config.pydeps
new file mode 100644
index 0000000000..e317c47cae
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_build_config.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+write_build_config.py
diff --git a/deps/v8/build/android/gyp/write_ordered_libraries.py b/deps/v8/build/android/gyp/write_ordered_libraries.py
new file mode 100755
index 0000000000..61da64ce26
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, the --input-libraries only needs to be the top-level
+library (i.e. libcontent_shell_content_view). This will then use readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+_readelf = None
+
+_library_re = re.compile(
+ r'.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
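+# Example `readelf -d` output line matched by the regex above (the library
+# name is illustrative):
+#  0x0000000000000001 (NEEDED)  Shared library: [libbase.so]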
+
+_library_path_map = {}
+
+
+def SetReadelfPath(path):
+ global _readelf
+ _readelf = path
+
+
+def CallReadElf(library_or_executable):
+ assert _readelf is not None
+ readelf_cmd = [_readelf, '-d', library_or_executable]
+ return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+ elf = CallReadElf(library_or_executable)
+ deps = []
+ for l in _library_re.findall(elf):
+ p = _library_path_map.get(l)
+ if p is not None:
+ deps.append(p)
+ return deps
+
+
+def GetSortedTransitiveDependencies(libraries):
+ """Returns all transitive library dependencies in dependency order."""
+ return build_utils.GetSortedTransitiveDependencies(
+ libraries, GetDependencies)
+
+
+def main():
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--readelf', help='Path to the readelf binary.')
+ parser.add_option('--runtime-deps',
+ help='A file created for the target using write_runtime_deps.')
+ parser.add_option('--exclude-shared-libraries',
+ help='List of shared libraries to exclude from the output.')
+ parser.add_option('--output', help='Path to the generated .json file.')
+
+ options, _ = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+ SetReadelfPath(options.readelf)
+
+ unsorted_lib_paths = []
+ exclude_shared_libraries = []
+ if options.exclude_shared_libraries:
+ exclude_shared_libraries = options.exclude_shared_libraries.split(',')
+ for f in open(options.runtime_deps):
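+ # Strip the trailing newline.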
+ f = f[:-1]
+ if f.endswith('.so'):
+ p = f.replace('lib.unstripped/', '')
+ if os.path.basename(p) in exclude_shared_libraries:
+ continue
+ unsorted_lib_paths.append(p)
+ _library_path_map[os.path.basename(p)] = p
+
+ lib_paths = GetSortedTransitiveDependencies(unsorted_lib_paths)
+
+ libraries = [os.path.basename(l) for l in lib_paths]
+
+ # Convert to "base" library names: e.g. libfoo.so -> foo
+ java_libraries_list = (
+ '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+ out_json = {
+ 'libraries': libraries,
+ 'lib_paths': lib_paths,
+ 'java_libraries_list': java_libraries_list
+ }
+ build_utils.WriteJson(
+ out_json,
+ options.output,
+ only_if_changed=True)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile, options.output, lib_paths, add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/write_ordered_libraries.pydeps b/deps/v8/build/android/gyp/write_ordered_libraries.pydeps
new file mode 100644
index 0000000000..c2ed1fee36
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_ordered_libraries.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_ordered_libraries.pydeps build/android/gyp/write_ordered_libraries.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+write_ordered_libraries.py
diff --git a/deps/v8/build/android/gyp/zip.py b/deps/v8/build/android/gyp/zip.py
new file mode 100755
index 0000000000..b9503960fa
--- /dev/null
+++ b/deps/v8/build/android/gyp/zip.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Archives a set of files."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input-files', help='GN-list of files to zip.')
+ parser.add_argument(
+ '--input-files-base-dir',
+ help='Paths in the archive will be relative to this directory')
+ parser.add_argument('--input-zips', help='GN-list of zips to merge.')
+ parser.add_argument(
+ '--input-zips-excluded-globs',
+ help='GN-list of globs for paths to exclude.')
+ parser.add_argument('--output', required=True, help='Path to output archive.')
+ compress_group = parser.add_mutually_exclusive_group()
+ compress_group.add_argument(
+ '--compress', action='store_true', help='Compress entries')
+ compress_group.add_argument(
+ '--no-compress',
+ action='store_false',
+ dest='compress',
+ help='Do not compress entries')
+ build_utils.AddDepfileOption(parser)
+ options = parser.parse_args(args)
+
+ with build_utils.AtomicOutput(options.output) as f:
+ with zipfile.ZipFile(f.name, 'w') as out_zip:
+ depfile_deps = None
+ if options.input_files:
+ files = build_utils.ParseGnList(options.input_files)
+ build_utils.DoZip(
+ files,
+ out_zip,
+ base_dir=options.input_files_base_dir,
+ compress_fn=lambda _: options.compress)
+
+ if options.input_zips:
+ files = build_utils.ParseGnList(options.input_zips)
+ depfile_deps = files
+ path_transform = None
+ if options.input_zips_excluded_globs:
+ globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
+ path_transform = (
+ lambda p: None if build_utils.MatchesGlob(p, globs) else p)
+ build_utils.MergeZips(
+ out_zip,
+ files,
+ path_transform=path_transform,
+ compress=options.compress)
+
+ # Depfile used only by dist_jar().
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile, options.output, inputs=depfile_deps, add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/zip.pydeps b/deps/v8/build/android/gyp/zip.pydeps
new file mode 100644
index 0000000000..ce99648ca0
--- /dev/null
+++ b/deps/v8/build/android/gyp/zip.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+zip.py
diff --git a/deps/v8/build/android/host_heartbeat.py b/deps/v8/build/android/host_heartbeat.py
new file mode 100755
index 0000000000..89905928ec
--- /dev/null
+++ b/deps/v8/build/android/host_heartbeat.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+import devil_chromium
+from devil.android import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+ devil_chromium.Initialize()
+
+ while True:
+ try:
+ devices = device_utils.DeviceUtils.HealthyDevices(blacklist=None)
+ for d in devices:
+ d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+ check_return=True)
+ except:
+ # Keep the heartbeat running, ignoring all errors.
+ pass
+ time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/incremental_install/BUILD.gn b/deps/v8/build/android/incremental_install/BUILD.gn
new file mode 100644
index 0000000000..3093c3a710
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+android_library("bootstrap_java") {
+ # Use .dex rather than .dex.jar to be usable by package_apk().
+ dex_path = "$target_gen_dir/bootstrap.dex"
+ java_files = [
+ "java/org/chromium/incrementalinstall/BootstrapApplication.java",
+ "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
+ "java/org/chromium/incrementalinstall/ClassLoaderPatcher.java",
+ "java/org/chromium/incrementalinstall/LockFile.java",
+ "java/org/chromium/incrementalinstall/Reflect.java",
+ "java/org/chromium/incrementalinstall/SecondInstrumentation.java",
+ ]
+ emma_never_instrument = true
+ no_build_hooks = true
+}
diff --git a/deps/v8/build/android/incremental_install/README.md b/deps/v8/build/android/incremental_install/README.md
new file mode 100644
index 0000000000..0916e07d23
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/README.md
@@ -0,0 +1,81 @@
+# Incremental Install
+
+Incremental Install is a way of building & deploying an APK that tries to
+minimize the time it takes to make a change and see that change running on
+device. It works best with `is_component_build=true`, and does *not* require a
+rooted device.
+
+## Building
+
+**Option 1:** Add the gn arg:
+
+ incremental_apk_by_default = true
+
+This causes all apks to be built as incremental (except for blacklisted ones).
+
+**Option 2:** Add `_incremental` to the apk target name. E.g.:
+
+ ninja -C out/Debug chrome_public_apk_incremental
+ ninja -C out/Debug chrome_public_test_apk_incremental
+
+## Running
+
+It is not enough to `adb install` them. You must use a generated wrapper script:
+
+ out/Debug/bin/install_chrome_public_apk_incremental
+ out/Debug/bin/run_chrome_public_test_apk_incremental # Automatically sets --fast-local-dev
+
+## Caveats
+
+Isolated processes (on L+) are incompatible with incremental install. As a
+work-around, you can disable isolated processes only for incremental apks using
+the gn arg:
+
+ disable_incremental_isolated_processes = true
+
+# How it Works
+
+## Overview
+
+The basic idea is to side-load .dex and .so files to `/data/local/tmp` rather
+than bundling them in the .apk. Then, when making a change, only the changed
+.dex / .so needs to be pushed to the device.
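+
+For example, for an apk whose package name is `org.chromium.chrome` (an
+illustrative name), the side-loaded files end up under roughly this layout:
+
+ /data/local/tmp/incremental-app-org.chromium.chrome/
+ install.lock # app start-up pauses until the installer releases this
+ firstrun.lock # secondary processes wait on this during first run
+ dex/ # side-loaded .dex files
+ lib/ # side-loaded .so files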
+
+Faster Builds:
+
+ * No `final_dex` step (where all .dex files are merged into one)
+ * No need to rebuild .apk for code-only changes (but required for resources)
+ * Apks sign faster because they are smaller.
+
+Faster Installs:
+
+ * The .apk is smaller, and so faster to verify.
+ * No need to run `adb install` for code-only changes.
+ * Only changed .so / .dex files are pushed. MD5s of existing on-device files
+ are cached on host computer.
+
+Slower Initial Runs:
+
+ * The first time you run an incremental .apk, `DexOpt` needs to run on all
+ .dex files. This step is normally done during `adb install`, but is done on
+ start-up for incremental apks.
+ * DexOpt results are cached, so subsequent runs are much faster.
+
+## The Code
+
+All incremental apks have the same classes.dex, which is built from:
+
+ //build/android/incremental_install:bootstrap_java
+
+They also have a transformed `AndroidManifest.xml`, which overrides the
+main application class and any instrumentation classes so that they instead
+point to `BootstrapApplication`. This is built by:
+
+ //build/android/incremental_install/generate_android_manifest.py
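+
+The transformed manifest ends up looking roughly like this (the real
+application class shown is illustrative):
+
+ <application android:name="org.chromium.incrementalinstall.BootstrapApplication" ...>
+ <meta-data android:name="incremental-install-real-app"
+ android:value="org.chromium.chrome.browser.ChromeApplication" />
+ </application>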
+
+Wrapper scripts and install logic are contained in:
+
+ //build/android/incremental_install/create_install_script.py
+ //build/android/incremental_install/installer.py
+
+Finally, GN logic for incremental apks is sprinkled throughout.
diff --git a/deps/v8/build/android/incremental_install/__init__.py b/deps/v8/build/android/incremental_install/__init__.py
new file mode 100644
index 0000000000..50b23dff63
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/incremental_install/generate_android_manifest.py b/deps/v8/build/android/incremental_install/generate_android_manifest.py
new file mode 100755
index 0000000000..bf38f4e601
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/generate_android_manifest.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an incremental APK.
+
+Given the manifest file for the real APK, generates an AndroidManifest.xml with
+the application class changed to IncrementalApplication.
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp'))
+from util import build_utils
+from util import resource_utils
+
+_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication'
+_META_DATA_APP_NAME = 'incremental-install-real-app'
+_DEFAULT_APPLICATION_CLASS = 'android.app.Application'
+_META_DATA_INSTRUMENTATION_NAMES = [
+ 'incremental-install-real-instrumentation-0',
+ 'incremental-install-real-instrumentation-1',
+]
+_INCREMENTAL_INSTRUMENTATION_CLASSES = [
+ 'android.app.Instrumentation',
+ 'org.chromium.incrementalinstall.SecondInstrumentation',
+]
+
+
+def _AddNamespace(name):
+ """Adds the android namespace prefix to the given identifier."""
+ return '{%s}%s' % (resource_utils.ANDROID_NAMESPACE, name)
+
+
+def _ParseArgs(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--src-manifest', required=True, help='The main manifest of the app')
+ parser.add_argument('--disable-isolated-processes',
+ help='Changes all android:isolatedProcess to false. '
+ 'This is required on Android M+',
+ action='store_true')
+ parser.add_argument(
+ '--out-apk', required=True, help='Path to output .ap_ file')
+ parser.add_argument(
+ '--in-apk', required=True, help='Path to non-incremental .ap_ file')
+ parser.add_argument(
+ '--aapt2-path', required=True, help='Path to the Android aapt2 tool')
+ parser.add_argument(
+ '--android-sdk-jars', help='GN List of resource apks to include.')
+
+ ret = parser.parse_args(build_utils.ExpandFileArgs(args))
+ ret.android_sdk_jars = build_utils.ParseGnList(ret.android_sdk_jars)
+ return ret
+
+
+def _CreateMetaData(parent, name, value):
+ meta_data_node = ElementTree.SubElement(parent, 'meta-data')
+ meta_data_node.set(_AddNamespace('name'), name)
+ meta_data_node.set(_AddNamespace('value'), value)
+
+
+def _ProcessManifest(path, arsc_package_name, disable_isolated_processes):
+ doc, manifest_node, app_node = resource_utils.ParseAndroidManifest(path)
+
+ # Ensure the manifest package matches that of the apk's arsc package so
+ # that resource references resolve correctly. The actual manifest
+ # package name is set via --rename-manifest-package.
+ manifest_node.set('package', arsc_package_name)
+
+ # Pylint for some reason thinks app_node is an int.
+ # pylint: disable=no-member
+ real_app_class = app_node.get(_AddNamespace('name'),
+ _DEFAULT_APPLICATION_CLASS)
+ app_node.set(_AddNamespace('name'), _INCREMENTAL_APP_NAME)
+ # pylint: enable=no-member
+ _CreateMetaData(app_node, _META_DATA_APP_NAME, real_app_class)
+
+ # Seems to be a bug in ElementTree, as doc.find() doesn't work here.
+ instrumentation_nodes = doc.findall('instrumentation')
+ assert len(instrumentation_nodes) <= 2, (
+ 'Need to update incremental install to support >2 <instrumentation> tags')
+ for i, instrumentation_node in enumerate(instrumentation_nodes):
+ real_instrumentation_class = instrumentation_node.get(_AddNamespace('name'))
+ instrumentation_node.set(_AddNamespace('name'),
+ _INCREMENTAL_INSTRUMENTATION_CLASSES[i])
+ _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAMES[i],
+ real_instrumentation_class)
+
+ ret = ElementTree.tostring(doc.getroot(), encoding='UTF-8')
+ # Disable check for page-aligned native libraries.
+ ret = ret.replace('extractNativeLibs="false"', 'extractNativeLibs="true"')
+ if disable_isolated_processes:
+ ret = ret.replace('isolatedProcess="true"', 'isolatedProcess="false"')
+ return ret
+
+
+def main(raw_args):
+ options = _ParseArgs(raw_args)
+
+ arsc_package, _ = resource_utils.ExtractArscPackage(options.aapt2_path,
+ options.in_apk)
+ # Extract version from the compiled manifest since it might have been set
+ # via aapt, and not exist in the manifest's text form.
+ version_code, version_name, manifest_package = (
+ resource_utils.ExtractBinaryManifestValues(options.aapt2_path,
+ options.in_apk))
+
+ new_manifest_data = _ProcessManifest(options.src_manifest, arsc_package,
+ options.disable_isolated_processes)
+ with tempfile.NamedTemporaryFile() as tmp_manifest, \
+ tempfile.NamedTemporaryFile() as tmp_apk:
+ tmp_manifest.write(new_manifest_data)
+ tmp_manifest.flush()
+ cmd = [
+ options.aapt2_path, 'link', '-o', tmp_apk.name, '--manifest',
+ tmp_manifest.name, '-I', options.in_apk, '--replace-version',
+ '--version-code', version_code, '--version-name', version_name,
+ '--rename-manifest-package', manifest_package, '--debug-mode'
+ ]
+ for j in options.android_sdk_jars:
+ cmd += ['-I', j]
+ subprocess.check_call(cmd)
+ with zipfile.ZipFile(options.out_apk, 'w') as z:
+ path_transform = lambda p: None if p != 'AndroidManifest.xml' else p
+ build_utils.MergeZips(z, [tmp_apk.name], path_transform=path_transform)
+ path_transform = lambda p: None if p == 'AndroidManifest.xml' else p
+ build_utils.MergeZips(z, [options.in_apk], path_transform=path_transform)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/incremental_install/generate_android_manifest.pydeps b/deps/v8/build/android/incremental_install/generate_android_manifest.pydeps
new file mode 100644
index 0000000000..21b49c6f74
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/generate_android_manifest.pydeps
@@ -0,0 +1,29 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+../gyp/util/md5_check.py
+../gyp/util/resource_utils.py
+generate_android_manifest.py
diff --git a/deps/v8/build/android/incremental_install/installer.py b/deps/v8/build/android/incremental_install/installer.py
new file mode 100755
index 0000000000..95475b1621
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/installer.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install *_incremental.apk targets as well as their dependent files."""
+
+import argparse
+import glob
+import json
+import logging
+import os
+import posixpath
+import shutil
+import sys
+import zipfile
+
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_utils
+from devil.android.sdk import version_codes
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.utils import time_profile
+
+prev_sys_path = list(sys.path)
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+from util import build_utils
+sys.path = prev_sys_path
+
+
+def _DeviceCachePath(device):
+ file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+ return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def _TransformDexPaths(paths):
+ """Given paths like ["/a/b/c", "/a/c/d"], returns ["b.c", "c.d"]."""
+ if len(paths) == 1:
+ return [os.path.basename(paths[0])]
+
+ prefix_len = len(os.path.commonprefix(paths))
+ return [p[prefix_len:].replace(os.sep, '.') for p in paths]
+
+
+def _Execute(concurrently, *funcs):
+ """Calls all functions in |funcs| concurrently or in sequence."""
+ timer = time_profile.TimeProfile()
+ if concurrently:
+ reraiser_thread.RunAsync(funcs)
+ else:
+ for f in funcs:
+ f()
+ timer.Stop(log=False)
+ return timer
+
+
+def _GetDeviceIncrementalDir(package):
+ """Returns the device path to put incremental files for the given package."""
+ return '/data/local/tmp/incremental-app-%s' % package
+
+
+def _HasClasses(jar_path):
+ """Returns whether the given jar contains classes.dex."""
+ with zipfile.ZipFile(jar_path) as jar:
+ return 'classes.dex' in jar.namelist()
+
+
+def Uninstall(device, package, enable_device_cache=False):
+ """Uninstalls and removes all incremental files for the given package."""
+ main_timer = time_profile.TimeProfile()
+ device.Uninstall(package)
+ if enable_device_cache:
+ # Uninstall is rare, so just wipe the cache in this case.
+ cache_path = _DeviceCachePath(device)
+ if os.path.exists(cache_path):
+ os.unlink(cache_path)
+ device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)],
+ check_return=True)
+ logging.info('Uninstall took %s seconds.', main_timer.GetDelta())
+
+
+def Install(device, install_json, apk=None, enable_device_cache=False,
+ use_concurrency=True, permissions=()):
+ """Installs the given incremental apk and all required supporting files.
+
+ Args:
+ device: A DeviceUtils instance (to install to).
+ install_json: Path to .json file or already parsed .json object.
+ apk: An existing ApkHelper instance for the apk (optional).
+ enable_device_cache: Whether to enable on-device caching of checksums.
+ use_concurrency: Whether to speed things up using multiple threads.
+ permissions: A list of the permissions to grant, or None to grant all
+ non-blacklisted permissions in the manifest.
+ """
+ if isinstance(install_json, basestring):
+ with open(install_json) as f:
+ install_dict = json.load(f)
+ else:
+ install_dict = install_json
+
+ if install_dict.get('dont_even_try'):
+ raise Exception(install_dict['dont_even_try'])
+
+ main_timer = time_profile.TimeProfile()
+ install_timer = time_profile.TimeProfile()
+ push_native_timer = time_profile.TimeProfile()
+ push_dex_timer = time_profile.TimeProfile()
+
+ def fix_path(p):
+ return os.path.normpath(os.path.join(constants.GetOutDirectory(), p))
+
+ if not apk:
+ apk = apk_helper.ToHelper(fix_path(install_dict['apk_path']))
+ split_globs = [fix_path(p) for p in install_dict['split_globs']]
+ native_libs = [fix_path(p) for p in install_dict['native_libs']]
+ dex_files = [fix_path(p) for p in install_dict['dex_files']]
+ show_proguard_warning = install_dict.get('show_proguard_warning')
+
+ apk_package = apk.GetPackageName()
+ device_incremental_dir = _GetDeviceIncrementalDir(apk_package)
+
+ # Install .apk(s) if any of them have changed.
+ def do_install():
+ install_timer.Start()
+ if split_globs:
+ splits = []
+ for split_glob in split_globs:
+ splits.extend(glob.glob(split_glob))
+ device.InstallSplitApk(
+ apk,
+ splits,
+ allow_downgrade=True,
+ reinstall=True,
+ allow_cached_props=True,
+ permissions=permissions)
+ else:
+ device.Install(
+ apk, allow_downgrade=True, reinstall=True, permissions=permissions)
+ install_timer.Stop(log=False)
+
+ # Push .so and .dex files to the device (if they have changed).
+ def do_push_files():
+ push_native_timer.Start()
+ if native_libs:
+ with build_utils.TempDir() as temp_dir:
+ device_lib_dir = posixpath.join(device_incremental_dir, 'lib')
+ for path in native_libs:
+ # Note: Can't use symlinks as they don't work when
+ # "adb push parent_dir" is used (like we do here).
+ shutil.copy(path, os.path.join(temp_dir, os.path.basename(path)))
+ device.PushChangedFiles([(temp_dir, device_lib_dir)],
+ delete_device_stale=True)
+ push_native_timer.Stop(log=False)
+
+ push_dex_timer.Start()
+ if dex_files:
+ # Put all .dex files to be pushed into a temporary directory so that we
+ # can use delete_device_stale=True.
+ with build_utils.TempDir() as temp_dir:
+ device_dex_dir = posixpath.join(device_incremental_dir, 'dex')
+ # Ensure no two files have the same name.
+ transformed_names = _TransformDexPaths(dex_files)
+ for src_path, dest_name in zip(dex_files, transformed_names):
+ # Binary targets with no extra classes create .dex.jar without a
+ # classes.dex (which Android chokes on).
+ if _HasClasses(src_path):
+ shutil.copy(src_path, os.path.join(temp_dir, dest_name))
+ device.PushChangedFiles([(temp_dir, device_dex_dir)],
+ delete_device_stale=True)
+ push_dex_timer.Stop(log=False)
+
+ def check_selinux():
+ # Marshmallow has no filesystem access whatsoever. It might be possible to
+ # get things working on Lollipop, but attempts so far have failed.
+ # http://crbug.com/558818
+ has_selinux = device.build_version_sdk >= version_codes.LOLLIPOP
+ if has_selinux and apk.HasIsolatedProcesses():
+ raise Exception('Cannot use incremental installs on Android L+ without '
+ 'first disabling isolated processes.\n'
+ 'To do so, use GN arg:\n'
+ ' disable_incremental_isolated_processes=true')
+
+ cache_path = _DeviceCachePath(device)
+ def restore_cache():
+ if not enable_device_cache:
+ return
+ if os.path.exists(cache_path):
+ logging.info('Using device cache: %s', cache_path)
+ with open(cache_path) as f:
+ device.LoadCacheData(f.read())
+ # Delete the cached file so that any exceptions cause it to be cleared.
+ os.unlink(cache_path)
+ else:
+ logging.info('No device cache present: %s', cache_path)
+
+ def save_cache():
+ if not enable_device_cache:
+ return
+ with open(cache_path, 'w') as f:
+ f.write(device.DumpCacheData())
+ logging.info('Wrote device cache: %s', cache_path)
+
+ # Create 2 lock files:
+ # * install.lock tells the app to pause on start-up (until we release it).
+ # * firstrun.lock is used by the app to pause all secondary processes until
+ # the primary process finishes loading the .dex / .so files.
+ def create_lock_files():
+ # Creates or zeros out lock files.
+ cmd = ('D="%s";'
+ 'mkdir -p $D &&'
+ 'echo -n >$D/install.lock 2>$D/firstrun.lock')
+ device.RunShellCommand(
+ cmd % device_incremental_dir, shell=True, check_return=True)
+
+ # The firstrun.lock is released by the app itself.
+ def release_installer_lock():
+ device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
+ check_return=True, shell=True)
+
+ # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
+ # been designed for multi-threading. Enabling only because this is a
+ # developer-only tool.
+ setup_timer = _Execute(
+ use_concurrency, create_lock_files, restore_cache, check_selinux)
+
+ _Execute(use_concurrency, do_install, do_push_files)
+
+ finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache)
+
+ logging.info(
+ 'Install of %s took %s seconds '
+ '(setup=%s, install=%s, libs=%s, dex=%s, finalize=%s)',
+ os.path.basename(apk.path), main_timer.GetDelta(), setup_timer.GetDelta(),
+ install_timer.GetDelta(), push_native_timer.GetDelta(),
+ push_dex_timer.GetDelta(), finalize_timer.GetDelta())
+ if show_proguard_warning:
+ logging.warning('Target had proguard enabled, but incremental install uses '
+ 'non-proguarded .dex files. Performance characteristics '
+ 'may differ.')
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('json_path',
+ help='The path to the generated incremental apk .json.')
+ parser.add_argument('-d', '--device', dest='device',
+ help='Target device for apk to install on.')
+ parser.add_argument('--uninstall',
+ action='store_true',
+ default=False,
+ help='Remove the app and all side-loaded files.')
+ parser.add_argument('--output-directory',
+ help='Path to the root build directory.')
+ parser.add_argument('--no-threading',
+ action='store_false',
+ default=True,
+ dest='threading',
+ help='Do not install and push concurrently')
+ parser.add_argument('--no-cache',
+ action='store_false',
+ default=True,
+ dest='cache',
+ help='Do not use cached information about what files are '
+ 'currently on the target device.')
+ parser.add_argument('-v',
+ '--verbose',
+ dest='verbose_count',
+ default=0,
+ action='count',
+ help='Verbose level (multiple times for more)')
+
+ args = parser.parse_args()
+
+ run_tests_helper.SetLogLevel(args.verbose_count)
+ if args.output_directory:
+ constants.SetOutputDirectory(args.output_directory)
+
+ devil_chromium.Initialize(output_directory=constants.GetOutDirectory())
+
+ # Retries are annoying when commands fail for legitimate reasons. Might want
+ # to enable them if this is ever used on bots though.
+ device = device_utils.DeviceUtils.HealthyDevices(
+ device_arg=args.device,
+ default_retries=0,
+ enable_device_files_cache=True)[0]
+
+ if args.uninstall:
+ with open(args.json_path) as f:
+ install_dict = json.load(f)
+ apk = apk_helper.ToHelper(install_dict['apk_path'])
+ Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache)
+ else:
+ Install(device, args.json_path, enable_device_cache=args.cache,
+ use_concurrency=args.threading)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
new file mode 100644
index 0000000000..801a4126c2
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
@@ -0,0 +1,288 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Application;
+import android.app.Instrumentation;
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.os.Bundle;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Application that replaces itself with another Application (as defined in
+ * an AndroidManifest.xml meta-data tag). It loads the other application only
+ * after side-loading its .so and .dex files from /data/local/tmp.
+ *
+ * This class is highly dependent on the private implementation details of
+ * Android's ActivityThread.java. However, it has been tested to work with
+ * JellyBean through Marshmallow.
+ */
+public final class BootstrapApplication extends Application {
+ private static final String TAG = "cr.incrementalinstall";
+ private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
+ private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
+ private static final String REAL_INSTRUMENTATION_META_DATA_NAME0 =
+ "incremental-install-real-instrumentation-0";
+ private static final String REAL_INSTRUMENTATION_META_DATA_NAME1 =
+ "incremental-install-real-instrumentation-1";
+
+ private ClassLoaderPatcher mClassLoaderPatcher;
+ private Application mRealApplication;
+ private Instrumentation mOrigInstrumentation;
+ private Instrumentation mRealInstrumentation;
+ private Object mStashedProviderList;
+ private Object mActivityThread;
+ public static DexFile[] sIncrementalDexFiles; // Needed by junit test runner.
+
+ @Override
+ protected void attachBaseContext(Context context) {
+ super.attachBaseContext(context);
+ try {
+ mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
+ "currentActivityThread");
+ mClassLoaderPatcher = new ClassLoaderPatcher(context);
+
+ mOrigInstrumentation =
+ (Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
+ Context instContext = mOrigInstrumentation.getContext();
+ if (instContext == null) {
+ instContext = context;
+ }
+
+ // When running with an instrumentation that lives in a different package from the
+ // application, we must load the dex files and native libraries from both packages.
+ // This logic likely won't work when the instrumentation is incremental, but the app is
+ // non-incremental. This configuration isn't used right now though.
+ String appPackageName = getPackageName();
+ String instPackageName = instContext.getPackageName();
+ boolean instPackageNameDiffers = !appPackageName.equals(instPackageName);
+ Log.i(TAG, "App PackageName: " + appPackageName);
+ if (instPackageNameDiffers) {
+ Log.i(TAG, "Inst PackageName: " + instPackageName);
+ }
+
+ File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName);
+ File appLibDir = new File(appIncrementalRootDir, "lib");
+ File appDexDir = new File(appIncrementalRootDir, "dex");
+ File appInstallLockFile = new File(appIncrementalRootDir, "install.lock");
+ File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock");
+ File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName);
+ File instLibDir = new File(instIncrementalRootDir, "lib");
+ File instDexDir = new File(instIncrementalRootDir, "dex");
+ File instInstallLockFile = new File(instIncrementalRootDir, "install.lock");
+ File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock");
+
+ boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile)
+ || (instPackageNameDiffers
+ && LockFile.installerLockExists(instFirstRunLockFile));
+ if (isFirstRun) {
+ if (mClassLoaderPatcher.mIsPrimaryProcess) {
+ // Wait for incremental_install.py to finish.
+ LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000);
+ LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000);
+ } else {
+ // Wait for the browser process to create the optimized dex files
+ // and copy the library files.
+ LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000);
+ LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000);
+ }
+ }
+
+ mClassLoaderPatcher.importNativeLibs(instLibDir);
+ sIncrementalDexFiles = mClassLoaderPatcher.loadDexFiles(instDexDir);
+ if (instPackageNameDiffers) {
+ mClassLoaderPatcher.importNativeLibs(appLibDir);
+ mClassLoaderPatcher.loadDexFiles(appDexDir);
+ }
+
+ if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) {
+ LockFile.clearInstallerLock(appFirstRunLockFile);
+ if (instPackageNameDiffers) {
+ LockFile.clearInstallerLock(instFirstRunLockFile);
+ }
+ }
+
+ // mInstrumentationAppDir is one of a set of fields that is initialized only when
+ // instrumentation is active.
+ if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) {
+ String metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME0;
+ if (mOrigInstrumentation instanceof SecondInstrumentation) {
+ metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME1;
+ }
+ mRealInstrumentation =
+ initInstrumentation(getClassNameFromMetadata(metaDataName, instContext));
+ } else {
+ Log.i(TAG, "No instrumentation active.");
+ }
+
+ // Even when instrumentation is not enabled, ActivityThread uses a default
+ // Instrumentation instance internally. We hook it here in order to hook into the
+ // call to Instrumentation.onCreate().
+ Reflect.setField(mActivityThread, "mInstrumentation",
+ new BootstrapInstrumentation(this));
+
+ // attachBaseContext() is called from ActivityThread#handleBindApplication() and
+ // Application#mApplication is changed right after we return. Thus, we cannot swap
+ // the Application instances until onCreate() is called.
+ String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
+ Log.i(TAG, "Instantiating " + realApplicationName);
+ Instrumentation anyInstrumentation =
+ mRealInstrumentation != null ? mRealInstrumentation : mOrigInstrumentation;
+ mRealApplication = anyInstrumentation.newApplication(
+ getClassLoader(), realApplicationName, context);
+
+ // Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
+ // all ContentProviders. The ContentProviders break without the correct Application
+ // class being installed, so temporarily pretend there are no providers, and then
+ // instantiate them explicitly within onCreate().
+ disableContentProviders();
+ Log.i(TAG, "Waiting for Instrumentation.onCreate");
+ } catch (Exception e) {
+ throw new RuntimeException("Incremental install failed.", e);
+ }
+ }
+
+ /**
+ * Returns the fully-qualified class name for the given key, stored in a
+ * &lt;meta-data&gt; tag within the manifest.
+ */
+ private static String getClassNameFromMetadata(String key, Context context)
+ throws NameNotFoundException {
+ String pkgName = context.getPackageName();
+ ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
+ PackageManager.GET_META_DATA);
+ String value = appInfo.metaData.getString(key);
+ if (value != null && !value.contains(".")) {
+ value = pkgName + "." + value;
+ }
+ return value;
+ }
+
+ /**
+ * Instantiates and initializes mRealInstrumentation (the real Instrumentation class).
+ */
+ private Instrumentation initInstrumentation(String realInstrumentationName)
+ throws ReflectiveOperationException {
+ if (realInstrumentationName == null) {
+ // This is the case when an incremental app is used as a target for an instrumentation
+ // test. In this case, ActivityThread can instantiate the proper class just fine since
+ // it exists within the test apk (as opposed to the incremental apk-under-test).
+ Log.i(TAG, "Running with external instrumentation");
+ return null;
+ }
+ // For unit tests, the instrumentation class is replaced in the manifest by a build step
+ // because ActivityThread tries to instantiate it before we get a chance to load the
+ // incremental dex files.
+ Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
+ Instrumentation ret =
+ (Instrumentation) Reflect.newInstance(Class.forName(realInstrumentationName));
+
+ // Initialize the fields that are set by Instrumentation.init().
+ String[] initFields = {"mAppContext", "mComponent", "mInstrContext", "mMessageQueue",
+ "mThread", "mUiAutomationConnection", "mWatcher"};
+ for (String fieldName : initFields) {
+ Reflect.setField(ret, fieldName, Reflect.getField(mOrigInstrumentation, fieldName));
+ }
+ return ret;
+ }
+
+ /**
+ * Called by BootstrapInstrumentation from Instrumentation.onCreate().
+ * This happens regardless of whether or not instrumentation is enabled.
+ */
+ void onInstrumentationCreate(Bundle arguments) {
+ Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
+ try {
+ swapApplicationReferences();
+ enableContentProviders();
+ if (mRealInstrumentation != null) {
+ Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
+ mRealInstrumentation.onCreate(arguments);
+ }
+ } catch (Exception e) {
+ throw new RuntimeException("Incremental install failed.", e);
+ }
+ }
+
+ @Override
+ public void onCreate() {
+ super.onCreate();
+ try {
+ Log.i(TAG, "Application.onCreate() called.");
+ mRealApplication.onCreate();
+ } catch (Exception e) {
+ throw new RuntimeException("Incremental install failed.", e);
+ }
+ }
+
+ /**
+ * Nulls out ActivityThread.mBoundApplication.providers.
+ */
+ private void disableContentProviders() throws ReflectiveOperationException {
+ Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+ mStashedProviderList = Reflect.getField(data, "providers");
+ Reflect.setField(data, "providers", null);
+ }
+
+ /**
+ * Restores the value of ActivityThread.mBoundApplication.providers, and invokes
+ * ActivityThread#installContentProviders().
+ */
+ private void enableContentProviders() throws ReflectiveOperationException {
+ Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+ Reflect.setField(data, "providers", mStashedProviderList);
+ if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
+ Log.i(TAG, "Instantiating content providers");
+ Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
+ mStashedProviderList);
+ }
+ mStashedProviderList = null;
+ }
+
+ /**
+ * Changes all fields within framework classes that have stored a reference to this
+ * BootstrapApplication to instead store references to mRealApplication.
+ */
+ @SuppressWarnings("unchecked")
+ private void swapApplicationReferences() throws ReflectiveOperationException {
+ if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
+ Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
+ }
+
+ List<Application> allApplications =
+ (List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
+ for (int i = 0; i < allApplications.size(); i++) {
+ if (allApplications.get(i) == this) {
+ allApplications.set(i, mRealApplication);
+ }
+ }
+
+ // Contains a reference to BootstrapApplication and will cause BroadcastReceivers to fail
+ // if not replaced.
+ Context contextImpl = mRealApplication.getBaseContext();
+ Reflect.setField(contextImpl, "mOuterContext", mRealApplication);
+
+ for (String fieldName : new String[] {"mPackages", "mResourcePackages"}) {
+ Map<String, WeakReference<?>> packageMap =
+ (Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
+ for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
+ Object loadedApk = entry.getValue().get();
+ if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
+ Reflect.setField(loadedApk, "mApplication", mRealApplication);
+ }
+ }
+ }
+ }
+}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
new file mode 100644
index 0000000000..f197406499
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
@@ -0,0 +1,25 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+
+/**
+ * Notifies BootstrapApplication of the call to Instrumentation.onCreate().
+ */
+public final class BootstrapInstrumentation extends Instrumentation {
+ private final BootstrapApplication mApp;
+
+ BootstrapInstrumentation(BootstrapApplication app) {
+ mApp = app;
+ }
+
+ @Override
+ public void onCreate(Bundle arguments) {
+ super.onCreate(arguments);
+ mApp.onInstrumentationCreate(arguments);
+ }
+}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
new file mode 100644
index 0000000000..10e438f670
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -0,0 +1,291 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.os.Build;
+import android.os.Process;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Provides the ability to add native libraries and .dex files to an existing class loader.
+ * Tested with Jellybean MR2 - Marshmellow.
+ */
+final class ClassLoaderPatcher {
+ private static final String TAG = "cr.incrementalinstall";
+ private final File mAppFilesSubDir;
+ private final ClassLoader mClassLoader;
+ private final Object mLibcoreOs;
+ private final int mProcessUid;
+ final boolean mIsPrimaryProcess;
+
+ ClassLoaderPatcher(Context context) throws ReflectiveOperationException {
+ mAppFilesSubDir =
+ new File(context.getApplicationInfo().dataDir, "incremental-install-files");
+ mClassLoader = context.getClassLoader();
+ mLibcoreOs = Reflect.getField(Class.forName("libcore.io.Libcore"), "os");
+ mProcessUid = Process.myUid();
+ mIsPrimaryProcess = context.getApplicationInfo().uid == mProcessUid;
+ Log.i(TAG, "uid=" + mProcessUid + " (isPrimary=" + mIsPrimaryProcess + ")");
+ }
+
+ /**
+ * Loads all dex files within |dexDir| into the app's ClassLoader.
+ */
+ @SuppressLint({
+ "SetWorldReadable", "SetWorldWritable",
+ })
+ DexFile[] loadDexFiles(File dexDir) throws ReflectiveOperationException, IOException {
+ Log.i(TAG, "Installing dex files from: " + dexDir);
+
+ // The optimized dex files will be owned by this process' user.
+ // Store them within the app's data dir rather than on /data/local/tmp
+ // so that they are still deleted (by the OS) when we uninstall
+ // (even on a non-rooted device).
+ File incrementalDexesDir = new File(mAppFilesSubDir, "optimized-dexes");
+ File isolatedDexesDir = new File(mAppFilesSubDir, "isolated-dexes");
+ File optimizedDir;
+
+ // In O, optimizedDirectory is ignored, and the files are always put in an "oat"
+ // directory that is a sibling to the dex files themselves. SELinux policies
+ // prevent using odex files from /data/local/tmp, so we must first copy them
+ // into the app's data directory in order to get the odex files to live there.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ safeCopyAllFiles(dexDir, incrementalDexesDir);
+ dexDir = incrementalDexesDir;
+ }
+
+ // Ignore "oat" directory.
+ // Also ignore files that sometimes show up (e.g. .jar.arm.flock).
+ File[] dexFilesArr = dexDir.listFiles(f -> f.getName().endsWith(".jar"));
+ if (dexFilesArr == null) {
+ throw new FileNotFoundException("Dex dir does not exist: " + dexDir);
+ }
+
+ if (mIsPrimaryProcess) {
+ ensureAppFilesSubDirExists();
+ // Allows isolated processes to access the same files.
+ incrementalDexesDir.mkdir();
+ incrementalDexesDir.setReadable(true, false);
+ incrementalDexesDir.setExecutable(true, false);
+ // Create a directory for isolated processes to create directories in.
+ isolatedDexesDir.mkdir();
+ isolatedDexesDir.setWritable(true, false);
+ isolatedDexesDir.setExecutable(true, false);
+
+ optimizedDir = incrementalDexesDir;
+ } else {
+ // There is a UID check of the directory in dalvik.system.DexFile():
+ // https://android.googlesource.com/platform/libcore/+/45e0260/dalvik/src/main/java/dalvik/system/DexFile.java#101
+ // Rather than have each isolated process run DexOpt though, we use
+ // symlinks within the directory to point at the browser process'
+ // optimized dex files.
+ optimizedDir = new File(isolatedDexesDir, "isolated-" + mProcessUid);
+ optimizedDir.mkdir();
+ // Always wipe it out and re-create for simplicity.
+ Log.i(TAG, "Creating dex file symlinks for isolated process");
+ for (File f : optimizedDir.listFiles()) {
+ f.delete();
+ }
+ for (File f : incrementalDexesDir.listFiles()) {
+ String to = "../../" + incrementalDexesDir.getName() + "/" + f.getName();
+ File from = new File(optimizedDir, f.getName());
+ createSymlink(to, from);
+ }
+ }
+
+ Log.i(TAG, "Code cache dir: " + optimizedDir);
+ Log.i(TAG, "Loading " + dexFilesArr.length + " dex files");
+
+ Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+ Object[] dexElements = (Object[]) Reflect.getField(dexPathList, "dexElements");
+ dexElements = addDexElements(dexFilesArr, optimizedDir, dexElements);
+ Reflect.setField(dexPathList, "dexElements", dexElements);
+
+ DexFile[] ret = new DexFile[dexElements.length];
+ for (int i = 0; i < ret.length; ++i) {
+ ret[i] = (DexFile) Reflect.getField(dexElements[i], "dexFile");
+ }
+ return ret;
+ }
+
+ /**
+ * Sets up all libraries within |libDir| to be loadable by System.loadLibrary().
+ */
+ @SuppressLint("SetWorldReadable")
+ void importNativeLibs(File libDir) throws ReflectiveOperationException, IOException {
+ Log.i(TAG, "Importing native libraries from: " + libDir);
+ if (!libDir.exists()) {
+ Log.i(TAG, "No native libs exist.");
+ return;
+ }
+        // The library copying is not necessary on older devices, but we do it anyway to
+ // simplify things (it's fast compared to dexing).
+ // https://code.google.com/p/android/issues/detail?id=79480
+ File localLibsDir = new File(mAppFilesSubDir, "lib");
+ safeCopyAllFiles(libDir, localLibsDir);
+ addNativeLibrarySearchPath(localLibsDir);
+ }
+
+ @SuppressLint("SetWorldReadable")
+ private void safeCopyAllFiles(File srcDir, File dstDir) throws IOException {
+        // The library copying is not necessary on older devices, but we do it anyway to
+ // simplify things (it's fast compared to dexing).
+ // https://code.google.com/p/android/issues/detail?id=79480
+ File lockFile = new File(mAppFilesSubDir, dstDir.getName() + ".lock");
+ if (mIsPrimaryProcess) {
+ ensureAppFilesSubDirExists();
+ LockFile lock = LockFile.acquireRuntimeLock(lockFile);
+ if (lock == null) {
+ LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+ } else {
+ try {
+ dstDir.mkdir();
+ dstDir.setReadable(true, false);
+ dstDir.setExecutable(true, false);
+ copyChangedFiles(srcDir, dstDir);
+ } finally {
+ lock.release();
+ }
+ }
+ } else {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ // TODO: Work around this issue by using APK splits to install each dex / lib.
+ throw new RuntimeException("Incremental install does not work on Android M+ "
+ + "with isolated processes. Use the gn arg:\n"
+ + " disable_incremental_isolated_processes=true\n"
+ + "and try again.");
+ }
+            // Other processes: wait for the primary process to finish copying.
+ LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void addNativeLibrarySearchPath(File nativeLibDir) throws ReflectiveOperationException {
+ Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+ Object currentDirs = Reflect.getField(dexPathList, "nativeLibraryDirectories");
+ File[] newDirs = new File[] { nativeLibDir };
+ // Switched from an array to an ArrayList in Lollipop.
+ if (currentDirs instanceof List) {
+ List<File> dirsAsList = (List<File>) currentDirs;
+ dirsAsList.add(0, nativeLibDir);
+ } else {
+ File[] dirsAsArray = (File[]) currentDirs;
+ Reflect.setField(dexPathList, "nativeLibraryDirectories",
+ Reflect.concatArrays(newDirs, newDirs, dirsAsArray));
+ }
+
+ Object[] nativeLibraryPathElements;
+ try {
+ nativeLibraryPathElements =
+ (Object[]) Reflect.getField(dexPathList, "nativeLibraryPathElements");
+ } catch (NoSuchFieldException e) {
+ // This field doesn't exist pre-M.
+ return;
+ }
+ Object[] additionalElements = makeNativePathElements(newDirs);
+ Reflect.setField(dexPathList, "nativeLibraryPathElements",
+ Reflect.concatArrays(nativeLibraryPathElements, additionalElements,
+ nativeLibraryPathElements));
+ }
+
+ private static void copyChangedFiles(File srcDir, File dstDir) throws IOException {
+ // No need to delete stale libs since libraries are loaded explicitly.
+ int numNotChanged = 0;
+ for (File f : srcDir.listFiles()) {
+ // Note: Tried using hardlinks, but resulted in EACCES exceptions.
+ File dest = new File(dstDir, f.getName());
+ if (!copyIfModified(f, dest)) {
+ numNotChanged++;
+ }
+ }
+ if (numNotChanged > 0) {
+ Log.i(TAG, numNotChanged + " libs already up to date.");
+ }
+ }
+
+ @SuppressLint("SetWorldReadable")
+ private static boolean copyIfModified(File src, File dest) throws IOException {
+ long lastModified = src.lastModified();
+ if (dest.exists() && dest.lastModified() == lastModified) {
+ return false;
+ }
+ Log.i(TAG, "Copying " + src + " -> " + dest);
+ FileInputStream istream = new FileInputStream(src);
+ FileOutputStream ostream = new FileOutputStream(dest);
+ ostream.getChannel().transferFrom(istream.getChannel(), 0, istream.getChannel().size());
+ istream.close();
+ ostream.close();
+ dest.setReadable(true, false);
+ dest.setExecutable(true, false);
+ dest.setLastModified(lastModified);
+ return true;
+ }
+
+ private void ensureAppFilesSubDirExists() {
+ mAppFilesSubDir.mkdir();
+ mAppFilesSubDir.setExecutable(true, false);
+ }
+
+ private void createSymlink(String to, File from) throws ReflectiveOperationException {
+ Reflect.invokeMethod(mLibcoreOs, "symlink", to, from.getAbsolutePath());
+ }
+
+ private static Object[] makeNativePathElements(File[] paths)
+ throws ReflectiveOperationException {
+ Object[] entries = new Object[paths.length];
+ if (Build.VERSION.SDK_INT >= 26) {
+ Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$NativeLibraryElement");
+ for (int i = 0; i < paths.length; ++i) {
+ entries[i] = Reflect.newInstance(entryClazz, paths[i]);
+ }
+ } else {
+ Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+ for (int i = 0; i < paths.length; ++i) {
+ entries[i] = Reflect.newInstance(entryClazz, paths[i], true, null, null);
+ }
+ }
+ return entries;
+ }
+
+ private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] curDexElements)
+ throws ReflectiveOperationException {
+ Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+ Class<?> clazz = Class.forName("dalvik.system.DexPathList");
+ Object[] ret =
+ Reflect.concatArrays(curDexElements, curDexElements, new Object[files.length]);
+ File emptyDir = new File("");
+ for (int i = 0; i < files.length; ++i) {
+ File file = files[i];
+ Object dexFile;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ // loadDexFile requires that ret contain all previously added elements.
+ dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory,
+ mClassLoader, ret);
+ } else {
+ dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory);
+ }
+ Object dexElement;
+ if (Build.VERSION.SDK_INT >= 26) {
+ dexElement = Reflect.newInstance(entryClazz, dexFile, file);
+ } else {
+ dexElement = Reflect.newInstance(entryClazz, emptyDir, false, file, dexFile);
+ }
+ ret[curDexElements.length + i] = dexElement;
+ }
+ return ret;
+ }
+}
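
The core of loadDexFiles() is the dexElements splice: fetch the hidden DexPathList from the ClassLoader, grow its dexElements array, and write it back so the new dex files take part in class lookup. A condensed sketch of that splice in plain reflection; it assumes a loader that directly extends BaseDexClassLoader (the real code's Reflect.findField walks the class hierarchy instead), and the "pathList"/"dexElements" field names are hidden, version-sensitive Android APIs:

```java
import java.lang.reflect.Array;
import java.lang.reflect.Field;

// Condensed sketch of the dexElements splice in loadDexFiles(). Assumes |loader|
// directly extends BaseDexClassLoader; |newElements| would come from the
// DexPathList.loadDexFile() calls made by addDexElements() in the real code.
final class DexElementSplice {
    static void splice(ClassLoader loader, Object[] newElements)
            throws ReflectiveOperationException {
        // BaseDexClassLoader.pathList holds the dalvik.system.DexPathList.
        Field pathListField =
                loader.getClass().getSuperclass().getDeclaredField("pathList");
        pathListField.setAccessible(true);
        Object pathList = pathListField.get(loader);

        Field dexElementsField = pathList.getClass().getDeclaredField("dexElements");
        dexElementsField.setAccessible(true);
        Object[] oldElements = (Object[]) dexElementsField.get(pathList);

        // Grow the array; existing elements stay first so they win class lookup.
        Object[] merged = (Object[]) Array.newInstance(
                oldElements.getClass().getComponentType(),
                oldElements.length + newElements.length);
        System.arraycopy(oldElements, 0, merged, 0, oldElements.length);
        System.arraycopy(newElements, 0, merged, oldElements.length, newElements.length);
        dexElementsField.set(pathList, merged);
    }
}
```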
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
new file mode 100644
index 0000000000..6e48f3b1ea
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
@@ -0,0 +1,129 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileLock;
+import java.util.concurrent.Callable;
+
+/**
+ * Helpers for dealing with .lock files used during install / first run.
+ */
+final class LockFile {
+ private static final String TAG = "cr.incrementalinstall";
+
+ private final File mFile;
+ private final FileOutputStream mOutputStream;
+ private final FileLock mFileLock;
+
+ private LockFile(File file, FileOutputStream outputStream, FileLock fileLock) {
+ mFile = file;
+ mOutputStream = outputStream;
+ mFileLock = fileLock;
+ }
+
+ /**
+ * Clears the lock file by writing to it (making it non-zero in length);
+     * Clears the lock file by writing to it (making it non-zero in length).
+ static void clearInstallerLock(File lockFile) throws IOException {
+ Log.i(TAG, "Clearing " + lockFile);
+ // On Android M+, we can't delete files in /data/local/tmp, so we write to it instead.
+ FileOutputStream os = new FileOutputStream(lockFile);
+ os.write(1);
+ os.close();
+ }
+
+ /**
+ * Waits for the given file to be non-zero in length.
+ */
+ static void waitForInstallerLock(final File file, long timeoutMs) {
+ pollingWait(new Callable<Boolean>() {
+ @Override public Boolean call() {
+ return !installerLockExists(file);
+ }
+ }, file, timeoutMs);
+ }
+
+ /**
+     * Polls |func| until it returns true, throwing if |timeoutMs| elapses first.
+ */
+ private static void pollingWait(Callable<Boolean> func, File file, long timeoutMs) {
+ long pollIntervalMs = 200;
+ for (int i = 0; i < timeoutMs / pollIntervalMs; i++) {
+ try {
+ if (func.call()) {
+ if (i > 0) {
+ Log.i(TAG, "Finished waiting on lock file: " + file);
+ }
+ return;
+ } else if (i == 0) {
+ Log.i(TAG, "Waiting on lock file: " + file);
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ try {
+ Thread.sleep(pollIntervalMs);
+ } catch (InterruptedException e) {
+ // Should never happen.
+ }
+ }
+ throw new RuntimeException("Timed out waiting for lock file: " + file);
+ }
+
+ /**
+ * Returns whether the given lock file is missing or is in the locked state.
+ */
+ static boolean installerLockExists(File file) {
+ return !file.exists() || file.length() == 0;
+ }
+
+ /**
+ * Attempts to acquire a lock for the given file.
+     * @return The LockFile if the lock was acquired, or null otherwise.
+ */
+ static LockFile acquireRuntimeLock(File file) {
+ try {
+ FileOutputStream outputStream = new FileOutputStream(file);
+ FileLock lock = outputStream.getChannel().tryLock();
+ if (lock != null) {
+ Log.i(TAG, "Created lock file: " + file);
+ return new LockFile(file, outputStream, lock);
+ }
+ outputStream.close();
+ } catch (IOException e) {
+ // Do nothing. We didn't get the lock.
+ Log.w(TAG, "Exception trying to acquire lock " + file, e);
+ }
+ return null;
+ }
+
+ /**
+ * Waits for the given file to not exist.
+ */
+ static void waitForRuntimeLock(final File file, long timeoutMs) {
+ pollingWait(new Callable<Boolean>() {
+ @Override public Boolean call() {
+ return !file.exists();
+ }
+ }, file, timeoutMs);
+ }
+
+ /**
+ * Releases and deletes the lock file.
+ */
+ void release() throws IOException {
+ Log.i(TAG, "Deleting lock file: " + mFile);
+ mFileLock.release();
+ mOutputStream.close();
+ if (!mFile.delete()) {
+ throw new IOException("Failed to delete lock file: " + mFile);
+ }
+ }
+}
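
Combined with safeCopyAllFiles() in ClassLoaderPatcher, the runtime-lock protocol is: whichever process wins tryLock() performs the one-time work and then deletes the file in release(); every other process polls until the file disappears. A usage sketch mirroring that call pattern (lockDir is a hypothetical app-private directory):

```java
import java.io.File;
import java.io.IOException;

// Usage sketch of the LockFile protocol, mirroring safeCopyAllFiles() in
// ClassLoaderPatcher. |lockDir| is a hypothetical app-private directory.
final class LockFileUsage {
    static void doWorkOnce(File lockDir) throws IOException {
        File lockFile = new File(lockDir, "lib.lock");
        LockFile lock = LockFile.acquireRuntimeLock(lockFile);
        if (lock == null) {
            // Another process holds the lock; wait up to 10s for it to finish.
            LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
        } else {
            try {
                // ... one-time work goes here (e.g. copying files) ...
            } finally {
                lock.release(); // deletes the lock file, releasing any waiters
            }
        }
    }
}
```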
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
new file mode 100644
index 0000000000..c64dc1e8a3
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
@@ -0,0 +1,142 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Reflection helper methods.
+ */
+final class Reflect {
+ /**
+ * Sets the value of an object's field (even if it's not visible).
+ *
+ * @param instance The object containing the field to set.
+ * @param name The name of the field to set.
+ * @param value The new value for the field.
+ */
+ static void setField(Object instance, String name, Object value)
+ throws ReflectiveOperationException {
+ Field field = findField(instance, name);
+ field.setAccessible(true);
+ field.set(instance, value);
+ }
+
+ /**
+ * Retrieves the value of an object's field (even if it's not visible).
+ *
+     * @param instance The object containing the field to retrieve.
+     * @param name The name of the field to retrieve.
+ * @return The field's value. Primitive values are returned as their boxed
+ * type.
+ */
+ static Object getField(Object instance, String name) throws ReflectiveOperationException {
+ Field field = findField(instance, name);
+ field.setAccessible(true);
+ return field.get(instance);
+ }
+
+ /**
+     * Concatenates |left| and |right| into a new array whose component type is
+     * taken from |arrType|.
+ */
+ static Object[] concatArrays(Object[] arrType, Object[] left, Object[] right) {
+ Object[] result = (Object[]) Array.newInstance(
+ arrType.getClass().getComponentType(), left.length + right.length);
+ System.arraycopy(left, 0, result, 0, left.length);
+ System.arraycopy(right, 0, result, left.length, right.length);
+ return result;
+ }
+
+ /**
+ * Invokes a method with zero or more parameters. For static methods, use the Class as the
+ * instance.
+ */
+ static Object invokeMethod(Object instance, String name, Object... params)
+ throws ReflectiveOperationException {
+ boolean isStatic = instance instanceof Class;
+ Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+ Method method = findMethod(clazz, name, params);
+ method.setAccessible(true);
+ return method.invoke(instance, params);
+ }
+
+ /**
+ * Calls a constructor with zero or more parameters.
+ */
+ static Object newInstance(Class<?> clazz, Object... params)
+ throws ReflectiveOperationException {
+ Constructor<?> constructor = findConstructor(clazz, params);
+ constructor.setAccessible(true);
+ return constructor.newInstance(params);
+ }
+
+ private static Field findField(Object instance, String name) throws NoSuchFieldException {
+ boolean isStatic = instance instanceof Class;
+ Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+ for (; clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ return clazz.getDeclaredField(name);
+ } catch (NoSuchFieldException e) {
+ // Need to look in the super class.
+ }
+ }
+ throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass());
+ }
+
+ private static Method findMethod(Class<?> clazz, String name, Object... params)
+ throws NoSuchMethodException {
+ for (; clazz != null; clazz = clazz.getSuperclass()) {
+ for (Method method : clazz.getDeclaredMethods()) {
+ if (method.getName().equals(name)
+ && areParametersCompatible(method.getParameterTypes(), params)) {
+ return method;
+ }
+ }
+ }
+ throw new NoSuchMethodException("Method " + name + " with parameters "
+ + Arrays.asList(params) + " not found in " + clazz);
+ }
+
+ private static Constructor<?> findConstructor(Class<?> clazz, Object... params)
+ throws NoSuchMethodException {
+ for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+ if (areParametersCompatible(constructor.getParameterTypes(), params)) {
+ return constructor;
+ }
+ }
+ throw new NoSuchMethodException("Constructor with parameters " + Arrays.asList(params)
+ + " not found in " + clazz);
+ }
+
+ private static boolean areParametersCompatible(Class<?>[] paramTypes, Object... params) {
+ if (params.length != paramTypes.length) {
+ return false;
+ }
+ for (int i = 0; i < params.length; i++) {
+ if (!isAssignableFrom(paramTypes[i], params[i])) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private static boolean isAssignableFrom(Class<?> left, Object right) {
+ if (right == null) {
+ return !left.isPrimitive();
+ }
+ Class<?> rightClazz = right.getClass();
+ if (left.isPrimitive()) {
+ // TODO(agrieve): Fill in the rest as needed.
+ return left == boolean.class && rightClazz == Boolean.class
+ || left == int.class && rightClazz == Integer.class;
+ }
+ return left.isAssignableFrom(rightClazz);
+ }
+}
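
Since findMethod() and findConstructor() match by runtime parameter compatibility (including the boxed boolean/int cases handled in isAssignableFrom), the helpers can poke at private members of ordinary JDK classes too. A small JVM-only usage sketch; note Reflect is package-private, so this would have to live in org.chromium.incrementalinstall:

```java
import java.util.ArrayList;

// JVM-only sketch of the Reflect helpers; no Android classes involved.
final class ReflectUsage {
    public static void main(String[] args) throws ReflectiveOperationException {
        ArrayList<Object> list = new ArrayList<>();
        Reflect.invokeMethod(list, "add", "hello");   // resolves ArrayList.add(Object)
        Object size = Reflect.getField(list, "size"); // private int, boxed to Integer
        System.out.println(size);                     // prints 1
        Reflect.setField(list, "size", 0);            // boxed Integer unboxes into int
        System.out.println(list.size());              // prints 0
    }
}
```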
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
new file mode 100644
index 0000000000..3e0df0521e
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
@@ -0,0 +1,12 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+
+/**
+ * Exists to support an app having multiple instrumentations.
+ */
+public final class SecondInstrumentation extends Instrumentation {}
diff --git a/deps/v8/build/android/incremental_install/write_installer_json.py b/deps/v8/build/android/incremental_install/write_installer_json.py
new file mode 100755
index 0000000000..75bd6d1aab
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/write_installer_json.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a .json file with the per-apk details for an incremental install."""
+
+import argparse
+import json
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output-path',
+ help='Output path for .json file.',
+ required=True)
+ parser.add_argument('--apk-path',
+ help='Path to .apk relative to output directory.',
+ required=True)
+ parser.add_argument('--split',
+ action='append',
+ dest='split_globs',
+ default=[],
+ help='A glob matching the apk splits. '
+ 'Can be specified multiple times.')
+ parser.add_argument('--native-libs-list',
+ action='append',
+ default=[],
+ help='GN-list of paths to native libraries relative to '
+ 'output directory. Can be repeated.')
+ parser.add_argument('--dex-file',
+ action='append',
+ default=[],
+ dest='dex_files',
+ help='.dex file to include relative to output directory. '
+ 'Can be repeated')
+ parser.add_argument('--dex-file-list',
+ help='GN-list of dex paths relative to output directory.')
+ parser.add_argument('--show-proguard-warning',
+ action='store_true',
+ default=False,
+ help='Print a warning about proguard being disabled')
+ parser.add_argument('--dont-even-try',
+ help='Prints the given message and exits.')
+
+ options = parser.parse_args(args)
+ options.dex_files += build_utils.ParseGnList(options.dex_file_list)
+ all_libs = []
+ for gn_list in options.native_libs_list:
+ all_libs.extend(build_utils.ParseGnList(gn_list))
+ options.native_libs_list = all_libs
+ return options
+
+
+def main(args):
+ options = _ParseArgs(args)
+
+ data = {
+ 'apk_path': options.apk_path,
+ 'native_libs': options.native_libs_list,
+ 'dex_files': options.dex_files,
+ 'dont_even_try': options.dont_even_try,
+ 'show_proguard_warning': options.show_proguard_warning,
+ 'split_globs': options.split_globs,
+ }
+
+ with build_utils.AtomicOutput(options.output_path) as f:
+ json.dump(data, f, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
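
For reference, a hypothetical example of the file this script emits; the keys mirror the data dict above (sorted alphabetically by json.dump), and all paths are invented for illustration:

```json
{
  "apk_path": "apks/ChromePublic.apk",
  "dex_files": ["obj/chrome/chrome_dex/classes.dex"],
  "dont_even_try": null,
  "native_libs": ["lib.unstripped/libchrome.so"],
  "show_proguard_warning": false,
  "split_globs": ["apks/ChromePublic-*.apk"]
}
```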
diff --git a/deps/v8/build/android/incremental_install/write_installer_json.pydeps b/deps/v8/build/android/incremental_install/write_installer_json.pydeps
new file mode 100644
index 0000000000..851e6c5bd1
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/write_installer_json.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+../gyp/util/md5_check.py
+write_installer_json.py
diff --git a/deps/v8/build/android/lighttpd_server.py b/deps/v8/build/android/lighttpd_server.py
new file mode 100755
index 0000000000..c77d740d66
--- /dev/null
+++ b/deps/v8/build/android/lighttpd_server.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+ lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+ """Wraps lighttpd server, providing robust startup.
+
+ Args:
+ document_root: Path to root of this server's hosted files.
+ port: TCP port on the _host_ machine that the server will listen on. If
+ omitted it will attempt to use 9000, or if unavailable it will find
+ a free port from 8001 - 8999.
+ lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+ base_config_path: If supplied this file will replace the built-in default
+ lighttpd config file.
+ extra_config_contents: If specified, this string will be appended to the
+ base config (default built-in, or from base_config_path).
+ config_path, error_log, access_log: Optional paths where the class should
+ place temporary files for this session.
+ """
+
+ def __init__(self, document_root, port=None,
+ lighttpd_path=None, lighttpd_module_path=None,
+ base_config_path=None, extra_config_contents=None,
+ config_path=None, error_log=None, access_log=None):
+ self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+ self.document_root = os.path.abspath(document_root)
+ self.fixed_port = port
+ self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+ self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+ self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+ self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+ self.base_config_path = base_config_path
+ self.extra_config_contents = extra_config_contents
+ self.config_path = config_path or self._Mktmp('config')
+ self.error_log = error_log or self._Mktmp('error_log')
+ self.access_log = access_log or self._Mktmp('access_log')
+ self.pid_file = self._Mktmp('pid_file')
+ self.process = None
+
+ def _Mktmp(self, name):
+ return os.path.join(self.temp_dir, name)
+
+ @staticmethod
+ def _GetRandomPort():
+    # The port range for the test server is defined in constants.py.
+ return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+ constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+ def StartupHttpServer(self):
+    """Starts up an HTTP server with the specified document root and port."""
+ # If we want a specific port, make sure no one else is listening on it.
+ if self.fixed_port:
+ self._KillProcessListeningOnPort(self.fixed_port)
+ while True:
+ if self.base_config_path:
+ # Read the config
+ with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+ config_contents = f.read()
+ else:
+ config_contents = self._GetDefaultBaseConfig()
+ if self.extra_config_contents:
+ config_contents += self.extra_config_contents
+ # Write out the config, filling in placeholders from the members of |self|
+ with codecs.open(self.config_path, 'w', 'utf-8') as f:
+ f.write(config_contents % self.__dict__)
+ if (not os.path.exists(self.lighttpd_path) or
+ not os.access(self.lighttpd_path, os.X_OK)):
+ raise EnvironmentError(
+ 'Could not find lighttpd at %s.\n'
+ 'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+ % self.lighttpd_path)
+ # pylint: disable=no-member
+ self.process = pexpect.spawn(self.lighttpd_path,
+ ['-D', '-f', self.config_path,
+ '-m', self.lighttpd_module_path],
+ cwd=self.temp_dir)
+ client_error, server_error = self._TestServerConnection()
+ if not client_error:
+ assert int(open(self.pid_file, 'r').read()) == self.process.pid
+ break
+ self.process.close()
+
+ if self.fixed_port or 'in use' not in server_error:
+ print 'Client error:', client_error
+ print 'Server error:', server_error
+ return False
+ self.port = self._GetRandomPort()
+ return True
+
+ def ShutdownHttpServer(self):
+ """Shuts down our lighttpd processes."""
+ if self.process:
+ self.process.terminate()
+ shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+ def _TestServerConnection(self):
+ # Wait for server to start
+ server_msg = ''
+ for timeout in xrange(1, 5):
+ client_error = None
+ try:
+ with contextlib.closing(httplib.HTTPConnection(
+ '127.0.0.1', self.port, timeout=timeout)) as http:
+ http.set_debuglevel(timeout > 3)
+ http.request('HEAD', '/')
+ r = http.getresponse()
+ r.read()
+ if (r.status == 200 and r.reason == 'OK' and
+ r.getheader('Server') == self.server_tag):
+ return (None, server_msg)
+ client_error = ('Bad response: %s %s version %s\n ' %
+ (r.status, r.reason, r.version) +
+ '\n '.join([': '.join(h) for h in r.getheaders()]))
+ except (httplib.HTTPException, socket.error) as client_error:
+ pass # Probably too quick connecting: try again
+ # Check for server startup error messages
+ # pylint: disable=no-member
+ ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+ timeout=timeout)
+ if ix == 2: # stdout spew from the server
+ server_msg += self.process.match.group(0) # pylint: disable=no-member
+      elif ix == 1: # EOF -- server has quit so give up.
+ client_error = client_error or 'Server exited'
+ break
+ return (client_error or 'Timeout', server_msg)
+
+ @staticmethod
+ def _KillProcessListeningOnPort(port):
+ """Checks if there is a process listening on port number |port| and
+ terminates it if found.
+
+ Args:
+ port: Port number to check.
+ """
+ if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+ # Give the process some time to terminate and check that it is gone.
+ time.sleep(2)
+ assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+ 'Unable to kill process listening on port %d.' % port
+
+ @staticmethod
+ def _GetDefaultBaseConfig():
+ return """server.tag = "%(server_tag)s"
+server.modules = ( "mod_access",
+ "mod_accesslog",
+ "mod_alias",
+ "mod_cgi",
+ "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names = ( "index.php", "index.pl", "index.cgi",
+ "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign = (
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".svg" => "image/svg+xml",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".xhtml" => "application/xhtml+xml",
+ ".xhtmlmp" => "application/vnd.wap.xhtml+xml",
+ ".js" => "application/x-javascript",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".manifest" => "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr = "enable"
+
+##
+# which extensions should not be handled via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate = "enable"
+#dir-listing.encoding = "iso-8859-2"
+#dir-listing.external-css = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header = "enable"
+#debug.log-response-header = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found = "enable"
+
+#### SSL engine
+#ssl.engine = "enable"
+#ssl.pemfile = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi" => "/usr/bin/env",
+ ".pl" => "/usr/bin/env",
+ ".asis" => "/bin/cat",
+ ".php" => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+ server = LighttpdServer(*argv[1:])
+ try:
+ if server.StartupHttpServer():
+ raw_input('Server running at http://127.0.0.1:%s -'
+ ' press Enter to exit it.' % server.port)
+ else:
+ print 'Server exit code:', server.process.exitstatus
+ finally:
+ server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
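
The interesting part of StartupHttpServer() is its fallback loop: write the config, spawn lighttpd, and if the requested port is already in use, retry on a random port in the 8001-8999 range mentioned in the docstring. A rough Java analogue of that bind-with-fallback pattern (names hypothetical):

```java
import java.io.IOException;
import java.net.ServerSocket;
import java.util.Random;

// Java analogue of StartupHttpServer()'s port fallback: try the preferred
// port, and on a bind failure retry on a random port in 8001-8999.
final class PortFallbackSketch {
    static ServerSocket bindWithFallback(int preferredPort) throws IOException {
        Random random = new Random();
        int port = preferredPort;
        for (int attempt = 0; attempt < 20; attempt++) {
            try {
                return new ServerSocket(port); // caller now owns the socket
            } catch (IOException e) {
                port = 8001 + random.nextInt(999); // 8001-8999, as in the Python wrapper
            }
        }
        throw new IOException("Could not find a free port");
    }
}
```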
diff --git a/deps/v8/build/android/lint/OWNERS b/deps/v8/build/android/lint/OWNERS
new file mode 100644
index 0000000000..f47bc2f63a
--- /dev/null
+++ b/deps/v8/build/android/lint/OWNERS
@@ -0,0 +1,2 @@
+estevenson@chromium.org
+wnwen@chromium.org
diff --git a/deps/v8/build/android/lint/suppress.py b/deps/v8/build/android/lint/suppress.py
new file mode 100755
index 0000000000..a3719c18fe
--- /dev/null
+++ b/deps/v8/build/android/lint/suppress.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Add all generated lint_result.xml files to suppressions.xml"""
+
+# pylint: disable=no-member
+
+
+import argparse
+import collections
+import os
+import re
+import sys
+from xml.dom import minidom
+
+_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(_BUILD_ANDROID_DIR)
+
+from pylib.constants import host_paths
+
+_TMP_DIR_RE = re.compile(r'^/tmp/.*/(SRC_ROOT[0-9]+|PRODUCT_DIR)/')
+_THIS_FILE = os.path.abspath(__file__)
+_DEFAULT_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE),
+ 'suppressions.xml')
+_DOC = (
+ '\nSTOP! It looks like you want to suppress some lint errors:\n'
+    '- Have you tried identifying the offending patch?\n'
+ ' Ask the author for a fix and/or revert the patch.\n'
+    '- It is preferred to add suppressions in the code instead of\n'
+    '  sweeping them under the rug here. See:\n\n'
+ ' http://developer.android.com/tools/debugging/improving-w-lint.html\n'
+ '\n'
+ 'Still reading?\n'
+ '- You can edit this file manually to suppress an issue\n'
+ ' globally if it is not applicable to the project.\n'
+    '- You can also automatically add issues found so far in the\n'
+ ' build process by running:\n\n'
+ ' ' + os.path.relpath(_THIS_FILE, host_paths.DIR_SOURCE_ROOT) + '\n\n'
+ ' which will generate this file (Comments are not preserved).\n'
+ ' Note: PRODUCT_DIR will be substituted at run-time with actual\n'
+ ' directory path (e.g. out/Debug)\n'
+)
+
+
+_Issue = collections.namedtuple('Issue', ['severity', 'paths', 'regexps'])
+
+
+def _ParseConfigFile(config_path):
+ print 'Parsing %s' % config_path
+ issues_dict = {}
+ dom = minidom.parse(config_path)
+ for issue in dom.getElementsByTagName('issue'):
+ issue_id = issue.attributes['id'].value
+ severity = issue.getAttribute('severity')
+
+ path_elements = (
+ p.attributes.get('path')
+ for p in issue.getElementsByTagName('ignore'))
+ paths = set(p.value for p in path_elements if p)
+
+ regexp_elements = (
+ p.attributes.get('regexp')
+ for p in issue.getElementsByTagName('ignore'))
+ regexps = set(r.value for r in regexp_elements if r)
+
+ issues_dict[issue_id] = _Issue(severity, paths, regexps)
+ return issues_dict
+
+
+def _ParseAndMergeResultFile(result_path, issues_dict):
+ print 'Parsing and merging %s' % result_path
+ dom = minidom.parse(result_path)
+ for issue in dom.getElementsByTagName('issue'):
+ issue_id = issue.attributes['id'].value
+ severity = issue.attributes['severity'].value
+ path = issue.getElementsByTagName('location')[0].attributes['file'].value
+ # Strip temporary file path.
+ path = re.sub(_TMP_DIR_RE, '', path)
+ # Escape Java inner class name separator and suppress with regex instead
+ # of path. Doesn't use re.escape() as it is a bit too aggressive and
+ # escapes '_', causing trouble with PRODUCT_DIR.
+ regexp = path.replace('$', r'\$')
+ if issue_id not in issues_dict:
+ issues_dict[issue_id] = _Issue(severity, set(), set())
+ issues_dict[issue_id].regexps.add(regexp)
+
+
+def _WriteConfigFile(config_path, issues_dict):
+ new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None)
+ top_element = new_dom.documentElement
+ top_element.appendChild(new_dom.createComment(_DOC))
+ for issue_id, issue in sorted(issues_dict.iteritems(), key=lambda i: i[0]):
+ issue_element = new_dom.createElement('issue')
+ issue_element.attributes['id'] = issue_id
+ if issue.severity:
+ issue_element.attributes['severity'] = issue.severity
+ if issue.severity == 'ignore':
+ print 'Warning: [%s] is suppressed globally.' % issue_id
+ else:
+ for path in sorted(issue.paths):
+ ignore_element = new_dom.createElement('ignore')
+ ignore_element.attributes['path'] = path
+ issue_element.appendChild(ignore_element)
+ for regexp in sorted(issue.regexps):
+ ignore_element = new_dom.createElement('ignore')
+ ignore_element.attributes['regexp'] = regexp
+ issue_element.appendChild(ignore_element)
+ top_element.appendChild(issue_element)
+
+ with open(config_path, 'w') as f:
+ f.write(new_dom.toprettyxml(indent=' ', encoding='utf-8'))
+ print 'Updated %s' % config_path
+
+
+def _Suppress(config_path, result_path):
+ issues_dict = _ParseConfigFile(config_path)
+ _ParseAndMergeResultFile(result_path, issues_dict)
+ _WriteConfigFile(config_path, issues_dict)
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--config',
+ help='Path to suppression.xml config file',
+ default=_DEFAULT_CONFIG_PATH)
+ parser.add_argument('result_path',
+ help='Lint results xml file',
+ metavar='RESULT_FILE')
+ args = parser.parse_args()
+
+ _Suppress(args.config, args.result_path)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/lint/suppressions.xml b/deps/v8/build/android/lint/suppressions.xml
new file mode 100644
index 0000000000..ed0f8c3211
--- /dev/null
+++ b/deps/v8/build/android/lint/suppressions.xml
@@ -0,0 +1,404 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+ <!--
+STOP! It looks like you want to suppress some lint errors:
+- Have you tried identifying the offending patch?
+ Ask the author for a fix and/or revert the patch.
+- It is preferred to add suppressions in the code instead of
+  sweeping them under the rug here. See:
+
+ http://developer.android.com/tools/debugging/improving-w-lint.html
+
+Still reading?
+- You can edit this file manually to suppress an issue
+ globally if it is not applicable to the project.
+- You can also automatically add issues found so far in the
+ build process by running:
+
+ build/android/lint/suppress.py
+
+ which will generate this file (Comments are not preserved).
+ Note: PRODUCT_DIR will be substituted at run-time with actual
+ directory path (e.g. out/Debug)
+-->
+ <!-- AllowBackup defaults to true, and causes a lint warning if not explicitly set. -->
+ <issue id="AllowBackup">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/804427): Remove this suppression or add rationale. -->
+ <issue id="AppCompatResource" severity="ignore"/>
+ <!-- We use asserts in Chromium. See https://chromium.googlesource.com/chromium/src/+/master/styleguide/java/java.md#Asserts -->
+ <issue id="Assert" severity="ignore"/>
+ <issue id="AuthLeak" severity="Error">
+ <ignore regexp="chrome/android/javatests"/>
+ </issue>
+ <issue id="BadHostnameVerifier" severity="Error">
+ </issue>
+ <issue id="ButtonOrder" severity="Error">
+ <ignore regexp="chrome/android/java/res/layout/homepage_editor.xml"/>
+ </issue>
+ <issue id="ButtonStyle" severity="Error">
+ <ignore regexp="remoting/android/host/res/layout/main.xml"/>
+ </issue>
+ <!-- Found in generated android_chrome_strings.xml. -->
+ <issue id="ByteOrderMark" severity="Error">
+ <ignore regexp="values-pt-rBR/android_chrome_strings.xml"/>
+ </issue>
+ <issue id="ClickableViewAccessibility" severity="ignore"/>
+ <issue id="CommitPrefEdits">
+ <ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/AndroidChannelPreferences.java"/>
+ </issue>
+ <issue id="ContentDescription" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="DefaultLocale">
+ <ignore regexp="clank"/>
+ <ignore regexp="com/android/tv"/>
+ <ignore regexp="org/chromium/chrome/browser/payments/PaymentRequestMetricsTest.class"/>
+ <ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/>
+ </issue>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <issue id="Deprecated" severity="Error">
+ <ignore regexp="android:singleLine"/>
+ <ignore regexp="AndroidManifest.xml"/>
+ <ignore regexp="/test/"/>
+ </issue>
+ <issue id="DrawAllocation">
+ <ignore regexp="content/public/android/java/src/org/chromium/content/browser/ContentViewRenderView.java"/>
+ <ignore regexp="content/public/android/java/src/org/chromium/content/browser/PopupZoomer.java"/>
+ </issue>
+ <!-- TODO(crbug.com/804432): Remove this and fix the offending xml files. -->
+ <issue id="EllipsizeMaxLines" severity="ignore"/>
+ <issue id="ExifInterface">
+ <!-- TODO(crbug.com/804438): Cannot update until android.media.ExifInterface supports file descriptors -->
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/BitmapUtils.java"/>
+ </issue>
+ <issue id="ExportedContentProvider">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="ExportedService" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <issue id="GoogleAppIndexingUrlError" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <issue id="GoogleAppIndexingWarning" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <issue id="HandlerLeak">
+ <ignore regexp="android_webview/glue/java/src/com/android/webview/chromium/WebViewContentsClientAdapter.java" />
+ <ignore regexp="chromecast/internal" />
+ <ignore regexp="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java" />
+ </issue>
+ <issue id="HardcodedDebugMode" severity="Fatal">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="HardcodedText" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="remoting/android/host/res/layout/main.xml"/>
+ </issue>
+ <issue id="IconColors" severity="Error">
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-hdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-mdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xhdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxhdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxxhdpi/notification_icon.png"/>
+ </issue>
+ <issue id="IconDensities">
+ <!-- This is intentional to save on WebAPKs' size. -->
+ <ignore regexp="chrome/android/webapk/shell_apk/res/drawable-*"/>
+ <!-- crbug.com/457918 is tracking missing assets -->
+ <ignore regexp="chrome/android/java/res/drawable-xxhdpi"/>
+ <ignore regexp="chrome/android/java/res/drawable-xxxhdpi"/>
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="content/public/android/java/res/drawable-xxhdpi"/>
+ <ignore regexp="content/public/android/java/res/drawable-xxxhdpi"/>
+ <ignore regexp="ui/android/java/res/drawable-xxhdpi"/>
+ <ignore regexp="ui/android/java/res/drawable-xxxhdpi"/>
+ <!-- This is intentional to reduce APK size. See: http://crrev/c/1352161 -->
+ <ignore regexp="chrome/android/features/autofill_assistant/java/res/drawable-*"/>
+ </issue>
+ <issue id="IconDipSize">
+ <ignore regexp="chromecast/internal"/>
+ <!-- These only need to be 1px for all densities. See: crbug.com/804449 -->
+ <ignore regexp="chrome/android/java/res/.*tab_strip_fade"/>
+ </issue>
+ <issue id="IconDuplicates" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="IconDuplicatesConfig" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="IconLauncherFormat" severity="ignore">
+ <!-- TODO(crbug.com/739746): Remove after lint version has been updated. -->
+ <ignore regexp="remoting/android/java/res/mipmap-anydpi-v26/ic_launcher.xml"/>
+ </issue>
+ <issue id="IconLauncherShape" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="chrome/android/webapk/shell_apk/res/mipmap-mdpi/ic_launcher_background.png"/>
+ </issue>
+ <issue id="IconLocation">
+ <ignore regexp="chromecast/internal"/>
+ <!-- This is just for testing -->
+ <ignore regexp="chrome/test/chromedriver/test/webview_shell/java/res/drawable/icon.png"/>
+ <!-- Memconsumer is only for tooling -->
+ <ignore regexp="tools/android/memconsumer/java/res/drawable/"/>
+ <!-- It is OK for content_shell_apk to have missing assets. -->
+ <ignore regexp="content/shell/android/java/res/"/>
+ </issue>
+ <issue id="IconMissingDensityFolder">
+ <!-- see crbug.com/542435 -->
+ <ignore regexp="android_webview/apk/java/res"/>
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res"/>
+ <ignore regexp="chrome/android/webapk/shell_apk/res"/>
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="tools/android/push_apps_to_background/res"/>
+ <ignore regexp="ui/android/java/res"/>
+ <!-- crbug.com/457918 is tracking missing assets -->
+ <ignore regexp="components/embedder_support/android/java/res"/>
+ </issue>
+ <issue id="ImpliedQuantity" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd"/>
+ </issue>
+ <issue id="InconsistentArrays" severity="Error">
+ <ignore regexp="android_webview/locale_paks.resources.zip/values/locale-paks.xml"/>
+ <ignore regexp="chrome/android/chrome_locale_paks.resources.zip/values/locale-paks.xml"/>
+ <ignore regexp="preloaded_fonts.xml"/>
+ </issue>
+ <issue id="InconsistentLayout" severity="ignore"/>
+ <issue id="InefficientWeight" severity="Error">
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
+ </issue>
+ <issue id="InflateParams" severity="ignore"/>
+ <issue id="InlinedApi" severity="ignore"/>
+ <issue id="InvalidPackage" severity="Error">
+ <ignore regexp="espresso/espresso_core_java.interface.jar"/>
+ </issue>
+ <issue id="InvalidVectorPath" severity="ignore"/>
+ <issue id="LabelFor" severity="Error">
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/804453): Remove this after fixing. -->
+ <issue id="KeyboardInaccessibleWidget" severity="ignore"/>
+ <issue id="LintError">
+ <!-- We no longer supply class files to lint. -->
+ <ignore regexp="No `.class` files were found in project"/>
+ </issue>
+ <issue id="LogConditional" severity="ignore"/>
+ <issue id="LongLogTag" severity="ignore"/>
+ <issue id="MissingApplicationIcon" severity="ignore"/>
+ <issue id="MissingDefaultResource">
+ <!-- Only used by ToolbarControlContainer guarded by tablet form-factor. -->
+ <ignore regexp="toolbar_background.9.png"/>
+ <!-- Only used by FirstRunFlowSequencer guarded by tablet form-factor. -->
+ <ignore regexp="window_background.xml"/>
+ </issue>
+ <issue id="MissingPermission" severity="ignore"/>
+ <!-- TODO(yolandyan) remove this once all tests are converted to junit4 -->
+ <issue id="MissingPrefix" severity="ignore"/>
+ <!--
+ TODO(estevenson) remove this once translations are added for
+ IDS_ACCESSIBILITY_TOOLBAR_BTN_TABSWITCHER_TOGGLE (http://crbug.com/635677)
+ -->
+ <issue id="MissingQuantity">
+ <ignore regexp="android_chrome_strings.xml"/>
+ </issue>
+ <issue id="MissingRegistered" severity="ignore"/>
+ <issue id="MissingSuperCall" severity="Error">
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/selection/SelectionToolbar.java"/>
+ </issue>
+ <issue id="MissingTranslation">
+ <!-- http://crbug.com/450548 -->
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="restriction_values.xml.*"/>
+ </issue>
+ <issue id="MissingVersion">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="NewApi">
+ <!-- Do not add new suppressions without rationale. -->
+ <!-- 2 AutoCloseable has been available since API 15, just hidden. -->
+ <ignore regexp="Class requires API level 19.*java.lang.AutoCloseable"/>
+ <ignore regexp="Call requires API level 19.*java.lang.AutoCloseable#close"/>
+ <!-- 1 We support requireNonNull via desugar. -->
+ <ignore regexp="Call requires API level 19.*`java.util.Objects#requireNonNull`"/>
+ <!-- 2 We support try-with-resources via desugar. -->
+ <ignore regexp="Try-with-resources requires API level 19"/>
+ <ignore regexp="Call requires API level 19.*`java.lang.Throwable#addSuppressed`"/>
+ <!-- 1 We support default methods via desugar. -->
+ <ignore regexp="Default method requires API level 24"/>
+ <!-- 1 We support static interface methods via desugar. -->
+ <ignore regexp="Static interface method requires API level 24"/>
+ <!-- 1 This is for testonly target android_support_chromium_java. -->
+ <ignore regexp="third_party/android_tools/sdk/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/>
+ <!-- 1 This is for testonly target android_support_chromium_java in android_sdk. -->
+ <ignore regexp="third_party/android_sdk/public/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/>
+ <!-- Endnote: Please specify number of suppressions when adding more -->
+ </issue>
+ <!-- This warning just adds a lot of false positives. -->
+ <issue id="ObsoleteSdkInt" severity="ignore"/>
+ <issue id="OldTargetApi">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="Overdraw" severity="ignore"/>
+ <issue id="Override">
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="org/chromium/content/browser/input/ThreadedInputConnection.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="com/android/webview/chromium/ContentSettingsAdapter.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="com/android/webview/chromium/ServiceWorkerControllerAdapter.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="com/android/webview/chromium/ServiceWorkerSettingsAdapter.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="org/chromium/chrome/browser/ChromeActivity.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="org/chromium/chrome/browser/ChromeTabbedActivity.class"/>
+ </issue>
+ <issue id="PackageManagerGetSignatures">
+ <ignore regexp="chrome/android/webapk/libs/client/src/org/chromium/webapk/lib/client/WebApkValidator.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/browserservices/OriginVerifier.java"/>
+ </issue>
+ <issue id="PluralsCandidate" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-en-rGB/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/>
+ </issue>
+ <issue id="PrivateApi" severity="ignore"/>
+ <!-- Chrome is a system app. -->
+ <issue id="ProtectedPermissions" severity="ignore"/>
+ <issue id="Recycle" severity="ignore"/>
+ <issue id="Registered" severity="ignore"/>
+ <issue id="ResourceAsColor" severity="ignore"/>
+ <issue id="ResourceType" severity="Error">
+ <ignore regexp="/javatests/"/>
+ </issue>
+ <!-- TODO(crbug.com/831774): Play Services starts complaining about RestrictedApi. Needs investigation -->
+ <issue id="RestrictedApi" severity="ignore"/>
+ <issue id="RtlCompat" severity="ignore"/>
+ <issue id="RtlEnabled" severity="ignore"/>
+ <issue id="RtlSymmetry" severity="ignore"/>
+ <issue id="SetJavaScriptEnabled" severity="ignore"/>
+ <issue id="SignatureOrSystemPermissions" severity="ignore"/>
+ <issue id="SpUsage" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="StaticFieldLeak">
+ <!-- Nice to fix, but not necessary or performance critical. -->
+ <ignore regexp="This AsyncTask class should be static or leaks might occur"/>
+ </issue>
+ <issue id="StringFormatCount" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-fr/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/>
+ </issue>
+ <issue id="StringFormatInvalid" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-da/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-et/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pt-rBR/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-sv/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-tl/android_chrome_strings.xml"/>
+ </issue>
+ <!-- We have many C++ enums that we don't care about in java -->
+ <issue id="SwitchIntDef" severity="ignore"/>
+ <issue id="TextFields" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="TypographyDashes" severity="Error">
+ <ignore regexp="chrome/app/policy/android/values-v21/restriction_values.xml"/>
+ </issue>
+ <!-- Typos check disabled due to lint bug: http://crbug.com/671170 -->
+ <issue id="Typos" severity="ignore" />
+ <issue id="UnusedAttribute" severity="ignore"/>
+ <issue id="UnusedIds" severity="ignore"/>
+ <issue id="UnusedQuantity" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-in/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-ja/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-ko/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-lt/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-ms/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-sk/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-th/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-vi/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-zh-rCN/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-zh-rTW/android_chrome_strings.xml"/>
+ </issue>
+ <!-- Our generated enums are allowed to have the same values. -->
+ <issue id="UniqueConstants" severity="ignore"/>
+ <issue id="UnusedResources">
+ <!-- Do not add new suppressions without rationale. -->
+ <!-- 3 raw resources are accessed by URL in various places -->
+ <ignore regexp="PRODUCT_DIR/gen/remoting/android/.*/res/raw/credits.html"/>
+ <ignore regexp="PRODUCT_DIR/gen/remoting/android/.*/res/raw/credits_css.css"/>
+ <ignore regexp="PRODUCT_DIR/gen/remoting/android/.*/res/raw/credits_js.js"/>
+ <!-- 1 string test only, used in CronetSmokeTestCase dynamically -->
+ <ignore regexp="R.string.TestSupportImplClass"/>
+ <!-- 1 string used by Android's policies system, pulled from app directly -->
+ <ignore regexp="restriction_values.xml"/>
+ <!-- 3 resources test only, used in webview tests dynamically -->
+ <ignore regexp="android_webview/tools/automated_ui_tests/java/res/layout/"/>
+ <ignore regexp="android_webview/test/shell/res/raw/resource_file.html"/>
+ <ignore regexp="android_webview/test/shell/res/raw/resource_icon.png"/>
+ <!-- 2 resources used by android webview glue layer, could be refactored -->
+ <ignore regexp="android_webview/java/res/drawable-hdpi/ic_play_circle_outline_black_48dp.png"/>
+ <ignore regexp="R.string.private_browsing_warning"/>
+ <!-- 2 resource sets used by clank widgets for each channel -->
+ <ignore regexp="The resource `R.string.bookmark_widget_title.*` appears to be unused"/>
+ <ignore regexp="The resource `R.string.search_widget_title.*` appears to be unused"/>
+ <!-- 1 resource used by android tv to generate resources.zip file -->
+ <ignore regexp="chromecast/internal/shell/browser/android/java/res/drawable-hdpi/ic_settings_cast.png"/>
+ <!-- TODO(crbug.com/909915): Remove this after full Lite mode launch. -->
+ <!-- 12 resources used by Data Saver during rebranding to Lite mode -->
+ <ignore regexp="The resource `R.string..*lite_mode` appears to be unused"/>
+ <!-- Module titles may only be used by the Play Store. -->
+ <ignore regexp="The resource `R.string.*_module_title` appears to be unused"/>
+ <!-- Endnote: Please specify number of suppressions when adding more -->
+ </issue>
+ <issue id="UseCompoundDrawables">
+ <!-- Upscaling 24dp to 48dp doesn't work as expected with a TextView compound drawable. -->
+ <ignore regexp="chrome/android/java/res/layout/photo_picker_bitmap_view.xml"/>
+ </issue>
+ <issue id="UselessParent">
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
+ <ignore regexp="chrome/android/java/res/layout/data_usage_breakdown.xml"/>
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="tools/android/kerberos/SpnegoAuthenticator/res/layout/activity_account_authenticator.xml"/>
+ </issue>
+ <issue id="UsesMinSdkAttributes" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+  <!-- TODO(crbug.com/807734): Investigate and possibly remove this -->
+ <issue id="UseSparseArrays" severity="ignore"/>
+ <issue id="ValidFragment" severity="Error">
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/BaseMediaRouteDialogManager.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteChooserDialogManager.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteControllerDialogManager.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/preferences/privacy/OtherFormsOfHistoryDialogFragment.java"/>
+ <ignore regexp="media/capture/content/android/java/src/org/chromium/media/ScreenCapture.java"/>
+ </issue>
+ <issue id="VectorPath" severity="ignore"/>
+ <issue id="ViewConstructor" severity="ignore"/>
+ <issue id="VisibleForTests" severity="Error">
+ <ignore regexp="/javatests/"/>
+ <ignore regexp="/test/"/>
+ <!-- TODO(crbug.com/757124): Remove all these specific Feedback files after underlying issue is resolved -->
+    <!-- Underlying issue is that Android's FeedbackOptions.Builder uses @VisibleForTesting without 'otherwise='. -->
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/feedback/FeedbackUtil.java"/>
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/feedback/PlayServicesFeedbackReporter.java"/>
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/help/FeedbackCategoryChooserActivity.java"/>
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/help/HelpAndFeedbackInternal.java"/>
+ </issue>
+ <issue id="WrongCall" severity="ignore"/>
+ <issue id="WrongConstant">
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/bookmarks/BookmarkItemsAdapter.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/instantapps/InstantAppsHandler.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/SSLClientCertificateRequest.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/prefeditor/EditorDialog.java"/>
+ <ignore regexp="third_party/android_data_chart/java/src/org/chromium/third_party/android/datausagechart/ChartDataUsageView.java"/>
+ </issue>
+</lint>
diff --git a/deps/v8/build/android/list_class_verification_failures.py b/deps/v8/build/android/list_class_verification_failures.py
new file mode 100755
index 0000000000..2206f4bfee
--- /dev/null
+++ b/deps/v8/build/android/list_class_verification_failures.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A helper script to list class verification errors.
+
+This is a wrapper around the device's oatdump executable, parsing desired output
+and accommodating API-level-specific details, such as file paths.
+"""
+
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import re
+
+import devil_chromium
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+from devil.android.tools import script_common
+from devil.utils import logging_common
+from py_utils import tempfile_ext
+
+STATUSES = [
+ 'NotReady',
+ 'RetryVerificationAtRuntime',
+ 'Verified',
+ 'Initialized',
+ 'SuperclassValidated',
+]
+
+
+def DetermineDeviceToUse(devices):
+ """Like DeviceUtils.HealthyDevices(), but only allow a single device.
+
+ Args:
+ devices: A (possibly empty) list of serial numbers, such as from the
+ --device flag.
+ Returns:
+ A single device_utils.DeviceUtils instance.
+ Raises:
+ device_errors.NoDevicesError: Raised when no non-blacklisted devices exist.
+    device_errors.MultipleDevicesError: Raised when multiple devices exist, but
+ |devices| does not distinguish which to use.
+ """
+ if not devices:
+ # If the user did not specify which device, we let HealthyDevices raise
+ # MultipleDevicesError.
+ devices = None
+ usable_devices = device_utils.DeviceUtils.HealthyDevices(device_arg=devices)
+ # If the user specified more than one device, we still only want to support a
+ # single device, so we explicitly raise MultipleDevicesError.
+ if len(usable_devices) > 1:
+ raise device_errors.MultipleDevicesError(usable_devices)
+ return usable_devices[0]
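+
+# For example (hypothetical serials): with devices ['A', 'B'] attached,
+# DetermineDeviceToUse([]) raises MultipleDevicesError, while
+# DetermineDeviceToUse(['A']) returns a DeviceUtils instance for 'A'.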
+
+
+class DeviceOSError(Exception):
+ """Raised when a file is missing from the device, or something similar."""
+ pass
+
+
+class UnsupportedDeviceError(Exception):
+ """Raised when the device is not supported by this script."""
+ pass
+
+
+def _GetFormattedArch(device):
+ abi = device.product_cpu_abi
+ # Some architectures don't map 1:1 with the folder names.
+ return {abis.ARM_64: 'arm64', abis.ARM: 'arm'}.get(abi, abi)
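+
+# E.g., assuming devil's standard ABI strings, 'arm64-v8a' maps to 'arm64'
+# and 'armeabi-v7a' to 'arm'; any other ABI string is returned unchanged.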
+
+
+def PathToDexForPlatformVersion(device, package_name):
+ """Gets the full path to the dex file on the device."""
+ sdk_level = device.build_version_sdk
+ paths_to_apk = device.GetApplicationPaths(package_name)
+ if not paths_to_apk:
+ raise DeviceOSError(
+ 'Could not find data directory for {}. Is it installed?'.format(
+ package_name))
+ if len(paths_to_apk) != 1:
+ raise DeviceOSError(
+ 'Expected exactly one path for {} but found {}'.format(
+ package_name,
+ paths_to_apk))
+ path_to_apk = paths_to_apk[0]
+
+ if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1:
+ # Of the form "com.example.foo-\d", where \d is some digit (usually 1 or 2)
+ package_with_suffix = os.path.basename(os.path.dirname(path_to_apk))
+ dalvik_prefix = '/data/dalvik-cache/arm'
+ odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format(
+ prefix=dalvik_prefix,
+ package=package_with_suffix)
+ elif sdk_level >= version_codes.MARSHMALLOW:
+ arch = _GetFormattedArch(device)
+ odex_file = '{data_dir}/oat/{arch}/base.odex'.format(
+ data_dir=os.path.dirname(path_to_apk), arch=arch)
+ else:
+ raise UnsupportedDeviceError('Unsupported API level: {}'.format(sdk_level))
+
+ odex_file_exists = device.FileExists(odex_file)
+ if odex_file_exists:
+ return odex_file
+ elif sdk_level >= version_codes.PIE:
+ raise DeviceOSError(
+ 'Unable to find odex file: you must run dex2oat on debuggable apps '
+ 'on >= P after installation.')
+ raise DeviceOSError('Unable to find odex file')
+
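+# Illustrative return values for package "com.example.app" on an arm device
+# (cf. the accompanying unit tests):
+#   L/L-MR1: /data/dalvik-cache/arm/data@app@com.example.app-1@base.apk@classes.dex
+#   M+:      /data/app/com.example.app-1/oat/arm64/base.odex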
+
+def _AdbOatDumpForPackage(device, package_name, out_file):
+ """Runs oatdump on the device."""
+ # Get the path to the odex file.
+ odex_file = PathToDexForPlatformVersion(device, package_name)
+ device.RunShellCommand(['oatdump',
+ '--oat-file=' + odex_file,
+ '--output=' + out_file],
+ shell=True, check_return=True)
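+
+# The resulting device-side command is roughly (paths illustrative):
+#   oatdump --oat-file=/data/app/com.example.app-1/oat/arm64/base.odex \
+#       --output=/data/local/tmp/<temp file>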
+
+
+class JavaClass(object):
+ """This represents a Java Class and its ART Class Verification status."""
+
+ def __init__(self, name, verification_status):
+ self.name = name
+ self.verification_status = verification_status
+
+
+def _ParseMappingFile(proguard_map_file):
+ """Creates a map of obfuscated names to deobfuscated names."""
+ mappings = {}
+ with open(proguard_map_file, 'r') as f:
+ pattern = re.compile(r'^(\S+) -> (\S+):')
+ for line in f:
+ m = pattern.match(line)
+ if m is not None:
+ deobfuscated_name = m.group(1)
+ obfuscated_name = m.group(2)
+ mappings[obfuscated_name] = deobfuscated_name
+ return mappings
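+
+# A ProGuard mapping file contains class lines such as (hypothetical names):
+#   org.chromium.example.Foo -> a.b:
+# which _ParseMappingFile() stores as {'a.b': 'org.chromium.example.Foo'}.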
+
+
+def _DeobfuscateJavaClassName(dex_code_name, proguard_mappings):
+ return proguard_mappings.get(dex_code_name, dex_code_name)
+
+
+def FormatJavaClassName(dex_code_name, proguard_mappings):
+ obfuscated_name = dex_code_name.replace('/', '.')
+ if proguard_mappings is not None:
+ return _DeobfuscateJavaClassName(obfuscated_name, proguard_mappings)
+ else:
+ return obfuscated_name
+
+
+def ListClassesAndVerificationStatus(oatdump_output, proguard_mappings):
+ """Lists all Java classes in the dex along with verification status."""
+ java_classes = []
+ pattern = re.compile(r'\d+: L([^;]+).*\(type_idx=[^(]+\((\w+)\).*')
+ for line in oatdump_output:
+ m = pattern.match(line)
+ if m is not None:
+ name = FormatJavaClassName(m.group(1), proguard_mappings)
+ # Some platform levels prefix this with "Status" while other levels do
+ # not. Strip this for consistency.
+ verification_status = m.group(2).replace('Status', '')
+ java_classes.append(JavaClass(name, verification_status))
+ return java_classes
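+
+# A matching oatdump line looks roughly like (cf. _CreateOdexLine in the
+# accompanying test):
+#   6: La/b/JavaClass1; (offset=0xac) (type_idx=6) (StatusVerified) (OatClassNoneCompiled)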
+
+
+def _PrintVerificationResults(target_status, java_classes, show_summary):
+ """Prints results for user output."""
+ # Sort to keep output consistent between runs.
+ java_classes.sort(key=lambda c: c.name)
+ d = {}
+ for status in STATUSES:
+ d[status] = 0
+
+ for java_class in java_classes:
+ if java_class.verification_status == target_status:
+ print(java_class.name)
+ if java_class.verification_status not in d:
+      raise RuntimeError('Unexpected status: {0}'.format(
+          java_class.verification_status))
+ else:
+ d[java_class.verification_status] += 1
+
+ if show_summary:
+ for status in d:
+ count = d[status]
+ print('Total {status} classes: {num}'.format(
+ status=status, num=count))
+ print('Total number of classes: {num}'.format(
+ num=len(java_classes)))
+
+
+def RealMain(mapping, device_arg, package, status, hide_summary, workdir):
+ if mapping is None:
+    logging.warning('Skipping deobfuscation because no map file was provided.')
+ device = DetermineDeviceToUse(device_arg)
+ device.EnableRoot()
+ with device_temp_file.DeviceTempFile(
+ device.adb) as file_on_device:
+ _AdbOatDumpForPackage(device, package, file_on_device.name)
+ file_on_host = os.path.join(workdir, 'out.dump')
+ device.PullFile(file_on_device.name, file_on_host)
+ proguard_mappings = (_ParseMappingFile(mapping) if mapping else None)
+ with open(file_on_host, 'r') as f:
+ java_classes = ListClassesAndVerificationStatus(f, proguard_mappings)
+ _PrintVerificationResults(status, java_classes, not hide_summary)
+
+
+def main():
+ devil_chromium.Initialize()
+ parser = argparse.ArgumentParser(description="""
+List Java classes in an APK which fail ART class verification.
+""")
+ parser.add_argument(
+ '--package',
+ '-P',
+ type=str,
+ default=None,
+ required=True,
+ help='Specify the full application package name')
+ parser.add_argument(
+ '--mapping',
+ '-m',
+ type=os.path.realpath,
+ default=None,
+ help='Mapping file for the desired APK to deobfuscate class names')
+ parser.add_argument(
+ '--hide-summary',
+ default=False,
+ action='store_true',
+ help='Do not output the total number of classes in each Status.')
+ parser.add_argument(
+ '--status',
+ type=str,
+ default='RetryVerificationAtRuntime',
+ choices=STATUSES,
+ help='Which category of classes to list at the end of the script')
+ parser.add_argument(
+ '--workdir',
+ '-w',
+ type=os.path.realpath,
+ default=None,
+ help=('Work directory for oatdump output (default = temporary '
+ 'directory). If specified, this will not be cleaned up at the end '
+ 'of the script (useful if you want to inspect oatdump output '
+ 'manually)'))
+
+ script_common.AddEnvironmentArguments(parser)
+ script_common.AddDeviceArguments(parser)
+ logging_common.AddLoggingArguments(parser)
+
+ args = parser.parse_args()
+ script_common.InitializeEnvironment(args)
+ logging_common.InitializeLogging(args)
+
+ if args.workdir:
+ if not os.path.isdir(args.workdir):
+ raise RuntimeError('Specified working directory does not exist')
+ RealMain(args.mapping, args.devices, args.package, args.status,
+ args.hide_summary, args.workdir)
+ # Assume the user wants the workdir to persist (useful for debugging).
+    logging.warning('Not cleaning up explicitly-specified workdir: %s',
+                    args.workdir)
+ else:
+ with tempfile_ext.NamedTemporaryDirectory() as workdir:
+ RealMain(args.mapping, args.devices, args.package, args.status,
+ args.hide_summary, workdir)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/list_class_verification_failures_test.py b/deps/v8/build/android/list_class_verification_failures_test.py
new file mode 100644
index 0000000000..a3da0fd6d7
--- /dev/null
+++ b/deps/v8/build/android/list_class_verification_failures_test.py
@@ -0,0 +1,233 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import list_class_verification_failures as list_verification
+
+from pylib.constants import host_paths
+
+import devil_chromium # pylint: disable=unused-import
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+def _CreateOdexLine(java_class_name, type_idx, verification_status):
+ """Create a rough approximation of a line of oatdump output."""
+ return ('{type_idx}: L{java_class}; (offset=0xac) (type_idx={type_idx}) '
+ '({verification}) '
+ '(OatClassNoneCompiled)'.format(type_idx=type_idx,
+ java_class=java_class_name,
+ verification=verification_status))
+
+
+def _ClassForName(name, classes):
+ return next(c for c in classes if c.name == name)
+
+
+class _DetermineDeviceToUseTest(unittest.TestCase):
+
+ def testDetermineDeviceToUse_emptyListWithOneAttachedDevice(self):
+ fake_attached_devices = ['123']
+ user_specified_devices = []
+ device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+ return_value=fake_attached_devices)
+ result = list_verification.DetermineDeviceToUse(user_specified_devices)
+ self.assertEqual(result, fake_attached_devices[0])
+ device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+
+ def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self):
+ user_specified_devices = []
+ device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+ side_effect=device_errors.NoDevicesError())
+ with self.assertRaises(device_errors.NoDevicesError) as _:
+ list_verification.DetermineDeviceToUse(user_specified_devices)
+ device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)
+
+ def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self):
+ user_specified_devices = ['123']
+ fake_attached_devices = ['123']
+ device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
+ return_value=fake_attached_devices)
+ result = list_verification.DetermineDeviceToUse(user_specified_devices)
+ self.assertEqual(result, fake_attached_devices[0])
+ device_utils.DeviceUtils.HealthyDevices.assert_called_with(
+ device_arg=user_specified_devices)
+
+
+class _ListClassVerificationFailuresTest(unittest.TestCase):
+
+ def testPathToDexForPlatformVersion_noPaths(self):
+ sdk_int = version_codes.LOLLIPOP
+ paths_to_apk = []
+ package_name = 'package.name'
+ arch = abis.ARM_64
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+ with self.assertRaises(list_verification.DeviceOSError) as cm:
+ list_verification.PathToDexForPlatformVersion(device, package_name)
+ message = str(cm.exception)
+ self.assertIn('Could not find data directory', message)
+
+ def testPathToDexForPlatformVersion_multiplePaths(self):
+ sdk_int = version_codes.LOLLIPOP
+ paths_to_apk = ['/first/path', '/second/path']
+ package_name = 'package.name'
+ arch = abis.ARM_64
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+ with self.assertRaises(list_verification.DeviceOSError) as cm:
+ list_verification.PathToDexForPlatformVersion(device, package_name)
+ message = str(cm.exception)
+ self.assertIn('Expected exactly one path for', message)
+
+ def testPathToDexForPlatformVersion_dalvikApiLevel(self):
+ sdk_int = version_codes.KITKAT
+ paths_to_apk = ['/some/path']
+ package_name = 'package.name'
+ arch = abis.ARM_64
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+
+ with self.assertRaises(list_verification.UnsupportedDeviceError) as _:
+ list_verification.PathToDexForPlatformVersion(device, package_name)
+
+ def testPathToDexForPlatformVersion_lollipopArm(self):
+ sdk_int = version_codes.LOLLIPOP
+ package_name = 'package.name'
+ paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+ arch = 'arm'
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+ device.FileExists = mock.MagicMock(return_value=True)
+
+ odex_file = list_verification.PathToDexForPlatformVersion(device,
+ package_name)
+ self.assertEqual(odex_file,
+ ('/data/dalvik-cache/arm/data@app'
+ '@package.name-1@base.apk@classes.dex'))
+
+  def testPathToDexForPlatformVersion_marshmallowArm(self):
+ sdk_int = version_codes.MARSHMALLOW
+ package_name = 'package.name'
+ paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+ arch = 'arm'
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+ device.FileExists = mock.MagicMock(return_value=True)
+
+ odex_file = list_verification.PathToDexForPlatformVersion(device,
+ package_name)
+ self.assertEqual(odex_file,
+ '/some/path/package.name-1/oat/arm/base.odex')
+
+  def testPathToDexForPlatformVersion_marshmallowArm64(self):
+ sdk_int = version_codes.MARSHMALLOW
+ package_name = 'package.name'
+ paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+ arch = abis.ARM_64
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+ device.FileExists = mock.MagicMock(return_value=True)
+
+ odex_file = list_verification.PathToDexForPlatformVersion(device,
+ package_name)
+ self.assertEqual(odex_file,
+ '/some/path/package.name-1/oat/arm64/base.odex')
+
+ def testPathToDexForPlatformVersion_pieNoOdexFile(self):
+ sdk_int = version_codes.PIE
+ package_name = 'package.name'
+ paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+ arch = abis.ARM_64
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+ device.FileExists = mock.MagicMock(return_value=False)
+
+ with self.assertRaises(list_verification.DeviceOSError) as cm:
+ list_verification.PathToDexForPlatformVersion(device, package_name)
+ message = str(cm.exception)
+ self.assertIn('you must run dex2oat on debuggable apps on >= P', message)
+
+ def testPathToDexForPlatformVersion_lowerApiLevelNoOdexFile(self):
+ sdk_int = version_codes.MARSHMALLOW
+ package_name = 'package.name'
+ paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
+ arch = abis.ARM_64
+
+ device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
+ device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
+ device.FileExists = mock.MagicMock(return_value=False)
+
+ with self.assertRaises(list_verification.DeviceOSError) as _:
+ list_verification.PathToDexForPlatformVersion(device, package_name)
+
+ def testListClasses_noProguardMap(self):
+ oatdump_output = [
+ _CreateOdexLine('a.b.JavaClass1', 6, 'StatusVerified'),
+ _CreateOdexLine('a.b.JavaClass2', 7,
+ 'StatusRetryVerificationAtRuntime'),
+ ]
+
+ classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+ None)
+ self.assertEqual(2, len(classes))
+ java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+ java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+ self.assertEqual(java_class_1.verification_status, 'Verified')
+ self.assertEqual(java_class_2.verification_status,
+ 'RetryVerificationAtRuntime')
+
+ def testListClasses_proguardMap(self):
+ oatdump_output = [
+ _CreateOdexLine('a.b.ObfuscatedJavaClass1', 6, 'StatusVerified'),
+ _CreateOdexLine('a.b.ObfuscatedJavaClass2', 7,
+ 'StatusRetryVerificationAtRuntime'),
+ ]
+
+ mapping = {
+ 'a.b.ObfuscatedJavaClass1': 'a.b.JavaClass1',
+ 'a.b.ObfuscatedJavaClass2': 'a.b.JavaClass2',
+ }
+ classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+ mapping)
+ self.assertEqual(2, len(classes))
+ java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+ java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+ self.assertEqual(java_class_1.verification_status, 'Verified')
+ self.assertEqual(java_class_2.verification_status,
+ 'RetryVerificationAtRuntime')
+
+ def testListClasses_noStatusPrefix(self):
+ oatdump_output = [
+ _CreateOdexLine('a.b.JavaClass1', 6, 'Verified'),
+ _CreateOdexLine('a.b.JavaClass2', 7, 'RetryVerificationAtRuntime'),
+ ]
+
+ classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
+ None)
+ self.assertEqual(2, len(classes))
+ java_class_1 = _ClassForName('a.b.JavaClass1', classes)
+ java_class_2 = _ClassForName('a.b.JavaClass2', classes)
+ self.assertEqual(java_class_1.verification_status, 'Verified')
+ self.assertEqual(java_class_2.verification_status,
+ 'RetryVerificationAtRuntime')
+
+if __name__ == '__main__':
+ # Suppress logging messages.
+ unittest.main(buffer=True)
diff --git a/deps/v8/build/android/main_dex_classes.flags b/deps/v8/build/android/main_dex_classes.flags
new file mode 100644
index 0000000000..9163c5097f
--- /dev/null
+++ b/deps/v8/build/android/main_dex_classes.flags
@@ -0,0 +1,61 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Proguard flags for what should be kept in the main dex. Only used
+# during main dex list determination, not during actual proguarding.
+
+-keep @org.chromium.base.annotations.MainDex class * {
+ *;
+}
+
+-keepclasseswithmembers class * {
+ @org.chromium.base.annotations.MainDex <methods>;
+}
+
+# Assume all IDL-generated classes should be kept. They can't reference other
+# non-framework classes, so fairly low-risk.
+-keepclasseswithmembers class * {
+ public static ** asInterface(android.os.IBinder);
+}
+
+# Required when code coverage is enabled.
+-keep class com.vladium.** {
+ *;
+}
+
+# Renderers / GPU process don't load secondary dex.
+-keep public class * extends org.chromium.base.process_launcher.ChildProcessService {
+ *;
+}
+
+# WebView doesn't load secondary dex.
+-keep public class com.android.webview.** {
+ *;
+}
+
+-keep public class org.chromium.android_webview.** {
+ *;
+}
+
+# Used by tests for secondary dex extraction.
+-keep class android.support.v4.content.ContextCompat {
+ *;
+}
+
+# The following are based on $SDK_BUILD_TOOLS/mainDexClasses.rules
+# Ours differs in that it:
+# 1. Omits -keeps for application / instrumentation / backupagents (these are
+#    redundant since they are added by aapt's main dex list rules output).
+# 2. Omits the keep for Application.attachBaseContext(), which is overly broad.
+# 3. Omits the keep for all annotations, which is also overly broad (and pulls
+#    in any class that has an @IntDef).
+
+######## START mainDexClasses.rules ########
+
+# Keep old-fashioned tests in the main dex or they'll be silently ignored by InstrumentationTestRunner
+-keep public class * extends android.test.InstrumentationTestCase {
+ <init>();
+}
+
+######## END mainDexClasses.rules ########
diff --git a/deps/v8/build/android/method_count.py b/deps/v8/build/android/method_count.py
new file mode 100755
index 0000000000..490887adbc
--- /dev/null
+++ b/deps/v8/build/android/method_count.py
@@ -0,0 +1,116 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import devil_chromium
+from devil.android.sdk import dexdump
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+ 'common'))
+import perf_tests_results_helper # pylint: disable=import-error
+
+# Example dexdump output:
+# DEX file header:
+# magic : 'dex\n035\0'
+# checksum : b664fc68
+# signature : ae73...87f1
+# file_size : 4579656
+# header_size : 112
+# link_size : 0
+# link_off : 0 (0x000000)
+# string_ids_size : 46148
+# string_ids_off : 112 (0x000070)
+# type_ids_size : 5730
+# type_ids_off : 184704 (0x02d180)
+# proto_ids_size : 8289
+# proto_ids_off : 207624 (0x032b08)
+# field_ids_size : 17854
+# field_ids_off : 307092 (0x04af94)
+# method_ids_size : 33699
+# method_ids_off : 449924 (0x06dd84)
+# class_defs_size : 2616
+# class_defs_off : 719516 (0x0afa9c)
+# data_size : 3776428
+# data_off : 803228 (0x0c419c)
+
+# For what these mean, refer to:
+# https://source.android.com/devices/tech/dalvik/dex-format.html
+
+
+CONTRIBUTORS_TO_DEX_CACHE = {'type_ids_size': 'types',
+ 'string_ids_size': 'strings',
+ 'method_ids_size': 'methods',
+ 'field_ids_size': 'fields'}
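+
+# Worked example using the example dexdump output above: (46148 strings +
+# 5730 types + 33699 methods + 17854 fields) references * 4 bytes each
+# = 413,724 bytes of DexCache.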
+
+
+def _ExtractSizesFromDexFile(dex_path):
+  """Returns (header counts, DexCache size in bytes) for a single dex file."""
+  counts = {}
+ for line in dexdump.DexDump(dex_path, file_summary=True):
+ if not line.strip():
+ # Each method, type, field, and string contributes 4 bytes (1 reference)
+ # to our DexCache size.
+ return counts, sum(counts[x] for x in CONTRIBUTORS_TO_DEX_CACHE) * 4
+ m = re.match(r'([a-z_]+_size) *: (\d+)', line)
+ if m and m.group(1) in CONTRIBUTORS_TO_DEX_CACHE:
+ counts[m.group(1)] = int(m.group(2))
+ raise Exception('Unexpected end of output.')
+
+
+def ExtractSizesFromZip(path):
+  """Returns (per-dex header counts, total DexCache size) for a zip of dexes."""
+  tmpdir = tempfile.mkdtemp(suffix='_dex_extract')
+ try:
+ counts = {}
+ total = 0
+ with zipfile.ZipFile(path, 'r') as z:
+ for subpath in z.namelist():
+ if not subpath.endswith('.dex'):
+ continue
+ extracted_path = z.extract(subpath, tmpdir)
+ cur_counts, cur_total = _ExtractSizesFromDexFile(extracted_path)
+ dex_basename = os.path.basename(extracted_path)
+ counts[dex_basename] = cur_counts
+ total += cur_total
+ return counts, total
+ finally:
+ shutil.rmtree(tmpdir)
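+
+# For an APK containing classes.dex and classes2.dex this returns, roughly:
+#   ({'classes.dex': {...}, 'classes2.dex': {...}}, <summed DexCache bytes>)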
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('filename')
+
+ args = parser.parse_args()
+
+ devil_chromium.Initialize()
+
+ if os.path.splitext(args.filename)[1] in ('.zip', '.apk', '.jar'):
+ sizes, total_size = ExtractSizesFromZip(args.filename)
+ else:
+ single_set_of_sizes, total_size = _ExtractSizesFromDexFile(args.filename)
+ sizes = {"": single_set_of_sizes}
+
+ file_basename = os.path.basename(args.filename)
+ for classes_dex_file, classes_dex_sizes in sizes.iteritems():
+ for dex_header_name, readable_name in CONTRIBUTORS_TO_DEX_CACHE.iteritems():
+ if dex_header_name in classes_dex_sizes:
+ perf_tests_results_helper.PrintPerfResult(
+ '%s_%s_%s' % (file_basename, classes_dex_file, readable_name),
+ 'total', [classes_dex_sizes[dex_header_name]], readable_name)
+
+ perf_tests_results_helper.PrintPerfResult(
+ '%s_DexCache_size' % (file_basename), 'total', [total_size],
+ 'bytes of permanent dirty memory')
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/multidex.flags b/deps/v8/build/android/multidex.flags
new file mode 100644
index 0000000000..e3543c1324
--- /dev/null
+++ b/deps/v8/build/android/multidex.flags
@@ -0,0 +1,8 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# When multidex is enabled, need to keep the @MainDex annotation so that it
+# can be used to create the main dex list.
+-keepattributes *Annotations*
+-keep @interface org.chromium.base.annotations.MainDex
diff --git a/deps/v8/build/android/play_services/__init__.py b/deps/v8/build/android/play_services/__init__.py
new file mode 100644
index 0000000000..50b23dff63
--- /dev/null
+++ b/deps/v8/build/android/play_services/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/play_services/preprocess.py b/deps/v8/build/android/play_services/preprocess.py
new file mode 100755
index 0000000000..bb3424a80d
--- /dev/null
+++ b/deps/v8/build/android/play_services/preprocess.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Prepares the Google Play services split client libraries before usage by
+Chrome's build system.
+
+We need to preprocess Google Play services before using it in Chrome builds
+mostly to remove unused resources (unsupported languages, unused drawables,
+etc.) as proper resource shrinking is not yet supported by our build system.
+(See https://crbug.com/636448)
+
+The script is meant to be used with an unpacked library repository. One can
+be obtained by downloading the "extra-google-m2repository" from the Android SDK
+Manager and extracting the AARs from the desired version as the following
+structure:
+
+ REPOSITORY_DIR
+ +-- CLIENT_1
+ | +-- <content of the first AAR file>
+ +-- CLIENT_2
+ +-- etc.
+
+The output will follow the same structure, with fewer resource files, in the
+provided output directory.
+'''
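+
+# Example invocation (all paths are illustrative):
+#   play_services/preprocess.py -r /path/to/REPOSITORY_DIR -d . \
+#       -o out/play_services -g play_services.gni -c config.json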
+
+import argparse
+import glob
+import itertools
+import os
+import shutil
+import stat
+import sys
+import tempfile
+import textwrap
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from play_services import utils
+from pylib.utils import argparse_utils
+
+
+def main():
+ parser = argparse.ArgumentParser(description=(
+ "Prepares the Google Play services split client libraries before usage "
+ "by Chrome's build system. See the script's documentation for more a "
+ "detailed help."))
+ argparse_utils.CustomHelpAction.EnableFor(parser)
+ required_args = parser.add_argument_group('required named arguments')
+ required_args.add_argument('-r',
+ '--repository',
+ help=('the Google Play services repository '
+ 'location'),
+ required=True,
+ metavar='FILE')
+ required_args.add_argument('-d',
+ '--root-dir',
+ help='the directory which GN considers the root',
+ required=True,
+ metavar='FILE')
+ required_args.add_argument('-o',
+ '--out-dir',
+ help='the output directory',
+ required=True,
+ metavar='FILE')
+ required_args.add_argument('-g',
+ '--gni-out-file',
+ help='the GN output file',
+ required=True,
+ metavar='FILE')
+ required_args.add_argument('-c',
+ '--config-file',
+ help='the config file path',
+ required=True,
+ metavar='FILE')
+ parser.add_argument('--config-help',
+ action='custom_help',
+ custom_help_text=utils.ConfigParser.__doc__,
+ help='show the configuration file format help')
+
+ args = parser.parse_args()
+
+ return ProcessGooglePlayServices(args.repository,
+ args.root_dir,
+ args.out_dir,
+ args.gni_out_file,
+ args.config_file)
+
+
+def ProcessGooglePlayServices(
+ repo, root_dir, out_dir, gni_out_file, config_path):
+ config = utils.ConfigParser(config_path)
+
+ tmp_root = tempfile.mkdtemp()
+ try:
+ tmp_paths = _SetupTempDir(tmp_root)
+ _ImportFromExtractedRepo(config, tmp_paths, repo)
+ _ProcessResources(config, tmp_paths, repo)
+ _CopyToOutput(tmp_paths, out_dir)
+ _EnumerateProguardFiles(root_dir, out_dir, gni_out_file)
+ _UpdateVersionInConfig(config, tmp_paths)
+ finally:
+ shutil.rmtree(tmp_root)
+
+ return 0
+
+
+def _SetupTempDir(tmp_root):
+ tmp_paths = {
+ 'root': tmp_root,
+ 'imported_clients': os.path.join(tmp_root, 'imported_clients'),
+ 'extracted_jars': os.path.join(tmp_root, 'jar'),
+ 'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
+ }
+ os.mkdir(tmp_paths['imported_clients'])
+ os.mkdir(tmp_paths['extracted_jars'])
+
+ return tmp_paths
+
+
+def _MakeWritable(dir_path):
+ for root, dirs, files in os.walk(dir_path):
+ for path in itertools.chain(dirs, files):
+ st = os.stat(os.path.join(root, path))
+ os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)
+
+
+# E.g. turn "base_1p" into "base"
+def _RemovePartySuffix(client):
+ return client[:-3] if client[-3:] == '_1p' else client
+
+
+def _ImportFromExtractedRepo(config, tmp_paths, repo):
+ # Import the clients
+ try:
+ for client in config.clients:
+ client_out_dir = os.path.join(tmp_paths['imported_clients'], client)
+ shutil.copytree(os.path.join(repo, client), client_out_dir)
+ finally:
+ _MakeWritable(tmp_paths['imported_clients'])
+
+
+def _ProcessResources(config, tmp_paths, repo):
+ LOCALIZED_VALUES_BASE_NAME = 'values-'
+ locale_whitelist = set(config.locale_whitelist)
+
+ # The directory structure here is:
+ # <imported_clients temp dir>/<client name>_1p/res/<res type>/<res file>.xml
+ for client_dir in os.listdir(tmp_paths['imported_clients']):
+ client_prefix = _RemovePartySuffix(client_dir) + '_'
+
+ res_path = os.path.join(tmp_paths['imported_clients'], client_dir, 'res')
+ if not os.path.isdir(res_path):
+ continue
+
+ for res_type in os.listdir(res_path):
+ res_type_path = os.path.join(res_path, res_type)
+
+ if res_type.startswith('drawable'):
+ shutil.rmtree(res_type_path)
+ continue
+
+ if res_type.startswith(LOCALIZED_VALUES_BASE_NAME):
+ dir_locale = res_type[len(LOCALIZED_VALUES_BASE_NAME):]
+ if dir_locale not in locale_whitelist:
+ shutil.rmtree(res_type_path)
+ continue
+
+ if res_type.startswith('values'):
+ # Beginning with v3, resource file names are not necessarily unique,
+ # and would overwrite each other when merged at build time. Prefix each
+ # "values" resource file with its client name.
+ for res_file in os.listdir(res_type_path):
+ os.rename(os.path.join(res_type_path, res_file),
+ os.path.join(res_type_path, client_prefix + res_file))
+
+ # Reimport files from the whitelist.
+ for res_path in config.resource_whitelist:
+ for whitelisted_file in glob.glob(os.path.join(repo, res_path)):
+ resolved_file = os.path.relpath(whitelisted_file, repo)
+ rebased_res = os.path.join(tmp_paths['imported_clients'], resolved_file)
+
+ if not os.path.exists(os.path.dirname(rebased_res)):
+ os.makedirs(os.path.dirname(rebased_res))
+
+ try:
+ shutil.copy(os.path.join(repo, whitelisted_file), rebased_res)
+ finally:
+ _MakeWritable(rebased_res)
+
+
+def _CopyToOutput(tmp_paths, out_dir):
+ shutil.rmtree(out_dir, ignore_errors=True)
+ shutil.copytree(tmp_paths['imported_clients'], out_dir)
+
+
+# Write a GN file containing a list of each GMS client's proguard file (if any).
+def _EnumerateProguardFiles(root_dir, out_dir, gni_path):
+ gni_dir = os.path.dirname(gni_path)
+ gni_template = textwrap.dedent('''\
+ # Copyright 2017 The Chromium Authors. All rights reserved.
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
+ # This file generated by {script}
+ gms_proguard_configs = [
+ {body}
+ ]
+ ''')
+
+ gni_lines = []
+ for client_dir in os.listdir(out_dir):
+ proguard_path = os.path.join(
+ out_dir, client_dir, 'proguard.txt')
+ if os.path.exists(proguard_path):
+ rooted_path = os.path.relpath(proguard_path, root_dir)
+ gni_lines.append(' "//{}",'.format(rooted_path))
+ gni_lines.sort()
+
+ gni_text = gni_template.format(
+ script=os.path.relpath(sys.argv[0], gni_dir),
+ body='\n'.join(gni_lines))
+
+ with open(gni_path, 'w') as gni_file:
+ gni_file.write(gni_text)
+
+
+def _UpdateVersionInConfig(config, tmp_paths):
+ version_xml_path = os.path.join(tmp_paths['imported_clients'],
+ config.version_xml_path)
+ play_services_full_version = utils.GetVersionNumberFromLibraryResources(
+ version_xml_path)
+ config.UpdateVersionNumber(play_services_full_version)
+
+
+def _ExtractAll(zip_path, out_path):
+ with zipfile.ZipFile(zip_path, 'r') as zip_file:
+ zip_file.extractall(out_path)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/play_services/utils.py b/deps/v8/build/android/play_services/utils.py
new file mode 100644
index 0000000000..76b3679957
--- /dev/null
+++ b/deps/v8/build/android/play_services/utils.py
@@ -0,0 +1,144 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''
+Utility functions for manipulating Google Play services-related files.
+'''
+
+import argparse
+import filecmp
+import json
+import os
+import re
+
+
+_XML_VERSION_NUMBER_PATTERN = re.compile(
+ r'<integer name="google_play_services_version">(\d+)<\/integer>')
+
+
+class DefaultsRawHelpFormatter(argparse.ArgumentDefaultsHelpFormatter,
+ argparse.RawDescriptionHelpFormatter):
+ '''
+ Combines the features of RawDescriptionHelpFormatter and
+ ArgumentDefaultsHelpFormatter, providing defaults for the arguments and raw
+ text for the description.
+ '''
+ pass
+
+
+class ConfigParser(object):
+  '''Reads and writes the configuration files for Play services-related scripts.
+
+ The configuration files are JSON files. Here is the data they are expected
+ to contain:
+
+ - version_number
+ Number. Mirrors @integer/google_play_services_version from the library.
+ Example: 815000
+
+ - sdk_version
+ Version of the Play Services SDK to retrieve, when preprocessing the
+ library from a maven/gradle repository.
+ Example: "8.1.0"
+
+ - clients
+ List of strings. Name of the clients (or play services modules) to
+ include when preprocessing the library.
+ Example: ["play-services-base", "play-services-cast"]
+
+ - version_xml_path
+      String. Path to the version.xml file describing the current version.
+      Should be relative to the library base directory.
+ Example: "res/values/version.xml"
+
+ - locale_whitelist
+ List of strings. List of locales to keep from the resources. Can be
+ obtained by generating an android build and looking at the content of
+ `out/Debug/gen/chrome/java/res`; or looking at the android section in
+ `//chrome/app/generated_resources.grd`
+ Example: ["am", "ar", "bg", "ca", "cs"]
+
+ - resource_whitelist
+      List of strings. List of resource files to explicitly keep in the final
+      output. Use it to keep drawables, for example, as we currently remove
+      them all.
+ Example: ["play-services-base/res/drawables/foobar.xml"]
+ '''
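+  # A minimal configuration matching the fields above might look like this
+  # (values are illustrative):
+  #   {
+  #     "version_number": 815000,
+  #     "sdk_version": "8.1.0",
+  #     "clients": ["play-services-base"],
+  #     "version_xml_path": "res/values/version.xml",
+  #     "locale_whitelist": ["am", "ar"],
+  #     "resource_whitelist": []
+  #   }
+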
+ _VERSION_NUMBER_KEY = 'version_number'
+
+ def __init__(self, path):
+ self.path = path
+ self._data = {}
+
+ with open(path, 'r') as stream:
+ self._data = json.load(stream)
+
+ @property
+ def version_number(self):
+ return self._data.get(self._VERSION_NUMBER_KEY)
+
+ @property
+ def sdk_version(self):
+ return self._data.get('sdk_version')
+
+ @property
+ def clients(self):
+ return self._data.get('clients') or []
+
+ @property
+ def version_xml_path(self):
+ return self._data.get('version_xml_path')
+
+ @property
+ def locale_whitelist(self):
+ return self._data.get('locale_whitelist') or []
+
+ @property
+ def resource_whitelist(self):
+ return self._data.get('resource_whitelist') or []
+
+ def UpdateVersionNumber(self, new_version_number):
+ '''Updates the version number and saves it in the configuration file. '''
+
+ with open(self.path, 'w') as stream:
+ self._data[self._VERSION_NUMBER_KEY] = new_version_number
+ stream.write(DumpTrimmedJson(self._data))
+
+
+def DumpTrimmedJson(json_data):
+ '''
+ Default formatting when dumping json to string has trailing spaces and lacks
+ a new line at the end. This function fixes that.
+ '''
+
+ out = json.dumps(json_data, sort_keys=True, indent=2)
+ out = out.replace(' ' + os.linesep, os.linesep)
+ return out + os.linesep
+
+
+def FileEquals(expected_file, actual_file):
+ '''
+ Returns whether the two files are equal. Returns False if any of the files
+ doesn't exist.
+ '''
+
+ if not os.path.isfile(actual_file) or not os.path.isfile(expected_file):
+ return False
+ return filecmp.cmp(expected_file, actual_file)
+
+
+def GetVersionNumberFromLibraryResources(version_xml):
+ '''
+ Extracts a Google Play services version number from its version.xml file.
+ '''
+
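+  # version.xml is expected to contain a line such as:
+  #   <integer name="google_play_services_version">815000</integer>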
+ with open(version_xml, 'r') as version_file:
+ version_file_content = version_file.read()
+
+ match = _XML_VERSION_NUMBER_PATTERN.search(version_file_content)
+ if not match:
+ raise AttributeError('A value for google_play_services_version was not '
+ 'found in ' + version_xml)
+ return int(match.group(1))
diff --git a/deps/v8/build/android/provision_devices.py b/deps/v8/build/android/provision_devices.py
new file mode 100755
index 0000000000..ecf22c9a56
--- /dev/null
+++ b/deps/v8/build/android/provision_devices.py
@@ -0,0 +1,561 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+ ./provision_devices.py [-d <device serial number>]
+"""
+
+import argparse
+import datetime
+import json
+import logging
+import os
+import posixpath
+import re
+import subprocess
+import sys
+import time
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See crbug.com/584730 and https://bugs.python.org/issue7980.
+import _strptime # pylint: disable=unused-import
+
+import devil_chromium
+from devil.android import battery_utils
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.sdk import keyevent
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+from devil.utils import run_tests_helper
+from devil.utils import timeout_retry
+from pylib import constants
+from pylib import device_settings
+from pylib.constants import host_paths
+
+_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle']
+_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*')
+_TOMBSTONE_REGEX = re.compile('tombstone.*')
+
+
+class _DEFAULT_TIMEOUTS(object):
+ # L can take a while to reboot after a wipe.
+ LOLLIPOP = 600
+ PRE_LOLLIPOP = 180
+
+ HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
+
+
+class _PHASES(object):
+ WIPE = 'wipe'
+ PROPERTIES = 'properties'
+ FINISH = 'finish'
+
+ ALL = [WIPE, PROPERTIES, FINISH]
+
+
+def ProvisionDevices(args):
+ blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+ devices = [d for d in device_utils.DeviceUtils.HealthyDevices(blacklist)
+ if not args.emulators or d.adb.is_emulator]
+ if args.device:
+ devices = [d for d in devices if d == args.device]
+ if not devices:
+ raise device_errors.DeviceUnreachableError(args.device)
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ if args.emulators:
+ parallel_devices.pMap(SetProperties, args)
+ else:
+ parallel_devices.pMap(ProvisionDevice, blacklist, args)
+ if args.auto_reconnect:
+ _LaunchHostHeartbeat()
+ blacklisted_devices = blacklist.Read() if blacklist else []
+ if args.output_device_blacklist:
+ with open(args.output_device_blacklist, 'w') as f:
+ json.dump(blacklisted_devices, f)
+ if all(d in blacklisted_devices for d in devices):
+ raise device_errors.NoDevicesError
+ return 0
+
+
+def ProvisionDevice(device, blacklist, options):
+ def should_run_phase(phase_name):
+ return not options.phases or phase_name in options.phases
+
+ def run_phase(phase_func, reboot_timeout, reboot=True):
+ try:
+ device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
+ except device_errors.CommandTimeoutError:
+ logging.error('Device did not finish booting. Will try to reboot.')
+ device.Reboot(timeout=reboot_timeout)
+ phase_func(device, options)
+ if reboot:
+ device.Reboot(False, retries=0)
+ device.adb.WaitForDevice()
+
+ try:
+ if options.reboot_timeout:
+ reboot_timeout = options.reboot_timeout
+ elif device.build_version_sdk >= version_codes.LOLLIPOP:
+ reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
+ else:
+ reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
+
+ if should_run_phase(_PHASES.WIPE):
+ if (options.chrome_specific_wipe or device.IsUserBuild() or
+ device.build_version_sdk >= version_codes.MARSHMALLOW):
+ run_phase(WipeChromeData, reboot_timeout)
+ else:
+ run_phase(WipeDevice, reboot_timeout)
+
+ if should_run_phase(_PHASES.PROPERTIES):
+ run_phase(SetProperties, reboot_timeout)
+
+ if should_run_phase(_PHASES.FINISH):
+ run_phase(FinishProvisioning, reboot_timeout, reboot=False)
+
+ if options.chrome_specific_wipe:
+ package = "com.google.android.gms"
+ version_name = device.GetApplicationVersion(package)
+ logging.info("Version name for %s is %s", package, version_name)
+
+ CheckExternalStorage(device)
+
+ except device_errors.CommandTimeoutError:
+ logging.exception('Timed out waiting for device %s. Adding to blacklist.',
+ str(device))
+ if blacklist:
+ blacklist.Extend([str(device)], reason='provision_timeout')
+
+ except (device_errors.CommandFailedError,
+ device_errors.DeviceUnreachableError):
+ logging.exception('Failed to provision device %s. Adding to blacklist.',
+ str(device))
+ if blacklist:
+ blacklist.Extend([str(device)], reason='provision_failure')
+
+def CheckExternalStorage(device):
+ """Checks that storage is writable and if not makes it writable.
+
+ Arguments:
+ device: The device to check.
+ """
+ try:
+ with device_temp_file.DeviceTempFile(
+ device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+ device.WriteFile(f.name, 'test')
+ except device_errors.CommandFailedError:
+ logging.info('External storage not writable. Remounting / as RW')
+ device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
+ check_return=True, as_root=True)
+ device.EnableRoot()
+ with device_temp_file.DeviceTempFile(
+ device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
+ device.WriteFile(f.name, 'test')
+
+def WipeChromeData(device, options):
+ """Wipes chrome specific data from device
+
+ (1) uninstall any app whose name matches *chrom*, except
+ com.android.chrome, which is the chrome stable package. Doing so also
+ removes the corresponding dirs under /data/data/ and /data/app/
+ (2) remove any dir under /data/app-lib/ whose name matches *chrom*
+ (3) remove any files under /data/tombstones/ whose name matches "tombstone*"
+ (4) remove /data/local.prop if there is any
+ (5) remove /data/local/chrome-command-line if there is any
+ (6) remove anything under /data/local/.config/ if the dir exists
+ (this is telemetry related)
+ (7) remove anything under /data/local/tmp/
+
+ Arguments:
+ device: the device to wipe
+ """
+ if options.skip_wipe:
+ return
+
+ try:
+ if device.IsUserBuild():
+ _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+ constants.PACKAGE_INFO['chrome_stable'].package)
+ device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+ check_return=True)
+ device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True)
+ else:
+ device.EnableRoot()
+ _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
+ constants.PACKAGE_INFO['chrome_stable'].package)
+ _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
+ _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)
+
+ _WipeFileOrDir(device, '/data/local.prop')
+ _WipeFileOrDir(device, '/data/local/chrome-command-line')
+ _WipeFileOrDir(device, '/data/local/.config/')
+ _WipeFileOrDir(device, '/data/local/tmp/')
+ device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
+ check_return=True)
+ except device_errors.CommandFailedError:
+ logging.exception('Possible failure while wiping the device. '
+ 'Attempting to continue.')
+
+
+def WipeDevice(device, options):
+ """Wipes data from device, keeping only the adb_keys for authorization.
+
+ After wiping data on a device that has been authorized, adb can still
+ communicate with the device, but after reboot the device will need to be
+ re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, the adb_keys file is rewritten so the device does not need to be
+ re-authorized.
+
+ Arguments:
+ device: the device to wipe
+ """
+ if options.skip_wipe:
+ return
+
+ try:
+ device.EnableRoot()
+ device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+ if device_authorized:
+ adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
+ as_root=True).splitlines()
+ device.RunShellCommand(['wipe', 'data'],
+ as_root=True, check_return=True)
+ device.adb.WaitForDevice()
+
+ if device_authorized:
+ adb_keys_set = set(adb_keys)
+ for adb_key_file in options.adb_key_files or []:
+ try:
+ with open(adb_key_file, 'r') as f:
+ adb_public_keys = f.readlines()
+ adb_keys_set.update(adb_public_keys)
+ except IOError:
+ logging.warning('Unable to find adb keys file %s.', adb_key_file)
+ _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
+ except device_errors.CommandFailedError:
+ logging.exception('Possible failure while wiping the device. '
+ 'Attempting to continue.')
+
+
+def _WriteAdbKeysFile(device, adb_keys_string):
+ dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
+ device.RunShellCommand(['mkdir', '-p', dir_path],
+ as_root=True, check_return=True)
+ device.RunShellCommand(['restorecon', dir_path],
+ as_root=True, check_return=True)
+ device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
+ device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
+ as_root=True, check_return=True)
+
+
+def SetProperties(device, options):
+ try:
+ device.EnableRoot()
+ except device_errors.CommandFailedError as e:
+ logging.warning(str(e))
+
+ if not device.IsUserBuild():
+ _ConfigureLocalProperties(device, options.enable_java_debug)
+ else:
+ logging.warning('Cannot configure properties in user builds.')
+ device_settings.ConfigureContentSettings(
+ device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+ if options.disable_location:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.DISABLE_LOCATION_SETTINGS)
+ else:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.ENABLE_LOCATION_SETTINGS)
+
+ if options.disable_mock_location:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
+ else:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
+
+ device_settings.SetLockScreenSettings(device)
+ if options.disable_network:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.NETWORK_DISABLED_SETTINGS)
+ if device.build_version_sdk >= version_codes.MARSHMALLOW:
+ # Ensure that NFC is also switched off.
+ device.RunShellCommand(['svc', 'nfc', 'disable'],
+ as_root=True, check_return=True)
+
+ if options.disable_system_chrome:
+ # The system chrome version on the device interferes with some tests.
+ device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
+ check_return=True)
+
+ if options.remove_system_webview:
+ if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS):
+ logging.info('System WebView exists and needs to be removed')
+ if device.HasRoot():
+        # Disable Marshmallow's Verity security feature.
+ if device.build_version_sdk >= version_codes.MARSHMALLOW:
+ device.adb.DisableVerity()
+ device.Reboot()
+ device.WaitUntilFullyBooted()
+ device.EnableRoot()
+
+ # This is required, e.g., to replace the system webview on a device.
+ device.adb.Remount()
+ device.RunShellCommand(['stop'], check_return=True)
+ device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS,
+ check_return=True)
+ device.RunShellCommand(['start'], check_return=True)
+ else:
+ logging.warning('Cannot remove system webview from a non-rooted device')
+ else:
+ logging.info('System WebView already removed')
+
+ # Some device types can momentarily disappear after setting properties.
+ device.adb.WaitForDevice()
+
+
+def _ConfigureLocalProperties(device, java_debug=True):
+ """Set standard readonly testing device properties prior to reboot."""
+ local_props = [
+ 'persist.sys.usb.config=adb',
+ 'ro.monkey=1',
+ 'ro.test_harness=1',
+ 'ro.audio.silent=1',
+ 'ro.setupwizard.mode=DISABLED',
+ ]
+ if java_debug:
+ local_props.append(
+ '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
+ local_props.append('debug.checkjni=1')
+ try:
+ device.WriteFile(
+ device.LOCAL_PROPERTIES_PATH,
+ '\n'.join(local_props), as_root=True)
+ # Android will not respect the local props file if it is world writable.
+ device.RunShellCommand(
+ ['chmod', '644', device.LOCAL_PROPERTIES_PATH],
+ as_root=True, check_return=True)
+ except device_errors.CommandFailedError:
+ logging.exception('Failed to configure local properties.')
+
+
+def FinishProvisioning(device, options):
+ # The lockscreen can't be disabled on user builds, so send a keyevent
+ # to unlock it.
+ if device.IsUserBuild():
+ device.SendKeyEvent(keyevent.KEYCODE_MENU)
+
+ if options.min_battery_level is not None:
+ battery = battery_utils.BatteryUtils(device)
+ try:
+ battery.ChargeDeviceToLevel(options.min_battery_level)
+ except device_errors.DeviceChargingError:
+ device.Reboot()
+ battery.ChargeDeviceToLevel(options.min_battery_level)
+
+ if options.max_battery_temp is not None:
+ try:
+ battery = battery_utils.BatteryUtils(device)
+ battery.LetBatteryCoolToTemperature(options.max_battery_temp)
+ except device_errors.CommandFailedError:
+ logging.exception('Unable to let battery cool to specified temperature.')
+
+ def _set_and_verify_date():
+ if device.build_version_sdk >= version_codes.MARSHMALLOW:
+ date_format = '%m%d%H%M%Y.%S'
+ set_date_command = ['date', '-u']
+ get_date_command = ['date', '-u']
+ else:
+ date_format = '%Y%m%d.%H%M%S'
+ set_date_command = ['date', '-s']
+ get_date_command = ['date']
+
+ # TODO(jbudorick): This is wrong on pre-M devices -- get/set are
+ # dealing in local time, but we're setting based on GMT.
+ strgmtime = time.strftime(date_format, time.gmtime())
+ set_date_command.append(strgmtime)
+ device.RunShellCommand(set_date_command, as_root=True, check_return=True)
+
+ get_date_command.append('+"%Y%m%d.%H%M%S"')
+ device_time = device.RunShellCommand(
+ get_date_command, as_root=True, single_line=True).replace('"', '')
+ device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S")
+ correct_time = datetime.datetime.strptime(strgmtime, date_format)
+ tdelta = (correct_time - device_time).seconds
+ if tdelta <= 1:
+ logging.info('Date/time successfully set on %s', device)
+ return True
+ else:
+ logging.error('Date mismatch. Device: %s Correct: %s',
+ device_time.isoformat(), correct_time.isoformat())
+ return False
+
+ # Sometimes the date is not set correctly on the devices. Retry on failure.
+ if device.IsUserBuild():
+ # TODO(bpastene): Figure out how to set the date & time on user builds.
+ pass
+ else:
+ if not timeout_retry.WaitFor(
+ _set_and_verify_date, wait_period=1, max_tries=2):
+ raise device_errors.CommandFailedError(
+ 'Failed to set date & time.', device_serial=str(device))
+
+ props = device.RunShellCommand('getprop', check_return=True)
+ for prop in props:
+ logging.info(' %s', prop)
+ if options.auto_reconnect:
+ _PushAndLaunchAdbReboot(device, options.target)
+
+
+def _UninstallIfMatch(device, pattern, app_to_keep):
+ installed_packages = device.RunShellCommand(['pm', 'list', 'packages'])
+ installed_system_packages = [
+ pkg.split(':')[1] for pkg in device.RunShellCommand(['pm', 'list',
+ 'packages', '-s'])]
+ for package_output in installed_packages:
+ package = package_output.split(":")[1]
+ if pattern.match(package) and not package == app_to_keep:
+ if not device.IsUserBuild() or package not in installed_system_packages:
+ device.Uninstall(package)
+
+
+def _WipeUnderDirIfMatch(device, path, pattern):
+ for filename in device.ListDirectory(path):
+ if pattern.match(filename):
+ _WipeFileOrDir(device, posixpath.join(path, filename))
+
+
+def _WipeFileOrDir(device, path):
+ if device.PathExists(path):
+ device.RunShellCommand(['rm', '-rf', path], check_return=True)
+
+
+def _PushAndLaunchAdbReboot(device, target):
+ """Pushes and launches the adb_reboot binary on the device.
+
+ Arguments:
+ device: The DeviceUtils instance for the device to which the adb_reboot
+ binary should be pushed.
+ target: The build target (example, Debug or Release) which helps in
+ locating the adb_reboot binary.
+ """
+ logging.info('Will push and launch adb_reboot on %s', str(device))
+ # Kill if adb_reboot is already running.
+ device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
+ # Push adb_reboot
+ logging.info(' Pushing adb_reboot ...')
+ adb_reboot = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'out/%s/adb_reboot' % target)
+ device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
+ # Launch adb_reboot
+ logging.info(' Launching adb_reboot ...')
+ device.RunShellCommand(
+ ['/data/local/tmp/adb_reboot'],
+ check_return=True)
+
+
+def _LaunchHostHeartbeat():
+ # Kill if existing host_heartbeat
+ KillHostHeartbeat()
+ # Launch a new host_heartbeat
+ logging.info('Spawning host heartbeat...')
+ subprocess.Popen([os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'build/android/host_heartbeat.py')])
+
+def KillHostHeartbeat():
+ ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+ stdout, _ = ps.communicate()
+ matches = re.findall('\\n.*host_heartbeat.*', stdout)
+ for match in matches:
+    logging.info('An instance of host_heartbeat is running... will kill')
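+    # In `ps aux` output the second whitespace-separated column is the PID,
+    # hence the [1] index below.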
+ pid = re.findall(r'(\S+)', match)[1]
+ subprocess.call(['kill', str(pid)])
+
+def main():
+ # Recommended options on perf bots:
+ # --disable-network
+ # TODO(tonyg): We eventually want network on. However, currently radios
+ # can cause perfbots to drain faster than they charge.
+ # --min-battery-level 95
+ # Some perf bots run benchmarks with USB charging disabled which leads
+ # to gradual draining of the battery. We must wait for a full charge
+ # before starting a run in order to keep the devices online.
+
+ parser = argparse.ArgumentParser(
+ description='Provision Android devices with settings required for bots.')
+ parser.add_argument('-d', '--device', metavar='SERIAL',
+ help='the serial number of the device to be provisioned'
+ ' (the default is to provision all devices attached)')
+ parser.add_argument('--adb-path',
+ help='Absolute path to the adb binary to use.')
+ parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
+ parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
+ dest='phases',
+ help='Phases of provisioning to run. '
+ '(If omitted, all phases will be run.)')
+ parser.add_argument('--skip-wipe', action='store_true', default=False,
+ help="don't wipe device data during provisioning")
+ parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
+ help='when wiping the device, max number of seconds to'
+ ' wait after each reboot '
+ '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
+ parser.add_argument('--min-battery-level', type=int, metavar='NUM',
+ help='wait for the device to reach this minimum battery'
+ ' level before trying to continue')
+ parser.add_argument('--disable-location', action='store_true',
+ help='disable Google location services on devices')
+ parser.add_argument('--disable-mock-location', action='store_true',
+ default=False, help='Set ALLOW_MOCK_LOCATION to false')
+ parser.add_argument('--disable-network', action='store_true',
+ help='disable network access on devices')
+ parser.add_argument('--disable-java-debug', action='store_false',
+ dest='enable_java_debug', default=True,
+ help='disable Java property asserts and JNI checking')
+ parser.add_argument('--disable-system-chrome', action='store_true',
+                      help='Disable the system Chrome on devices.')
+ parser.add_argument('--remove-system-webview', action='store_true',
+ help='Remove the system webview from devices.')
+ parser.add_argument('-t', '--target', default='Debug',
+ help='the build target (default: %(default)s)')
+ parser.add_argument('-r', '--auto-reconnect', action='store_true',
+                      help='push a binary which will reboot the device on'
+                           ' adb disconnections')
+ parser.add_argument('--adb-key-files', type=str, nargs='+',
+ help='list of adb keys to push to device')
+ parser.add_argument('-v', '--verbose', action='count', default=1,
+ help='Log more information.')
+ parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
+ help='Wait for the battery to have this temp or lower.')
+ parser.add_argument('--output-device-blacklist',
+                      help='JSON file to output the device blacklist.')
+ parser.add_argument('--chrome-specific-wipe', action='store_true',
+ help='only wipe chrome specific data during provisioning')
+ parser.add_argument('--emulators', action='store_true',
+ help='provision only emulators and ignore usb devices')
+ args = parser.parse_args()
+ constants.SetBuildType(args.target)
+
+ run_tests_helper.SetLogLevel(args.verbose)
+
+ devil_chromium.Initialize(adb_path=args.adb_path)
+
+ try:
+ return ProvisionDevices(args)
+ except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError):
+ logging.exception('Unable to provision local devices.')
+ return exit_codes.INFRA
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/pylib/OWNERS b/deps/v8/build/android/pylib/OWNERS
new file mode 100644
index 0000000000..f008c99765
--- /dev/null
+++ b/deps/v8/build/android/pylib/OWNERS
@@ -0,0 +1,6 @@
+jbudorick@chromium.org
+klundberg@chromium.org
+navabi@chromium.org
+skyostil@chromium.org
+
+# COMPONENT: Test>Android
diff --git a/deps/v8/build/android/pylib/__init__.py b/deps/v8/build/android/pylib/__init__.py
new file mode 100644
index 0000000000..b93eb4fe0b
--- /dev/null
+++ b/deps/v8/build/android/pylib/__init__.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+_CATAPULT_PATH = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..', '..', 'third_party', 'catapult'))
+
+_DEVIL_PATH = os.path.join(_CATAPULT_PATH, 'devil')
+
+_PYTRACE_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_trace_event')
+
+_PY_UTILS_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_utils')
+
+_TRACE2HTML_PATH = os.path.join(_CATAPULT_PATH, 'tracing')
+
+
+if _DEVIL_PATH not in sys.path:
+ sys.path.append(_DEVIL_PATH)
+
+if _PYTRACE_PATH not in sys.path:
+ sys.path.append(_PYTRACE_PATH)
+
+if _PY_UTILS_PATH not in sys.path:
+ sys.path.append(_PY_UTILS_PATH)
+
+if _TRACE2HTML_PATH not in sys.path:
+ sys.path.append(_TRACE2HTML_PATH)
diff --git a/deps/v8/build/android/pylib/android/__init__.py b/deps/v8/build/android/pylib/android/__init__.py
new file mode 100644
index 0000000000..a67c3501b2
--- /dev/null
+++ b/deps/v8/build/android/pylib/android/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/android/logcat_symbolizer.py b/deps/v8/build/android/pylib/android/logcat_symbolizer.py
new file mode 100644
index 0000000000..720629b989
--- /dev/null
+++ b/deps/v8/build/android/pylib/android/logcat_symbolizer.py
@@ -0,0 +1,98 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+from devil.android import logcat_monitor
+
+BACKTRACE_LINE_RE = re.compile(r'#\d+')
+THREADTIME_RE = re.compile(
+ logcat_monitor.LogcatMonitor.THREADTIME_RE_FORMAT % (
+ r' *\S* *', r' *\S* *', r' *\S* *', r' *\S* *', r'.*'))
+
+def SymbolizeLogcat(logcat, dest, symbolizer, abi):
+ """Symbolize stack trace in the logcat.
+
+ Symbolize the logcat and write the symbolized logcat to a new file.
+
+ Args:
+ logcat: Path to logcat file.
+ dest: Path to where to write the symbolized logcat.
+    symbolizer: The stack symbolizer used to symbolize stack traces in the
+      logcat.
+    abi: The device's product_cpu_abi; the symbolizer needs it to symbolize.
+
+ A sample logcat that needs to be symbolized, after stripping the prefix,
+ such as '08-07 18:39:37.692 28649 28649 E Ion : ', would be:
+ Build fingerprint: 'google/shamu/shamu:7.1.1/NMF20B/3370:userdebug/dev-keys'
+ Revision: '0'
+ ABI: 'arm'
+ pid: 28936, tid: 28936, name: chromium.chrome >>> org.chromium.chrome <<<
+ signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------
+ Abort message: '[FATAL:debug_urls.cc(151)] Check failed: false.
+ #00 0x63e16c41 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0006cc4
+ #01 0x63f19be3 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016fbe
+ #02 0x63f19737 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016f73
+ #03 0x63f18ddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016edd
+ #04 0x63f18b79 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016eb7
+ #05 0xab53f319 /system/lib/libart.so+0x000a3319
+ #06
+ r0 00000000 r1 00007108 r2 00000006 r3 00000008
+ r4 ae60258c r5 00000006 r6 ae602534 r7 0000010c
+ r8 bede5cd0 r9 00000030 sl 00000000 fp 9265a800
+ ip 0000000b sp bede5c38 lr ac8e5537 pc ac8e7da0 cpsr 600f0010
+
+ backtrace:
+ #00 pc 00049da0 /system/lib/libc.so (tgkill+12)
+ #01 pc 00047533 /system/lib/libc.so (pthread_kill+34)
+ #02 pc 0001d635 /system/lib/libc.so (raise+10)
+ #03 pc 00019181 /system/lib/libc.so (__libc_android_abort+34)
+ #04 pc 00017048 /system/lib/libc.so (abort+4)
+ #05 pc 00948605 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+ #06 pc 002c9f73 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+ #07 pc 003ccbe1 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+ #08 pc 003cc735 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+ #09 pc 003cbddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+ #10 pc 003cbb77 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
+ """
+
+ with open(logcat) as logcat_file:
+ with open(dest, 'w') as dest_file:
+      # The current stack script will only print out the symbolized stack,
+      # and completely ignores logs other than the crash log used for
+      # symbolization, if one exists. Thus the code here extracts the
+      # crash log from the logcat and passes only the crash log to the
+      # script, because we don't want to lose other information in the
+      # logcat that, if passed to the stack script, would just be ignored.
+ # TODO(crbug.com/755225): Rewrite the logic here.
+ outside_of_crash_log = True
+ in_lower_half_crash = False
+ data_to_symbolize = []
+
+ for line in logcat_file:
+ if outside_of_crash_log:
+ # Check whether it is the start of crash log.
+ if 'Build fingerprint: ' in line:
+ outside_of_crash_log = False
+ # Only include necessary information for symbolization.
+ # The logic here that removes date, time, proc_id etc.
+ # should be in sync with _THREADTIME_RE_FORMAT in logcat_monitor.
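+            # Note: group(7) is assumed here to be the free-form message
+            # field of the threadtime format (the preceding groups cover
+            # date, time, pid, tid, priority and tag).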
+ data_to_symbolize.append(
+ re.search(THREADTIME_RE, line).group(7))
+ else:
+ dest_file.write(line)
+ else:
+ # Once we have reached the end of the backtrace section,
+ # we will start symbolizing.
+ if in_lower_half_crash and not bool(BACKTRACE_LINE_RE.search(line)):
+ outside_of_crash_log = True
+ in_lower_half_crash = False
+ symbolized_lines = symbolizer.ExtractAndResolveNativeStackTraces(
+ data_to_symbolize, abi)
+ dest_file.write('\n'.join(symbolized_lines) + '\n' + line)
+ data_to_symbolize = []
+ else:
+ if not in_lower_half_crash and 'backtrace:' in line:
+ in_lower_half_crash = True
+ data_to_symbolize.append(
+ re.search(THREADTIME_RE, line).group(7))
diff --git a/deps/v8/build/android/pylib/base/__init__.py b/deps/v8/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/base/base_test_result.py b/deps/v8/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000000..bb25a74186
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,262 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+import threading
+
+
+class ResultType(object):
+ """Class enumerating test types."""
+ # The test passed.
+ PASS = 'SUCCESS'
+
+ # The test was intentionally skipped.
+ SKIP = 'SKIPPED'
+
+ # The test failed.
+ FAIL = 'FAILURE'
+
+ # The test caused the containing process to crash.
+ CRASH = 'CRASH'
+
+ # The test timed out.
+ TIMEOUT = 'TIMEOUT'
+
+ # The test ran, but we couldn't determine what happened.
+ UNKNOWN = 'UNKNOWN'
+
+ # The test did not run.
+ NOTRUN = 'NOTRUN'
+
+ @staticmethod
+ def GetTypes():
+ """Get a list of all test types."""
+ return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+ ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN,
+ ResultType.NOTRUN]
+
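+# Illustrative usage of the API below (a sketch, not part of the original
+# file):
+#   result = BaseTestResult('FooTest.testBar', ResultType.PASS, duration=12)
+#   assert result.GetType() in ResultType.GetTypes()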
+
+class BaseTestResult(object):
+ """Base class for a single test result."""
+
+ def __init__(self, name, test_type, duration=0, log=''):
+ """Construct a BaseTestResult.
+
+ Args:
+ name: Name of the test which defines uniqueness.
+ test_type: Type of the test result as defined in ResultType.
+ duration: Time it took for the test to run in milliseconds.
+ log: An optional string listing any errors.
+ """
+ assert name
+ assert test_type in ResultType.GetTypes()
+ self._name = name
+ self._test_type = test_type
+ self._duration = duration
+ self._log = log
+ self._links = {}
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return self._name
+
+ def __cmp__(self, other):
+ # pylint: disable=W0212
+ return cmp(self._name, other._name)
+
+ def __hash__(self):
+ return hash(self._name)
+
+ def SetName(self, name):
+ """Set the test name.
+
+ Because we're putting this into a set, this should only be used if moving
+ this test result into another set.
+ """
+ self._name = name
+
+ def GetName(self):
+ """Get the test name."""
+ return self._name
+
+ def SetType(self, test_type):
+ """Set the test result type."""
+ assert test_type in ResultType.GetTypes()
+ self._test_type = test_type
+
+ def GetType(self):
+ """Get the test result type."""
+ return self._test_type
+
+ def GetDuration(self):
+ """Get the test duration."""
+ return self._duration
+
+ def SetLog(self, log):
+ """Set the test log."""
+ self._log = log
+
+ def GetLog(self):
+ """Get the test log."""
+ return self._log
+
+ def SetLink(self, name, link_url):
+ """Set link with test result data."""
+ self._links[name] = link_url
+
+ def GetLinks(self):
+ """Get dict containing links to test result data."""
+ return self._links
+
+
+class TestRunResults(object):
+ """Set of results for a test run."""
+
+ def __init__(self):
+ self._links = {}
+ self._results = set()
+ self._results_lock = threading.RLock()
+
+ def SetLink(self, name, link_url):
+ """Add link with test run results data."""
+ self._links[name] = link_url
+
+ def GetLinks(self):
+ """Get dict containing links to test run result data."""
+ return self._links
+
+ def GetLogs(self):
+ """Get the string representation of all test logs."""
+ with self._results_lock:
+ s = []
+ for test_type in ResultType.GetTypes():
+ if test_type != ResultType.PASS:
+ for t in sorted(self._GetType(test_type)):
+ log = t.GetLog()
+ if log:
+ s.append('[%s] %s:' % (test_type, t))
+ s.append(log)
+ return '\n'.join(s)
+
+ def GetGtestForm(self):
+ """Get the gtest string representation of this object."""
+ with self._results_lock:
+ s = []
+ plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+ tests = lambda n: plural(n, 'test', 'tests')
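+      # e.g. plural(1, 'test', 'tests') -> '1 test',
+      # plural(3, 'test', 'tests') -> '3 tests'.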
+
+ s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+ s.append('[ PASSED ] %s.' % (tests(len(self.GetPass()))))
+
+ skipped = self.GetSkip()
+ if skipped:
+ s.append('[ SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+ for t in sorted(skipped):
+ s.append('[ SKIPPED ] %s' % str(t))
+
+ all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+ self.GetUnknown())
+ if all_failures:
+ s.append('[ FAILED ] %s, listed below:' % tests(len(all_failures)))
+ for t in sorted(self.GetFail()):
+ s.append('[ FAILED ] %s' % str(t))
+ for t in sorted(self.GetCrash()):
+ s.append('[ FAILED ] %s (CRASHED)' % str(t))
+ for t in sorted(self.GetTimeout()):
+ s.append('[ FAILED ] %s (TIMEOUT)' % str(t))
+ for t in sorted(self.GetUnknown()):
+ s.append('[ FAILED ] %s (UNKNOWN)' % str(t))
+ s.append('')
+ s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+ return '\n'.join(s)
+
+ def GetShortForm(self):
+ """Get the short string representation of this object."""
+ with self._results_lock:
+ s = []
+ s.append('ALL: %d' % len(self._results))
+ for test_type in ResultType.GetTypes():
+ s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+ return ''.join([x.ljust(15) for x in s])
+
+ def __str__(self):
+ return self.GetGtestForm()
+
+ def AddResult(self, result):
+ """Add |result| to the set.
+
+ Args:
+ result: An instance of BaseTestResult.
+ """
+ assert isinstance(result, BaseTestResult)
+ with self._results_lock:
+ self._results.discard(result)
+ self._results.add(result)
+
+ def AddResults(self, results):
+ """Add |results| to the set.
+
+ Args:
+ results: An iterable of BaseTestResult objects.
+ """
+ with self._results_lock:
+ for t in results:
+ self.AddResult(t)
+
+ def AddTestRunResults(self, results):
+ """Add the set of test results from |results|.
+
+ Args:
+ results: An instance of TestRunResults.
+ """
+ assert isinstance(results, TestRunResults), (
+ 'Expected TestRunResult object: %s' % type(results))
+ with self._results_lock:
+ # pylint: disable=W0212
+ self._results.update(results._results)
+
+ def GetAll(self):
+ """Get the set of all test results."""
+ with self._results_lock:
+ return self._results.copy()
+
+ def _GetType(self, test_type):
+ """Get the set of test results with the given test type."""
+ with self._results_lock:
+ return set(t for t in self._results if t.GetType() == test_type)
+
+ def GetPass(self):
+ """Get the set of all passed test results."""
+ return self._GetType(ResultType.PASS)
+
+ def GetSkip(self):
+ """Get the set of all skipped test results."""
+ return self._GetType(ResultType.SKIP)
+
+ def GetFail(self):
+ """Get the set of all failed test results."""
+ return self._GetType(ResultType.FAIL)
+
+ def GetCrash(self):
+ """Get the set of all crashed test results."""
+ return self._GetType(ResultType.CRASH)
+
+ def GetTimeout(self):
+ """Get the set of all timed out test results."""
+ return self._GetType(ResultType.TIMEOUT)
+
+ def GetUnknown(self):
+ """Get the set of all unknown test results."""
+ return self._GetType(ResultType.UNKNOWN)
+
+ def GetNotPass(self):
+ """Get the set of all non-passed test results."""
+ return self.GetAll() - self.GetPass()
+
+ def DidRunPass(self):
+ """Return whether the test run was successful."""
+ return not self.GetNotPass() - self.GetSkip()
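+
+# Illustrative usage of the classes above (a sketch, not part of the original
+# file). Note that skips alone do not fail a run:
+#   run = TestRunResults()
+#   run.AddResult(BaseTestResult('a', ResultType.PASS))
+#   run.AddResult(BaseTestResult('b', ResultType.SKIP))
+#   assert run.DidRunPass()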
diff --git a/deps/v8/build/android/pylib/base/base_test_result_unittest.py b/deps/v8/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000000..6f0cba7726
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+ def setUp(self):
+ self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+ other_p1 = BaseTestResult('p1', ResultType.PASS)
+ self.p2 = BaseTestResult('p2', ResultType.PASS)
+ self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+ self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+ self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+ self.tr = TestRunResults()
+ self.tr.AddResult(self.p1)
+ self.tr.AddResult(other_p1)
+ self.tr.AddResult(self.p2)
+ self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+ def testGetAll(self):
+ self.assertFalse(
+ self.tr.GetAll().symmetric_difference(
+ [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+ def testGetPass(self):
+ self.assertFalse(self.tr.GetPass().symmetric_difference(
+ [self.p1, self.p2]))
+
+ def testGetNotPass(self):
+ self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+ [self.f1, self.c1, self.u1]))
+
+ def testGetAddTestRunResults(self):
+ tr2 = TestRunResults()
+ other_p1 = BaseTestResult('p1', ResultType.PASS)
+ f2 = BaseTestResult('f2', ResultType.FAIL)
+ tr2.AddResult(other_p1)
+ tr2.AddResult(f2)
+ tr2.AddTestRunResults(self.tr)
+ self.assertFalse(
+ tr2.GetAll().symmetric_difference(
+ [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+ def testGetLogs(self):
+ log_print = ('[FAIL] f1:\n'
+ 'failure1\n'
+ '[CRASH] c1:\n'
+ 'crash1')
+ self.assertEqual(self.tr.GetLogs(), log_print)
+
+ def testGetShortForm(self):
+ short_print = ('ALL: 5 PASS: 2 FAIL: 1 '
+ 'CRASH: 1 TIMEOUT: 0 UNKNOWN: 1 ')
+ self.assertEqual(self.tr.GetShortForm(), short_print)
+
+ def testGetGtestForm(self):
+ gtest_print = ('[==========] 5 tests ran.\n'
+ '[ PASSED ] 2 tests.\n'
+ '[ FAILED ] 3 tests, listed below:\n'
+ '[ FAILED ] f1\n'
+ '[ FAILED ] c1 (CRASHED)\n'
+ '[ FAILED ] u1 (UNKNOWN)\n'
+ '\n'
+ '3 FAILED TESTS')
+ self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+ def testRunPassed(self):
+ self.assertFalse(self.tr.DidRunPass())
+ tr2 = TestRunResults()
+ self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/base/environment.py b/deps/v8/build/android/pylib/base/environment.py
new file mode 100644
index 0000000000..744c392c1b
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/environment.py
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Environment(object):
+ """An environment in which tests can be run.
+
+ This is expected to handle all logic that is applicable to an entire specific
+ environment but is independent of the test type.
+
+ Examples include:
+ - The local device environment, for running tests on devices attached to
+ the local machine.
+ - The local machine environment, for running tests directly on the local
+ machine.
+ """
+
+ def __init__(self, output_manager):
+ """Environment constructor.
+
+ Args:
+ output_manager: Instance of |output_manager.OutputManager| used to
+ save test output.
+ """
+ self._output_manager = output_manager
+
+ # Some subclasses have different teardown behavior on receiving SIGTERM.
+ self._received_sigterm = False
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb):
+ self.TearDown()
+
+ @property
+ def output_manager(self):
+ return self._output_manager
+
+ def ReceivedSigterm(self):
+ self._received_sigterm = True
diff --git a/deps/v8/build/android/pylib/base/environment_factory.py b/deps/v8/build/android/pylib/base/environment_factory.py
new file mode 100644
index 0000000000..fdca803eff
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/environment_factory.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.local.device import local_device_environment
+from pylib.local.machine import local_machine_environment
+
+def CreateEnvironment(args, output_manager, error_func):
+
+ if args.environment == 'local':
+ if args.command not in constants.LOCAL_MACHINE_TESTS:
+ return local_device_environment.LocalDeviceEnvironment(
+ args, output_manager, error_func)
+ else:
+ return local_machine_environment.LocalMachineEnvironment(
+ args, output_manager, error_func)
+
+ error_func('Unable to create %s environment.' % args.environment)
diff --git a/deps/v8/build/android/pylib/base/mock_environment.py b/deps/v8/build/android/pylib/base/mock_environment.py
new file mode 100644
index 0000000000..9ebb083a08
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/mock_environment.py
@@ -0,0 +1,12 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import environment
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+MockEnvironment = mock.MagicMock(environment.Environment)
diff --git a/deps/v8/build/android/pylib/base/mock_test_instance.py b/deps/v8/build/android/pylib/base/mock_test_instance.py
new file mode 100644
index 0000000000..18def01990
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/mock_test_instance.py
@@ -0,0 +1,12 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+MockTestInstance = mock.MagicMock(test_instance.TestInstance)
diff --git a/deps/v8/build/android/pylib/base/output_manager.py b/deps/v8/build/android/pylib/base/output_manager.py
new file mode 100644
index 0000000000..60b8123b8d
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/output_manager.py
@@ -0,0 +1,158 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+import tempfile
+
+from devil.utils import reraiser_thread
+
+
+class Datatype(object):
+ HTML = 'text/html'
+ JSON = 'application/json'
+ PNG = 'image/png'
+ TEXT = 'text/plain'
+
+
+class OutputManager(object):
+
+ def __init__(self):
+ """OutputManager Constructor.
+
+ This class provides a simple interface to save test output. Subclasses
+ of this will allow users to save test results in the cloud or locally.
+ """
+ self._allow_upload = False
+ self._thread_group = None
+
+ @contextlib.contextmanager
+ def ArchivedTempfile(
+ self, out_filename, out_subdir, datatype=Datatype.TEXT):
+ """Archive file contents asynchonously and then deletes file.
+
+ Args:
+ out_filename: Name for saved file.
+ out_subdir: Directory to save |out_filename| to.
+ datatype: Datatype of file.
+
+ Returns:
+      An ArchivedFile file. This file will be uploaded asynchronously when
+      the context manager exits. AFTER the context manager exits, you can get
+      the link to where the file will be stored using the Link() API. You can
+      use typical file APIs to write and flush the ArchivedFile. You can also
+      use file.name to get the local filepath to where the underlying file
+      exists. If you do this, you are responsible for flushing the file
+      before exiting the context manager.
+ """
+ if not self._allow_upload:
+ raise Exception('Must run |SetUp| before attempting to upload!')
+
+ f = self._CreateArchivedFile(out_filename, out_subdir, datatype)
+ try:
+ yield f
+ finally:
+ f.PrepareArchive()
+
+ def archive():
+ try:
+ f.Archive()
+ finally:
+ f.Delete()
+
+ thread = reraiser_thread.ReraiserThread(func=archive)
+ thread.start()
+ self._thread_group.Add(thread)
+
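+  # Typical usage (an illustrative sketch; OutputManager is abstract, so a
+  # concrete subclass such as local_output_manager.LocalOutputManager is
+  # assumed, and SetUp() must have been called first):
+  #   with manager.ArchivedTempfile('stdout.txt', 'logs') as f:
+  #     f.write('test output')
+  #   link = f.Link()  # Only valid after the context manager exits.
+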
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ """Returns an instance of ArchivedFile."""
+ raise NotImplementedError
+
+ def SetUp(self):
+ self._allow_upload = True
+ self._thread_group = reraiser_thread.ReraiserThreadGroup()
+
+ def TearDown(self):
+ self._allow_upload = False
+ logging.info('Finishing archiving output.')
+ self._thread_group.JoinAll()
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb):
+ self.TearDown()
+
+
+class ArchivedFile(object):
+
+ def __init__(self, out_filename, out_subdir, datatype):
+ self._out_filename = out_filename
+ self._out_subdir = out_subdir
+ self._datatype = datatype
+
+ self._f = tempfile.NamedTemporaryFile(delete=False)
+ self._ready_to_archive = False
+
+ @property
+ def name(self):
+ return self._f.name
+
+ def write(self, *args, **kwargs):
+ if self._ready_to_archive:
+ raise Exception('Cannot write to file after archiving has begun!')
+ self._f.write(*args, **kwargs)
+
+ def flush(self, *args, **kwargs):
+ if self._ready_to_archive:
+ raise Exception('Cannot flush file after archiving has begun!')
+ self._f.flush(*args, **kwargs)
+
+ def Link(self):
+ """Returns location of archived file."""
+ if not self._ready_to_archive:
+ raise Exception('Cannot get link to archived file before archiving '
+ 'has begun')
+ return self._Link()
+
+ def _Link(self):
+ """Note for when overriding this function.
+
+ This function will certainly be called before the file
+ has finished being archived. Therefore, this needs to be able to know the
+ exact location of the archived file before it is finished being archived.
+ """
+ raise NotImplementedError
+
+ def PrepareArchive(self):
+ """Meant to be called synchronously to prepare file for async archiving."""
+ self.flush()
+ self._ready_to_archive = True
+ self._PrepareArchive()
+
+ def _PrepareArchive(self):
+ """Note for when overriding this function.
+
+ This function is needed for things such as computing the location of
+ content addressed files. This is called after the file is written but
+ before archiving has begun.
+ """
+ pass
+
+ def Archive(self):
+ """Archives file."""
+ if not self._ready_to_archive:
+ raise Exception('File is not ready to archive. Be sure you are not '
+ 'writing to the file and PrepareArchive has been called')
+ self._Archive()
+
+ def _Archive(self):
+ raise NotImplementedError
+
+ def Delete(self):
+ """Deletes the backing file."""
+ self._f.close()
+ os.remove(self.name)
diff --git a/deps/v8/build/android/pylib/base/output_manager_factory.py b/deps/v8/build/android/pylib/base/output_manager_factory.py
new file mode 100644
index 0000000000..7a644bcf8a
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/output_manager_factory.py
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.output import local_output_manager
+from pylib.output import remote_output_manager
+
+
+def CreateOutputManager(args):
+ if args.local_output:
+ return local_output_manager.LocalOutputManager(
+ output_dir=constants.GetOutDirectory())
+ else:
+ return remote_output_manager.RemoteOutputManager(
+ bucket=args.gs_results_bucket)
diff --git a/deps/v8/build/android/pylib/base/output_manager_test_case.py b/deps/v8/build/android/pylib/base/output_manager_test_case.py
new file mode 100644
index 0000000000..1e4cd7ef68
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/output_manager_test_case.py
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import unittest
+
+
+class OutputManagerTestCase(unittest.TestCase):
+
+ def assertUsableTempFile(self, archived_tempfile):
+ self.assertTrue(bool(archived_tempfile.name))
+ self.assertTrue(os.path.exists(archived_tempfile.name))
+ self.assertTrue(os.path.isfile(archived_tempfile.name))
diff --git a/deps/v8/build/android/pylib/base/test_collection.py b/deps/v8/build/android/pylib/base/test_collection.py
new file mode 100644
index 0000000000..de510272bd
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_collection.py
@@ -0,0 +1,80 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import threading
+
+class TestCollection(object):
+ """A threadsafe collection of tests.
+
+ Args:
+ tests: List of tests to put in the collection.
+ """
+
+ def __init__(self, tests=None):
+ if not tests:
+ tests = []
+ self._lock = threading.Lock()
+ self._tests = []
+ self._tests_in_progress = 0
+ # Used to signal that an item is available or all items have been handled.
+ self._item_available_or_all_done = threading.Event()
+ for t in tests:
+ self.add(t)
+
+ def _pop(self):
+ """Pop a test from the collection.
+
+ Waits until a test is available or all tests have been handled.
+
+ Returns:
+ A test or None if all tests have been handled.
+ """
+ while True:
+ # Wait for a test to be available or all tests to have been handled.
+ self._item_available_or_all_done.wait()
+ with self._lock:
+ # Check which of the two conditions triggered the signal.
+ if self._tests_in_progress == 0:
+ return None
+ try:
+ return self._tests.pop(0)
+ except IndexError:
+ # Another thread beat us to the available test, wait again.
+ self._item_available_or_all_done.clear()
+
+ def add(self, test):
+ """Add a test to the collection.
+
+ Args:
+ test: A test to add.
+ """
+ with self._lock:
+ self._tests.append(test)
+ self._item_available_or_all_done.set()
+ self._tests_in_progress += 1
+
+ def test_completed(self):
+ """Indicate that a test has been fully handled."""
+ with self._lock:
+ self._tests_in_progress -= 1
+ if self._tests_in_progress == 0:
+ # All tests have been handled, signal all waiting threads.
+ self._item_available_or_all_done.set()
+
+ def __iter__(self):
+ """Iterate through tests in the collection until all have been handled."""
+ while True:
+ r = self._pop()
+ if r is None:
+ break
+ yield r
+
+ def __len__(self):
+ """Return the number of tests currently in the collection."""
+ return len(self._tests)
+
+ def test_names(self):
+ """Return a list of the names of the tests currently in the collection."""
+ with self._lock:
+ return list(t.test for t in self._tests)
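+
+# Illustrative consumer sketch (not part of the original file). Each worker
+# iterates over the collection and must call test_completed() once per
+# handled test, otherwise iteration never terminates:
+#   collection = TestCollection(tests)
+#   for test in collection:
+#     try:
+#       RunSingleTest(test)  # hypothetical runner
+#     finally:
+#       collection.test_completed()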
diff --git a/deps/v8/build/android/pylib/base/test_exception.py b/deps/v8/build/android/pylib/base/test_exception.py
new file mode 100644
index 0000000000..c98d2cb73e
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_exception.py
@@ -0,0 +1,8 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestException(Exception):
+ """Base class for exceptions thrown by the test runner."""
+ pass
diff --git a/deps/v8/build/android/pylib/base/test_instance.py b/deps/v8/build/android/pylib/base/test_instance.py
new file mode 100644
index 0000000000..7b1099cffa
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_instance.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestInstance(object):
+ """A type of test.
+
+ This is expected to handle all logic that is test-type specific but
+ independent of the environment or device.
+
+ Examples include:
+ - gtests
+ - instrumentation tests
+ """
+
+ def __init__(self):
+ pass
+
+ def TestType(self):
+ raise NotImplementedError
+
+ # pylint: disable=no-self-use
+ def GetPreferredAbis(self):
+ return None
+
+ # pylint: enable=no-self-use
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb):
+ self.TearDown()
diff --git a/deps/v8/build/android/pylib/base/test_instance_factory.py b/deps/v8/build/android/pylib/base/test_instance_factory.py
new file mode 100644
index 0000000000..7c21260161
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_instance_factory.py
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.linker import linker_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.perf import perf_test_instance
+from pylib.utils import device_dependencies
+
+
+def CreateTestInstance(args, error_func):
+
+ if args.command == 'gtest':
+ return gtest_test_instance.GtestTestInstance(
+ args, device_dependencies.GetDataDependencies, error_func)
+ elif args.command == 'instrumentation':
+ return instrumentation_test_instance.InstrumentationTestInstance(
+ args, device_dependencies.GetDataDependencies, error_func)
+ elif args.command == 'junit':
+ return junit_test_instance.JunitTestInstance(args, error_func)
+ elif args.command == 'linker':
+ return linker_test_instance.LinkerTestInstance(args)
+ elif args.command == 'monkey':
+ return monkey_test_instance.MonkeyTestInstance(args, error_func)
+ elif args.command == 'perf':
+ return perf_test_instance.PerfTestInstance(args, error_func)
+
+ error_func('Unable to create %s test instance.' % args.command)
diff --git a/deps/v8/build/android/pylib/base/test_run.py b/deps/v8/build/android/pylib/base/test_run.py
new file mode 100644
index 0000000000..fc72d3a547
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_run.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestRun(object):
+ """An execution of a particular test on a particular device.
+
+ This is expected to handle all logic that is specific to the combination of
+ environment and test type.
+
+ Examples include:
+ - local gtests
+ - local instrumentation tests
+ """
+
+ def __init__(self, env, test_instance):
+ self._env = env
+ self._test_instance = test_instance
+
+ # Some subclasses have different teardown behavior on receiving SIGTERM.
+ self._received_sigterm = False
+
+ def TestPackage(self):
+ raise NotImplementedError
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def RunTests(self, results):
+ """Runs Tests and populates |results|.
+
+ Args:
+ results: An array that should be populated with
+ |base_test_result.TestRunResults| objects.
+ """
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.TearDown()
+
+ def ReceivedSigterm(self):
+ self._received_sigterm = True
diff --git a/deps/v8/build/android/pylib/base/test_run_factory.py b/deps/v8/build/android/pylib/base/test_run_factory.py
new file mode 100644
index 0000000000..1f63a059c9
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_run_factory.py
@@ -0,0 +1,56 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.linker import linker_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_gtest_run
+from pylib.local.device import local_device_instrumentation_test_run
+from pylib.local.device import local_device_linker_test_run
+from pylib.local.device import local_device_monkey_test_run
+from pylib.local.device import local_device_perf_test_run
+from pylib.local.machine import local_machine_environment
+from pylib.local.machine import local_machine_junit_test_run
+from pylib.perf import perf_test_instance
+
+
+def _CreatePerfTestRun(args, env, test_instance):
+ if args.print_step:
+ return local_device_perf_test_run.PrintStep(
+ env, test_instance)
+ elif args.output_json_list:
+ return local_device_perf_test_run.OutputJsonList(
+ env, test_instance)
+ return local_device_perf_test_run.LocalDevicePerfTestRun(
+ env, test_instance)
+
+
+def CreateTestRun(args, env, test_instance, error_func):
+ if isinstance(env, local_device_environment.LocalDeviceEnvironment):
+ if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+ return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance)
+ if isinstance(test_instance,
+ instrumentation_test_instance.InstrumentationTestInstance):
+ return (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, test_instance))
+ if isinstance(test_instance, linker_test_instance.LinkerTestInstance):
+ return (local_device_linker_test_run
+ .LocalDeviceLinkerTestRun(env, test_instance))
+ if isinstance(test_instance, monkey_test_instance.MonkeyTestInstance):
+ return (local_device_monkey_test_run
+ .LocalDeviceMonkeyTestRun(env, test_instance))
+ if isinstance(test_instance,
+ perf_test_instance.PerfTestInstance):
+ return _CreatePerfTestRun(args, env, test_instance)
+
+ if isinstance(env, local_machine_environment.LocalMachineEnvironment):
+ if isinstance(test_instance, junit_test_instance.JunitTestInstance):
+ return (local_machine_junit_test_run
+ .LocalMachineJunitTestRun(env, test_instance))
+
+ error_func('Unable to create test run for %s tests in %s environment'
+ % (str(test_instance), str(env)))
diff --git a/deps/v8/build/android/pylib/base/test_server.py b/deps/v8/build/android/pylib/base/test_server.py
new file mode 100644
index 0000000000..763e1212c3
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_server.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class TestServer(object):
+ """Base class for any server that needs to be set up for the tests."""
+
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def Reset(self):
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
diff --git a/deps/v8/build/android/pylib/constants/__init__.py b/deps/v8/build/android/pylib/constants/__init__.py
new file mode 100644
index 0000000000..901a942482
--- /dev/null
+++ b/deps/v8/build/android/pylib/constants/__init__.py
@@ -0,0 +1,274 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
+
+# pylint: disable=W0212
+
+import collections
+import glob
+import logging
+import os
+import subprocess
+
+import devil.android.sdk.keyevent
+from devil.android.constants import chrome
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+
+
+keyevent = devil.android.sdk.keyevent
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir, os.pardir)))
+
+PACKAGE_INFO = dict(chrome.PACKAGE_INFO)
+PACKAGE_INFO.update({
+ 'legacy_browser': chrome.PackageInfo(
+ 'com.google.android.browser',
+ 'com.android.browser.BrowserActivity',
+ None,
+ None),
+ 'chromecast_shell': chrome.PackageInfo(
+ 'com.google.android.apps.mediashell',
+ 'com.google.android.apps.mediashell.MediaShellActivity',
+ 'castshell-command-line',
+ None),
+ 'android_webview_shell': chrome.PackageInfo(
+ 'org.chromium.android_webview.shell',
+ 'org.chromium.android_webview.shell.AwShellActivity',
+ 'android-webview-command-line',
+ None),
+ 'gtest': chrome.PackageInfo(
+ 'org.chromium.native_test',
+ 'org.chromium.native_test.NativeUnitTestActivity',
+ 'chrome-native-tests-command-line',
+ None),
+ 'components_browsertests': chrome.PackageInfo(
+ 'org.chromium.components_browsertests_apk',
+ ('org.chromium.components_browsertests_apk' +
+ '.ComponentsBrowserTestsActivity'),
+ 'chrome-native-tests-command-line',
+ None),
+ 'content_browsertests': chrome.PackageInfo(
+ 'org.chromium.content_browsertests_apk',
+ 'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+ 'chrome-native-tests-command-line',
+ None),
+ 'chromedriver_webview_shell': chrome.PackageInfo(
+ 'org.chromium.chromedriver_webview_shell',
+ 'org.chromium.chromedriver_webview_shell.Main',
+ None,
+ None),
+ 'android_webview_cts': chrome.PackageInfo(
+ 'com.android.webview',
+ 'com.android.cts.webkit.WebViewStartupCtsActivity',
+ 'webview-command-line',
+ None),
+})
+
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# Lighttpd server will attempt to use 9000 as default port, if unavailable it
+# will find a free port from 8001 - 8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+TEST_POLICY_SERVER_PORT = 9051
+
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+ '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+ANDROID_SDK_VERSION = version_codes.OREO_MR1
+ANDROID_SDK_BUILD_TOOLS_VERSION = '27.0.3'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk',
+ 'public')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+ 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+ 'third_party', 'android_ndk')
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+ os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+ 'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+# TODO(jbudorick): Remove once unused.
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Configure ubsan to print stack traces in the format understood by "stack" so
+# that they will be symbolized, and disable signal handlers because they
+# interfere with the breakpad and sandbox tests.
+# This value is duplicated in
+# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java
+UBSAN_OPTIONS = (
+ 'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' '
+ 'handle_segv=0 handle_sigbus=0 handle_sigfpe=0')
+
+# TODO(jbudorick): Rework this into testing/buildbot/
+PYTHON_UNIT_TEST_SUITES = {
+ 'pylib_py_unittests': {
+ 'path':
+ os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
+ 'test_modules': [
+ 'devil.android.device_utils_test',
+ 'devil.android.md5sum_test',
+ 'devil.utils.cmd_helper_test',
+ 'pylib.results.json_results_test',
+ 'pylib.utils.proguard_test',
+ ]
+ },
+ 'gyp_py_unittests': {
+ 'path':
+ os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
+ 'test_modules': [
+ 'java_cpp_enum_tests',
+ 'java_cpp_strings_tests',
+ 'java_google_api_keys_tests',
+ 'extract_unwind_tables_tests',
+ ]
+ },
+}
+
+LOCAL_MACHINE_TESTS = ['junit', 'python']
+VALID_ENVIRONMENTS = ['local']
+VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
+ 'perf', 'python']
+VALID_DEVICE_TYPES = ['Android', 'iOS']
+
+
+def SetBuildType(build_type):
+ """Set the BUILDTYPE environment variable.
+
+  NOTE: Using this function is deprecated in favor of SetOutputDirectory();
+    it is still maintained for a few scripts that typically call it
+    to implement their --release and --debug command-line options.
+
+ When writing a new script, consider supporting an --output-dir or
+ --chromium-output-dir option instead, and calling SetOutputDirectory()
+ instead.
+
+  NOTE: If CHROMIUM_OUTPUT_DIR is defined, or if SetOutputDirectory() was
+ called previously, this will be completely ignored.
+ """
+ chromium_output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+ if chromium_output_dir:
+ logging.warning(
+ 'SetBuildType("%s") ignored since CHROMIUM_OUTPUT_DIR is already '
+ 'defined as (%s)', build_type, chromium_output_dir)
+ os.environ['BUILDTYPE'] = build_type
+
+
+def SetOutputDirectory(output_directory):
+ """Set the Chromium output directory.
+
+ This must be called early by scripts that rely on GetOutDirectory() or
+ CheckOutputDirectory(). Typically by providing an --output-dir or
+ --chromium-output-dir option.
+ """
+ os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory
+
+
+# The message that is printed when the Chromium output directory cannot
+# be found. Note that CHROMIUM_OUT_DIR and BUILDTYPE are not mentioned
+# intentionally to encourage the use of CHROMIUM_OUTPUT_DIR instead.
+_MISSING_OUTPUT_DIR_MESSAGE = '\
+The Chromium output directory could not be found. Please use an option such as \
+--output-directory to provide it (see --help for details). Otherwise, \
+define the CHROMIUM_OUTPUT_DIR environment variable.'
+
+
+def GetOutDirectory():
+ """Returns the Chromium build output directory.
+
+ NOTE: This is determined in the following way:
+ - From a previous call to SetOutputDirectory()
+ - Otherwise, from the CHROMIUM_OUTPUT_DIR env variable, if it is defined.
+ - Otherwise, from the current Chromium source directory, and a previous
+ call to SetBuildType() or the BUILDTYPE env variable, in combination
+ with the optional CHROMIUM_OUT_DIR env variable.
+ """
+ if 'CHROMIUM_OUTPUT_DIR' in os.environ:
+ return os.path.abspath(os.path.join(
+ DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
+
+ build_type = os.environ.get('BUILDTYPE')
+ if not build_type:
+ raise EnvironmentError(_MISSING_OUTPUT_DIR_MESSAGE)
+
+ return os.path.abspath(os.path.join(
+ DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+ build_type))
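+
+# Resolution example (illustrative): with BUILDTYPE=Release and neither
+# CHROMIUM_OUTPUT_DIR nor CHROMIUM_OUT_DIR set, GetOutDirectory() returns
+# <DIR_SOURCE_ROOT>/out/Release.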
+
+
+def CheckOutputDirectory():
+ """Checks that the Chromium output directory is set, or can be found.
+
+ If it is not already set, this will also perform a little auto-detection:
+
+ - If the current directory contains a build.ninja file, use it as
+ the output directory.
+
+ - If CHROME_HEADLESS is defined in the environment (e.g. on a bot),
+ look if there is a single output directory under DIR_SOURCE_ROOT/out/,
+ and if so, use it as the output directory.
+
+ Raises:
+ Exception: If no output directory is detected.
+ """
+ output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+ if output_dir:
+ return
+
+ build_type = os.environ.get('BUILDTYPE')
+ if build_type and len(build_type) > 1:
+ return
+
+ # If CWD is an output directory, then assume it's the desired one.
+ if os.path.exists('build.ninja'):
+ output_dir = os.getcwd()
+ SetOutputDirectory(output_dir)
+ return
+
+ # When running on bots, see if the output directory is obvious.
+ # TODO(http://crbug.com/833808): Get rid of this by ensuring bots always set
+ # CHROMIUM_OUTPUT_DIR correctly.
+ if os.environ.get('CHROME_HEADLESS'):
+ dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja'))
+ if len(dirs) == 1:
+ SetOutputDirectory(dirs[0])
+ return
+
+    raise Exception(
+        'Chromium output directory not set, and CHROME_HEADLESS detected. ' +
+        'However, multiple out dirs exist: %r' % dirs)
+
+ raise Exception(_MISSING_OUTPUT_DIR_MESSAGE)
+
+
+# Exit codes
+ERROR_EXIT_CODE = exit_codes.ERROR
+INFRA_EXIT_CODE = exit_codes.INFRA
+WARNING_EXIT_CODE = exit_codes.WARNING
diff --git a/deps/v8/build/android/pylib/constants/host_paths.py b/deps/v8/build/android/pylib/constants/host_paths.py
new file mode 100644
index 0000000000..b249d3c291
--- /dev/null
+++ b/deps/v8/build/android/pylib/constants/host_paths.py
@@ -0,0 +1,95 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import sys
+
+from pylib import constants
+
+DIR_SOURCE_ROOT = os.environ.get(
+ 'CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir, os.pardir)))
+
+BUILD_COMMON_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')
+
+# third-party libraries
+ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development',
+ 'scripts')
+DEVIL_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
+PYMOCK_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'pymock')
+TRACING_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing')
+
+@contextlib.contextmanager
+def SysPath(path, position=None):
+ if position is None:
+ sys.path.append(path)
+ else:
+ sys.path.insert(position, path)
+ try:
+ yield
+ finally:
+ if sys.path[-1] == path:
+ sys.path.pop()
+ else:
+ sys.path.remove(path)
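+
+# Illustrative usage (not part of the original file):
+#   with SysPath(PYMOCK_PATH):
+#     import mock  # Resolvable only while the context manager is active.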
+
+
+# Map of CPU architecture name to (toolchain_name, binprefix) pairs.
+# TODO(digit): Use the build_vars.txt file generated by gn.
+_TOOL_ARCH_MAP = {
+ 'arm': ('arm-linux-androideabi-4.9', 'arm-linux-androideabi'),
+ 'arm64': ('aarch64-linux-android-4.9', 'aarch64-linux-android'),
+ 'x86': ('x86-4.9', 'i686-linux-android'),
+ 'x86_64': ('x86_64-4.9', 'x86_64-linux-android'),
+ 'x64': ('x86_64-4.9', 'x86_64-linux-android'),
+ 'mips': ('mipsel-linux-android-4.9', 'mipsel-linux-android'),
+}
+
+# Cache used to speed up the results of ToolPath()
+# Maps (arch, tool_name) pairs to fully qualified program paths.
+# Useful because ToolPath() is called repeatedly for demangling C++ symbols.
+_cached_tool_paths = {}
+
+
+def ToolPath(tool, cpu_arch):
+ """Return a fully qualifed path to an arch-specific toolchain program.
+
+ Args:
+ tool: Unprefixed toolchain program name (e.g. 'objdump')
+ cpu_arch: Target CPU architecture (e.g. 'arm64')
+ Returns:
+    Fully qualified path (e.g. '..../aarch64-linux-android-objdump')
+ Raises:
+ Exception if the toolchain could not be found.
+ """
+ tool_path = _cached_tool_paths.get((tool, cpu_arch))
+ if tool_path:
+ return tool_path
+
+ toolchain_source, toolchain_prefix = _TOOL_ARCH_MAP.get(
+ cpu_arch, (None, None))
+ if not toolchain_source:
+ raise Exception('Could not find tool chain for ' + cpu_arch)
+
+ toolchain_subdir = (
+ 'toolchains/%s/prebuilt/linux-x86_64/bin' % toolchain_source)
+
+ tool_path = os.path.join(constants.ANDROID_NDK_ROOT,
+ toolchain_subdir,
+ toolchain_prefix + '-' + tool)
+
+ _cached_tool_paths[(tool, cpu_arch)] = tool_path
+ return tool_path
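+
+# Illustrative example (assumes the arm64 NDK prebuilt toolchain is present):
+#   ToolPath('objdump', 'arm64')
+#   # -> <ANDROID_NDK_ROOT>/toolchains/aarch64-linux-android-4.9/prebuilt/
+#   #    linux-x86_64/bin/aarch64-linux-android-objdump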
+
+
+def GetAaptPath():
+ """Returns the path to the 'aapt' executable."""
+ return os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
diff --git a/deps/v8/build/android/pylib/constants/host_paths_unittest.py b/deps/v8/build/android/pylib/constants/host_paths_unittest.py
new file mode 100755
index 0000000000..658ed08bd9
--- /dev/null
+++ b/deps/v8/build/android/pylib/constants/host_paths_unittest.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import unittest
+
+import pylib.constants as constants
+import pylib.constants.host_paths as host_paths
+
+
+# This map corresponds to the binprefix of NDK prebuilt toolchains for various
+# target CPU architectures. Note that 'x86_64' and 'x64' are the same.
+_EXPECTED_NDK_TOOL_SUBDIR_MAP = {
+ 'arm': 'toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/' +
+ 'arm-linux-androideabi-',
+ 'arm64':
+ 'toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+ 'aarch64-linux-android-',
+ 'x86': 'toolchains/x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-',
+ 'x86_64':
+ 'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+ 'x64':
+ 'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+ 'mips':
+ 'toolchains/mipsel-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+ 'mipsel-linux-android-'
+}
+
+
+class HostPathsTest(unittest.TestCase):
+ def setUp(self):
+ logging.getLogger().setLevel(logging.ERROR)
+
+ def test_GetAaptPath(self):
+ _EXPECTED_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+ self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+ self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+
+ def test_ToolPath(self):
+ for cpu_arch, binprefix in _EXPECTED_NDK_TOOL_SUBDIR_MAP.iteritems():
+ expected_binprefix = os.path.join(constants.ANDROID_NDK_ROOT, binprefix)
+ expected_path = expected_binprefix + 'foo'
+ self.assertEqual(host_paths.ToolPath('foo', cpu_arch), expected_path)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/content_settings.py b/deps/v8/build/android/pylib/content_settings.py
new file mode 100644
index 0000000000..3bf11bc490
--- /dev/null
+++ b/deps/v8/build/android/pylib/content_settings.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+ """A dict interface to interact with device content settings.
+
+ System properties are key/value pairs as exposed by adb shell content.
+ """
+
+ def __init__(self, table, device):
+ super(ContentSettings, self).__init__()
+ self._table = table
+ self._device = device
+
+ @staticmethod
+ def _GetTypeBinding(value):
+ if isinstance(value, bool):
+ return 'b'
+ if isinstance(value, float):
+ return 'f'
+ if isinstance(value, int):
+ return 'i'
+ if isinstance(value, long):
+ return 'l'
+ if isinstance(value, str):
+ return 's'
+ raise ValueError('Unsupported type %s' % type(value))
+
+ def iteritems(self):
+ # Example row:
+ # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+ for row in self._device.RunShellCommand(
+ 'content query --uri content://%s' % self._table, as_root=True):
+ fields = row.split(', ')
+ key = None
+ value = None
+ for field in fields:
+ k, _, v = field.partition('=')
+ if k == 'name':
+ key = v
+ elif k == 'value':
+ value = v
+ if not key:
+ continue
+ if not value:
+ value = ''
+ yield key, value
+
+ def __getitem__(self, key):
+ return self._device.RunShellCommand(
+ 'content query --uri content://%s --where "name=\'%s\'" '
+ '--projection value' % (self._table, key), as_root=True).strip()
+
+ def __setitem__(self, key, value):
+ if key in self:
+ self._device.RunShellCommand(
+ 'content update --uri content://%s '
+ '--bind value:%s:%s --where "name=\'%s\'"' % (
+ self._table,
+ self._GetTypeBinding(value), value, key),
+ as_root=True)
+ else:
+ self._device.RunShellCommand(
+ 'content insert --uri content://%s '
+ '--bind name:%s:%s --bind value:%s:%s' % (
+ self._table,
+ self._GetTypeBinding(key), key,
+ self._GetTypeBinding(value), value),
+ as_root=True)
+
+ def __delitem__(self, key):
+ self._device.RunShellCommand(
+ 'content delete --uri content://%s '
+ '--bind name:%s:%s' % (
+ self._table,
+ self._GetTypeBinding(key), key),
+ as_root=True)
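A brief usage sketch (the device argument is assumed to be a devil DeviceUtils instance, as elsewhere in pylib):

    from pylib import content_settings

    settings = content_settings.ContentSettings('settings/system', device)
    # Runs 'content insert --uri content://settings/system
    # --bind name:s:screen_brightness --bind value:i:5' as root (or the
    # corresponding 'content update' when the key is already present).
    settings['screen_brightness'] = 5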
diff --git a/deps/v8/build/android/pylib/device/__init__.py b/deps/v8/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/device/__init__.py
diff --git a/deps/v8/build/android/pylib/device/commands/BUILD.gn b/deps/v8/build/android/pylib/device/commands/BUILD.gn
new file mode 100644
index 0000000000..480db1e88f
--- /dev/null
+++ b/deps/v8/build/android/pylib/device/commands/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+group("commands") {
+ data_deps = [
+ ":chromium_commands_java",
+ ]
+}
+
+android_library("chromium_commands_java") {
+ emma_never_instrument = true
+ java_files = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
+ dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+ data = [
+ dex_path,
+ ]
+}
diff --git a/deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
new file mode 100644
index 0000000000..5428af25f9
--- /dev/null
+++ b/deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
@@ -0,0 +1,95 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.android.commands.unzip;
+
+import android.util.Log;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+ private static final String TAG = "Unzip";
+
+ public static void main(String[] args) {
+ try {
+ (new Unzip()).run(args);
+ } catch (RuntimeException e) {
+ Log.e(TAG, e.toString());
+ System.exit(1);
+ }
+ }
+
+ private void showUsage(PrintStream s) {
+ s.println("Usage:");
+ s.println("unzip [zipfile]");
+ }
+
+ @SuppressWarnings("Finally")
+ private void unzip(String[] args) {
+ ZipInputStream zis = null;
+ try {
+ String zipfile = args[0];
+ zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+ ZipEntry ze = null;
+
+ byte[] bytes = new byte[1024];
+ while ((ze = zis.getNextEntry()) != null) {
+ File outputFile = new File(ze.getName());
+ if (ze.isDirectory()) {
+ if (!outputFile.exists() && !outputFile.mkdirs()) {
+ throw new RuntimeException(
+ "Failed to create directory: " + outputFile.toString());
+ }
+ } else {
+ File parentDir = outputFile.getParentFile();
+ if (!parentDir.exists() && !parentDir.mkdirs()) {
+ throw new RuntimeException(
+ "Failed to create directory: " + parentDir.toString());
+ }
+ OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+ int actual_bytes = 0;
+ int total_bytes = 0;
+ while ((actual_bytes = zis.read(bytes)) != -1) {
+ out.write(bytes, 0, actual_bytes);
+ total_bytes += actual_bytes;
+ }
+ out.close();
+ }
+ zis.closeEntry();
+ }
+
+ } catch (IOException e) {
+ throw new RuntimeException("Error while unzipping: " + e.toString());
+ } finally {
+ try {
+ if (zis != null) zis.close();
+ } catch (IOException e) {
+ throw new RuntimeException("Error while closing zip: " + e.toString());
+ }
+ }
+ }
+
+ public void run(String[] args) {
+ if (args.length != 1) {
+ showUsage(System.err);
+ throw new RuntimeException("Incorrect usage!");
+ }
+
+ unzip(args);
+ }
+}
+
diff --git a/deps/v8/build/android/pylib/device_settings.py b/deps/v8/build/android/pylib/device_settings.py
new file mode 100644
index 0000000000..ab4ad1b900
--- /dev/null
+++ b/deps/v8/build/android/pylib/device_settings.py
@@ -0,0 +1,199 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+ '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+_COMPATIBLE_BUILD_TYPES = ['userdebug', 'eng']
+
+
+def ConfigureContentSettings(device, desired_settings):
+ """Configures device content setings from a list.
+
+ Many settings are documented at:
+ http://developer.android.com/reference/android/provider/Settings.Global.html
+ http://developer.android.com/reference/android/provider/Settings.Secure.html
+ http://developer.android.com/reference/android/provider/Settings.System.html
+
+ Many others are undocumented.
+
+ Args:
+ device: A DeviceUtils instance for the device to configure.
+ desired_settings: A list of (table, [(key: value), ...]) for all
+ settings to configure.
+ """
+ for table, key_value in desired_settings:
+ settings = content_settings.ContentSettings(table, device)
+ for key, value in key_value:
+ settings[key] = value
+ logging.info('\n%s %s', table, (80 - len(table)) * '-')
+ for key, value in sorted(settings.iteritems()):
+ logging.info('\t%s: %s', key, value)
+
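The predefined setting lists later in this module are already in this (table, [(key, value), ...]) shape, so a typical call is simply (sketch; device is a DeviceUtils instance):

    from pylib import device_settings

    device_settings.ConfigureContentSettings(
        device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)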
+
+def SetLockScreenSettings(device):
+ """Sets lock screen settings on the device.
+
+ On certain device/Android configurations we need to disable the lock screen in
+ a different database. Additionally, the password type must be set to
+ DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+ Lock screen settings are stored in sqlite on the device in:
+ /data/system/locksettings.db
+
+ IMPORTANT: The first column is used as a primary key so that all rows with the
+ same value for that column are removed from the table prior to inserting the
+ new values.
+
+ Args:
+ device: A DeviceUtils instance for the device to configure.
+
+ Raises:
+ Exception if the setting was not properly set.
+ """
+ if device.build_type not in _COMPATIBLE_BUILD_TYPES:
+ logging.warning('Unable to disable lockscreen on %s builds.',
+ device.build_type)
+ return
+
+ def get_lock_settings(table):
+ return [(table, 'lockscreen.disabled', '1'),
+ (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+ (table, 'lockscreen.password_type_alternate',
+ PASSWORD_QUALITY_UNSPECIFIED)]
+
+ if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+ db = _LOCK_SCREEN_SETTINGS_PATH
+ locksettings = get_lock_settings('locksettings')
+ columns = ['name', 'user', 'value']
+ generate_values = lambda k, v: [k, '0', v]
+ elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+ db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+ locksettings = get_lock_settings('secure') + get_lock_settings('system')
+ columns = ['name', 'value']
+ generate_values = lambda k, v: [k, v]
+ else:
+ logging.warning('Unable to find database file to set lock screen settings.')
+ return
+
+ for table, key, value in locksettings:
+ # Set the lockscreen setting for default user '0'
+ values = generate_values(key, value)
+
+ cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+ 'table': table,
+ 'primary_key': columns[0],
+ 'primary_value': values[0],
+ 'columns': ', '.join(columns),
+ 'values': ', '.join(["'%s'" % value for value in values])
+ }
+ output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+ as_root=True)
+ if output_msg:
+ logging.info(' '.join(output_msg))
+
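As a worked example, in the primary locksettings.db case the template above expands to a single shell invocation of the form:

    sqlite3 /data/system/locksettings.db "begin transaction;
    delete from 'locksettings' where name='lockscreen.disabled';
    insert into 'locksettings' (name, user, value) values ('lockscreen.disabled', '0', '1');
    commit transaction;"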
+
+ENABLE_LOCATION_SETTINGS = [
+ # Note that these settings must be applied in this order for all of
+ # them to take effect and persist across a reboot.
+ ('com.google.settings/partner', [
+ ('use_location_for_services', 1),
+ ]),
+ ('settings/secure', [
+ # Ensure Geolocation is enabled and allowed for tests.
+ ('location_providers_allowed', 'gps,network'),
+ ]),
+ ('com.google.settings/partner', [
+ ('network_location_opt_in', 1),
+ ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+ ('com.google.settings/partner', [
+ ('use_location_for_services', 0),
+ ]),
+ ('settings/secure', [
+ # Ensure Geolocation is disabled.
+ ('location_providers_allowed', ''),
+ ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+ ('settings/secure', [
+ ('mock_location', 1),
+ ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+ ('settings/secure', [
+ ('mock_location', 0),
+ ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+ ('settings/global', [
+ ('assisted_gps_enabled', 0),
+
+ # Disable "auto time" and "auto time zone" to avoid network-provided time
+ # to overwrite the device's datetime and timezone synchronized from host
+ # when running tests later. See b/6569849.
+ ('auto_time', 0),
+ ('auto_time_zone', 0),
+
+ ('development_settings_enabled', 1),
+
+ # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+ # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+ # will never display the "Report" button.
+ # Type: int ( 0 = disallow, 1 = allow )
+ ('send_action_app_error', 0),
+
+ ('stay_on_while_plugged_in', 3),
+
+ ('verifier_verify_adb_installs', 0),
+ ]),
+ ('settings/secure', [
+ ('allowed_geolocation_origins',
+ 'http://www.google.co.uk http://www.google.com'),
+
+ # Ensure that we never get random dialogs like "Unfortunately the process
+ # android.process.acore has stopped", which steal the focus and make our
+ # automation fail (the dialog mistakenly receives the injected user input
+ # events).
+ ('anr_show_background', 0),
+
+ ('lockscreen.disabled', 1),
+
+ ('screensaver_enabled', 0),
+
+ ('skip_first_use_hints', 1),
+ ]),
+ ('settings/system', [
+ # Don't want devices to accidentally rotate the screen as that could
+ # affect performance measurements.
+ ('accelerometer_rotation', 0),
+
+ ('lockscreen.disabled', 1),
+
+ # Turn down brightness and disable auto-adjust so that devices run cooler.
+ ('screen_brightness', 5),
+ ('screen_brightness_mode', 0),
+
+ ('user_rotation', 0),
+ ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+ ('settings/global', [
+ ('airplane_mode_on', 1),
+ ('wifi_on', 0),
+ ]),
+]
diff --git a/deps/v8/build/android/pylib/gtest/__init__.py b/deps/v8/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/gtest/filter/OWNERS b/deps/v8/build/android/pylib/gtest/filter/OWNERS
new file mode 100644
index 0000000000..72e8ffc0db
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled b/deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000000..533d3e167b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+# TODO(jrg): Fails on bots. Works locally. Figure out why. 2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000000..6bec7d015b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000000..cefc64fd5e
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled b/deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000000..974f131c2b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,50 @@
+# List of suppressions
+# Timeouts
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# http://crbug.com/463740
+CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility
+
+# http://crbug.com/297230
+DumpAccessibilityTreeTest.AccessibilityAriaLevel/*
+DumpAccessibilityTreeTest.AccessibilityAriaProgressbar/*
+DumpAccessibilityTreeTest.AccessibilityListMarkers/*
+DumpAccessibilityTreeTest.AccessibilityUl/*
+DumpAccessibilityTreeTest.AccessibilityCanvas/*
+DumpAccessibilityTreeTest.AccessibilityDialog/*
+DumpAccessibilityTreeTest.AccessibilityModalDialogClosed/*
+DumpAccessibilityTreeTest.AccessibilityModalDialogInIframeOpened/*
+RenderAccessibilityImplTest.DetachAccessibilityObject
+
+# http://crbug.com/187500
+RenderViewImplTest.ImeComposition
+RenderViewImplTest.InsertCharacters
+RenderViewImplTest.OnHandleKeyboardEvent
+RenderViewImplTest.OnNavStateChanged
+# ZoomLevel is not used on Android
+RenderFrameImplTest.ZoomLimit
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+RendererAccessibilityTest.TextSelectionShouldSendRoot
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
diff --git a/deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled b/deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000000..6a7340db43
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,80 @@
+# List of suppressions
+
+# The UDP-related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+PageInfoTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/deps/v8/build/android/pylib/gtest/gtest_config.py b/deps/v8/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000000..3ac195586c
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+ 'components_browsertests',
+ 'heap_profiler_unittests',
+ 'devtools_bridge_tests',
+]
+
+TELEMETRY_EXPERIMENTAL_TEST_SUITES = [
+ 'telemetry_unittests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+ 'android_webview_unittests',
+ 'base_unittests',
+ 'blink_unittests',
+ 'breakpad_unittests',
+ 'cc_unittests',
+ 'components_unittests',
+ 'content_browsertests',
+ 'content_unittests',
+ 'events_unittests',
+ 'gl_tests',
+ 'gl_unittests',
+ 'gpu_unittests',
+ 'ipc_tests',
+ 'media_unittests',
+ 'midi_unittests',
+ 'net_unittests',
+ 'sandbox_linux_unittests',
+ 'skia_unittests',
+ 'sql_unittests',
+ 'storage_unittests',
+ 'ui_android_unittests',
+ 'ui_base_unittests',
+ 'ui_touch_selection_unittests',
+ 'unit_tests_apk',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+ 'breakpad_unittests',
+ 'sandbox_linux_unittests',
+
+ # The internal ASAN recipe cannot run the "unit_tests_apk" step; this is
+ # the only internal recipe affected. See http://crbug.com/607850
+ 'unit_tests_apk',
+]
diff --git a/deps/v8/build/android/pylib/gtest/gtest_test_instance.py b/deps/v8/build/android/pylib/gtest/gtest_test_instance.py
new file mode 100644
index 0000000000..d3bedee19b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/gtest_test_instance.py
@@ -0,0 +1,530 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import HTMLParser
+import logging
+import os
+import re
+import tempfile
+import threading
+import xml.etree.ElementTree
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.symbols import stack_symbolizer
+from pylib.utils import test_filter
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import unittest_util # pylint: disable=import-error
+
+
+BROWSER_TEST_SUITES = [
+ 'components_browsertests',
+ 'content_browsertests',
+]
+
+RUN_IN_SUB_THREAD_TEST_SUITES = [
+ # Multiprocess tests should be run outside of the main thread.
+ 'base_unittests', # file_locking_unittest.cc uses a child process.
+ 'ipc_perftests',
+ 'ipc_tests',
+ 'mojo_perftests',
+ 'mojo_unittests',
+ 'net_unittests'
+]
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+ 'chrome/test/data/extensions/api_test',
+ 'chrome/test/data/extensions/secure_shell',
+ 'chrome/test/data/firefox*',
+ 'chrome/test/data/gpu',
+ 'chrome/test/data/image_decoding',
+ 'chrome/test/data/import',
+ 'chrome/test/data/page_cycler',
+ 'chrome/test/data/perf',
+ 'chrome/test/data/pyauto_private',
+ 'chrome/test/data/safari_import',
+ 'chrome/test/data/scroll',
+ 'chrome/test/data/third_party',
+ 'third_party/hunspell_dictionaries/*.dic',
+ # crbug.com/258690
+ 'webkit/data/bmp_decoder',
+ 'webkit/data/ico_decoder',
+]
+
+
+_EXTRA_NATIVE_TEST_ACTIVITY = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+ 'NativeTestActivity')
+_EXTRA_RUN_IN_SUB_THREAD = (
+ 'org.chromium.native_test.NativeTest.RunInSubThread')
+EXTRA_SHARD_NANO_TIMEOUT = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+ 'ShardNanoTimeout')
+_EXTRA_SHARD_SIZE_LIMIT = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+ 'ShardSizeLimit')
+
+# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
+# results.
+_RE_TEST_STATUS = re.compile(
+ r'\[ +((?:RUN)|(?:FAILED)|(?:OK)|(?:CRASHED)) +\]'
+ r' ?([^ ]+)?(?: \((\d+) ms\))?$')
+# Crash detection constants.
+_RE_TEST_ERROR = re.compile(r'FAILURES!!! Tests run: \d+,'
+ r' Failures: \d+, Errors: 1')
+_RE_TEST_CURRENTLY_RUNNING = re.compile(r'\[ERROR:.*?\]'
+ r' Currently running: (.*)')
+_RE_DISABLED = re.compile(r'DISABLED_')
+_RE_FLAKY = re.compile(r'FLAKY_')
+
+# Detect stack line in stdout.
+_STACK_LINE_RE = re.compile(r'\s*#\d+')
+
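For reference, the status regex captures the banner type, the (optional) test name, and the (optional) duration; a quick sketch of its behavior:

    _RE_TEST_STATUS.match('[ RUN      ] FooTest.Bar').groups()
    # -> ('RUN', 'FooTest.Bar', None)
    _RE_TEST_STATUS.match('[       OK ] FooTest.Bar (1 ms)').groups()
    # -> ('OK', 'FooTest.Bar', '1'), the last group being the duration in ms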
+def ParseGTestListTests(raw_list):
+ """Parses a raw test list as provided by --gtest_list_tests.
+
+ Args:
+ raw_list: The raw test listing with the following format:
+
+ IPCChannelTest.
+ SendMessageInChannelConnected
+ IPCSyncChannelTest.
+ Simple
+ DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+ Returns:
+ A list of all tests. For the above raw listing:
+
+ [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+ IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+ """
+ ret = []
+ current = ''
+ for test in raw_list:
+ if not test:
+ continue
+ if not test.startswith(' '):
+ test_case = test.split()[0]
+ if test_case.endswith('.'):
+ current = test_case
+ else:
+ test = test.strip()
+ if test and 'YOU HAVE' not in test:
+ test_name = test.split()[0]
+ ret += [current + test_name]
+ return ret
+
+
+def ParseGTestOutput(output, symbolizer, device_abi):
+ """Parses raw gtest output and returns a list of results.
+
+ Args:
+ output: A list of output lines.
+ symbolizer: The symbolizer used to symbolize stacks.
+ device_abi: Device abi that is needed for symbolization.
+ Returns:
+ A list of base_test_result.BaseTestResults.
+ """
+ duration = 0
+ fallback_result_type = None
+ log = []
+ stack = []
+ result_type = None
+ results = []
+ test_name = None
+
+ def symbolize_stack_and_merge_with_log():
+ log_string = '\n'.join(log or [])
+ if not stack:
+ stack_string = ''
+ else:
+ stack_string = '\n'.join(
+ symbolizer.ExtractAndResolveNativeStackTraces(
+ stack, device_abi))
+ return '%s\n%s' % (log_string, stack_string)
+
+ def handle_possibly_unknown_test():
+ if test_name is not None:
+ results.append(base_test_result.BaseTestResult(
+ TestNameWithoutDisabledPrefix(test_name),
+ fallback_result_type or base_test_result.ResultType.UNKNOWN,
+ duration, log=symbolize_stack_and_merge_with_log()))
+
+ for l in output:
+ matcher = _RE_TEST_STATUS.match(l)
+ if matcher:
+ if matcher.group(1) == 'RUN':
+ handle_possibly_unknown_test()
+ duration = 0
+ fallback_result_type = None
+ log = []
+ stack = []
+ result_type = None
+ elif matcher.group(1) == 'OK':
+ result_type = base_test_result.ResultType.PASS
+ elif matcher.group(1) == 'FAILED':
+ result_type = base_test_result.ResultType.FAIL
+ elif matcher.group(1) == 'CRASHED':
+ fallback_result_type = base_test_result.ResultType.CRASH
+ # Be aware that the test name and status might not appear on the same line.
+ test_name = matcher.group(2) if matcher.group(2) else test_name
+ duration = int(matcher.group(3)) if matcher.group(3) else 0
+
+ else:
+ # A second matcher is needed here to catch crashes, e.g. from DCHECK.
+ matcher = _RE_TEST_CURRENTLY_RUNNING.match(l)
+ if matcher:
+ test_name = matcher.group(1)
+ result_type = base_test_result.ResultType.CRASH
+ duration = 0 # Don't know.
+
+ if log is not None:
+ if not matcher and _STACK_LINE_RE.match(l):
+ stack.append(l)
+ else:
+ log.append(l)
+
+ if result_type and test_name:
+ # Don't bother symbolizing output if the test passed.
+ if result_type == base_test_result.ResultType.PASS:
+ stack = []
+ results.append(base_test_result.BaseTestResult(
+ TestNameWithoutDisabledPrefix(test_name), result_type, duration,
+ log=symbolize_stack_and_merge_with_log()))
+ test_name = None
+
+ handle_possibly_unknown_test()
+
+ return results
+
+
+def ParseGTestXML(xml_content):
+ """Parse gtest XML result."""
+ results = []
+ if not xml_content:
+ return results
+
+ html = HTMLParser.HTMLParser()
+
+ testsuites = xml.etree.ElementTree.fromstring(xml_content)
+ for testsuite in testsuites:
+ suite_name = testsuite.attrib['name']
+ for testcase in testsuite:
+ case_name = testcase.attrib['name']
+ result_type = base_test_result.ResultType.PASS
+ log = []
+ for failure in testcase:
+ result_type = base_test_result.ResultType.FAIL
+ log.append(html.unescape(failure.attrib['message']))
+
+ results.append(base_test_result.BaseTestResult(
+ '%s.%s' % (suite_name, TestNameWithoutDisabledPrefix(case_name)),
+ result_type,
+ int(float(testcase.attrib['time']) * 1000),
+ log=('\n'.join(log) if log else '')))
+
+ return results
+
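For instance, gtest XML of the following shape (illustrative input) yields a single FAIL result named 'FooTest.Bar' with a 1 ms duration and the failure message as its log:

    <testsuites>
      <testsuite name="FooTest">
        <testcase name="Bar" time="0.001">
          <failure message="Value of: false"/>
        </testcase>
      </testsuite>
    </testsuites>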
+
+def TestNameWithoutDisabledPrefix(test_name):
+ """Modify the test name without disabled prefix if prefix 'DISABLED_' or
+ 'FLAKY_' presents.
+
+ Args:
+ test_name: The name of a test.
+ Returns:
+ A test name without prefix 'DISABLED_' or 'FLAKY_'.
+ """
+ disabled_prefixes = [_RE_DISABLED, _RE_FLAKY]
+ for dp in disabled_prefixes:
+ test_name = dp.sub('', test_name)
+ return test_name
+
+class GtestTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, data_deps_delegate, error_func):
+ super(GtestTestInstance, self).__init__()
+ # TODO(jbudorick): Support multiple test suites.
+ if len(args.suite_name) > 1:
+ raise ValueError('Platform mode currently supports only 1 gtest suite')
+ self._isolated_script_test_perf_output = (
+ args.isolated_script_test_perf_output)
+ self._exe_dist_dir = None
+ self._external_shard_index = args.test_launcher_shard_index
+ self._extract_test_list_from_filter = args.extract_test_list_from_filter
+ self._filter_tests_lock = threading.Lock()
+ self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket
+ self._shard_timeout = args.shard_timeout
+ self._store_tombstones = args.store_tombstones
+ self._suite = args.suite_name[0]
+ self._symbolizer = stack_symbolizer.Symbolizer(None)
+ self._total_external_shards = args.test_launcher_total_shards
+ self._wait_for_java_debugger = args.wait_for_java_debugger
+
+ # GYP:
+ if args.executable_dist_dir:
+ self._exe_dist_dir = os.path.abspath(args.executable_dist_dir)
+ else:
+ # TODO(agrieve): Remove auto-detection once recipes pass flag explicitly.
+ exe_dist_dir = os.path.join(constants.GetOutDirectory(),
+ '%s__dist' % self._suite)
+
+ if os.path.exists(exe_dist_dir):
+ self._exe_dist_dir = exe_dist_dir
+
+ incremental_part = ''
+ if args.test_apk_incremental_install_json:
+ incremental_part = '_incremental'
+
+ apk_path = os.path.join(
+ constants.GetOutDirectory(), '%s_apk' % self._suite,
+ '%s-debug%s.apk' % (self._suite, incremental_part))
+ self._test_apk_incremental_install_json = (
+ args.test_apk_incremental_install_json)
+ if not os.path.exists(apk_path):
+ self._apk_helper = None
+ else:
+ self._apk_helper = apk_helper.ApkHelper(apk_path)
+ self._extras = {
+ _EXTRA_NATIVE_TEST_ACTIVITY: self._apk_helper.GetActivityName(),
+ }
+ if self._suite in RUN_IN_SUB_THREAD_TEST_SUITES:
+ self._extras[_EXTRA_RUN_IN_SUB_THREAD] = 1
+ if self._suite in BROWSER_TEST_SUITES:
+ self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1
+ self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e9 * self._shard_timeout)
+ self._shard_timeout = 10 * self._shard_timeout
+ if args.wait_for_java_debugger:
+ self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e15) # Forever
+
+ if not self._apk_helper and not self._exe_dist_dir:
+ error_func('Could not find apk or executable for %s' % self._suite)
+
+ self._data_deps = []
+ self._gtest_filter = test_filter.InitializeFilterFromArgs(args)
+ self._run_disabled = args.run_disabled
+
+ self._data_deps_delegate = data_deps_delegate
+ self._runtime_deps_path = args.runtime_deps_path
+ if not self._runtime_deps_path:
+ logging.warning('No data dependencies will be pushed.')
+
+ if args.app_data_files:
+ self._app_data_files = args.app_data_files
+ if args.app_data_file_dir:
+ self._app_data_file_dir = args.app_data_file_dir
+ else:
+ self._app_data_file_dir = tempfile.mkdtemp()
+ logging.critical('Saving app files to %s', self._app_data_file_dir)
+ else:
+ self._app_data_files = None
+ self._app_data_file_dir = None
+
+ self._flags = None
+ self._initializeCommandLineFlags(args)
+
+ # TODO(jbudorick): Remove this once it's deployed.
+ self._enable_xml_result_parsing = args.enable_xml_result_parsing
+
+ def _initializeCommandLineFlags(self, args):
+ self._flags = []
+ if args.command_line_flags:
+ self._flags.extend(args.command_line_flags)
+ if args.device_flags_file:
+ with open(args.device_flags_file) as f:
+ stripped_lines = (l.strip() for l in f)
+ self._flags.extend(flag for flag in stripped_lines if flag)
+ if args.run_disabled:
+ self._flags.append('--gtest_also_run_disabled_tests')
+
+ @property
+ def activity(self):
+ return self._apk_helper and self._apk_helper.GetActivityName()
+
+ @property
+ def apk(self):
+ return self._apk_helper and self._apk_helper.path
+
+ @property
+ def apk_helper(self):
+ return self._apk_helper
+
+ @property
+ def app_file_dir(self):
+ return self._app_data_file_dir
+
+ @property
+ def app_files(self):
+ return self._app_data_files
+
+ @property
+ def enable_xml_result_parsing(self):
+ return self._enable_xml_result_parsing
+
+ @property
+ def exe_dist_dir(self):
+ return self._exe_dist_dir
+
+ @property
+ def external_shard_index(self):
+ return self._external_shard_index
+
+ @property
+ def extract_test_list_from_filter(self):
+ return self._extract_test_list_from_filter
+
+ @property
+ def extras(self):
+ return self._extras
+
+ @property
+ def flags(self):
+ return self._flags
+
+ @property
+ def gs_test_artifacts_bucket(self):
+ return self._gs_test_artifacts_bucket
+
+ @property
+ def gtest_filter(self):
+ return self._gtest_filter
+
+ @property
+ def isolated_script_test_perf_output(self):
+ return self._isolated_script_test_perf_output
+
+ @property
+ def package(self):
+ return self._apk_helper and self._apk_helper.GetPackageName()
+
+ @property
+ def permissions(self):
+ return self._apk_helper and self._apk_helper.GetPermissions()
+
+ @property
+ def runner(self):
+ return self._apk_helper and self._apk_helper.GetInstrumentationName()
+
+ @property
+ def shard_timeout(self):
+ return self._shard_timeout
+
+ @property
+ def store_tombstones(self):
+ return self._store_tombstones
+
+ @property
+ def suite(self):
+ return self._suite
+
+ @property
+ def symbolizer(self):
+ return self._symbolizer
+
+ @property
+ def test_apk_incremental_install_json(self):
+ return self._test_apk_incremental_install_json
+
+ @property
+ def total_external_shards(self):
+ return self._total_external_shards
+
+ @property
+ def wait_for_java_debugger(self):
+ return self._wait_for_java_debugger
+
+ #override
+ def TestType(self):
+ return 'gtest'
+
+ #override
+ def GetPreferredAbis(self):
+ if not self._apk_helper:
+ return None
+ return self._apk_helper.GetAbis()
+
+ #override
+ def SetUp(self):
+ """Map data dependencies via isolate."""
+ self._data_deps.extend(
+ self._data_deps_delegate(self._runtime_deps_path))
+
+ def GetDataDependencies(self):
+ """Returns the test suite's data dependencies.
+
+ Returns:
+ A list of (host_path, device_path) tuples to push. If device_path is
+ None, the client is responsible for determining where to push the file.
+ """
+ return self._data_deps
+
+ def FilterTests(self, test_list, disabled_prefixes=None):
+ """Filters |test_list| based on prefixes and, if present, a filter string.
+
+ Args:
+ test_list: The list of tests to filter.
+ disabled_prefixes: A list of test prefixes to filter. Defaults to
+ DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_.
+ Returns:
+ A filtered list of tests to run.
+ """
+ gtest_filter_strings = [
+ self._GenerateDisabledFilterString(disabled_prefixes)]
+ if self._gtest_filter:
+ gtest_filter_strings.append(self._gtest_filter)
+
+ filtered_test_list = test_list
+ # This lock is required because, on older versions of Python,
+ # |unittest_util.FilterTestNames|'s use of |fnmatch| is not threadsafe.
+ with self._filter_tests_lock:
+ for gtest_filter_string in gtest_filter_strings:
+ logging.debug('Filtering tests using: %s', gtest_filter_string)
+ filtered_test_list = unittest_util.FilterTestNames(
+ filtered_test_list, gtest_filter_string)
+
+ if self._run_disabled and self._gtest_filter:
+ out_filtered_test_list = list(set(test_list)-set(filtered_test_list))
+ for test in out_filtered_test_list:
+ test_name_no_disabled = TestNameWithoutDisabledPrefix(test)
+ if test_name_no_disabled != test and unittest_util.FilterTestNames(
+ [test_name_no_disabled], self._gtest_filter):
+ filtered_test_list.append(test)
+ return filtered_test_list
+
+ def _GenerateDisabledFilterString(self, disabled_prefixes):
+ disabled_filter_items = []
+
+ if disabled_prefixes is None:
+ disabled_prefixes = ['FAILS_', 'PRE_']
+ if '--run-manual' not in self._flags:
+ disabled_prefixes += ['MANUAL_']
+ if not self._run_disabled:
+ disabled_prefixes += ['DISABLED_', 'FLAKY_']
+
+ disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
+ disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]
+
+ disabled_tests_file_path = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
+ 'filter', '%s_disabled' % self._suite)
+ if disabled_tests_file_path and os.path.exists(disabled_tests_file_path):
+ with open(disabled_tests_file_path) as disabled_tests_file:
+ disabled_filter_items += [
+ '%s' % l for l in (line.strip() for line in disabled_tests_file)
+ if l and not l.startswith('#')]
+
+ return '*-%s' % ':'.join(disabled_filter_items)
+
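With default arguments (no --run-manual flag, run_disabled off) and no per-suite filter file, the returned value is a single negative gtest filter:

    *-FAILS_*:PRE_*:MANUAL_*:DISABLED_*:FLAKY_*:*.FAILS_*:*.PRE_*:*.MANUAL_*:*.DISABLED_*:*.FLAKY_*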
+ #override
+ def TearDown(self):
+ """Do nothing."""
+ pass
diff --git a/deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py b/deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py
new file mode 100755
index 0000000000..b39da527df
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+
+
+class GtestTestInstanceTests(unittest.TestCase):
+
+ def testParseGTestListTests_simple(self):
+ raw_output = [
+ 'TestCaseOne.',
+ ' testOne',
+ ' testTwo',
+ 'TestCaseTwo.',
+ ' testThree',
+ ' testFour',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TestCaseOne.testOne',
+ 'TestCaseOne.testTwo',
+ 'TestCaseTwo.testThree',
+ 'TestCaseTwo.testFour',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_typeParameterized_old(self):
+ raw_output = [
+ 'TPTestCase/WithTypeParam/0.',
+ ' testOne',
+ ' testTwo',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TPTestCase/WithTypeParam/0.testOne',
+ 'TPTestCase/WithTypeParam/0.testTwo',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_typeParameterized_new(self):
+ raw_output = [
+ 'TPTestCase/WithTypeParam/0. # TypeParam = TypeParam0',
+ ' testOne',
+ ' testTwo',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TPTestCase/WithTypeParam/0.testOne',
+ 'TPTestCase/WithTypeParam/0.testTwo',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_valueParameterized_old(self):
+ raw_output = [
+ 'VPTestCase.',
+ ' testWithValueParam/0',
+ ' testWithValueParam/1',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'VPTestCase.testWithValueParam/0',
+ 'VPTestCase.testWithValueParam/1',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_valueParameterized_new(self):
+ raw_output = [
+ 'VPTestCase.',
+ ' testWithValueParam/0 # GetParam() = 0',
+ ' testWithValueParam/1 # GetParam() = 1',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'VPTestCase.testWithValueParam/0',
+ 'VPTestCase.testWithValueParam/1',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_emptyTestName(self):
+ raw_output = [
+ 'TestCase.',
+ ' ',
+ ' nonEmptyTestName',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TestCase.nonEmptyTestName',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestOutput_pass(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ OK ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+ def testParseGTestOutput_fail(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ FAILED ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+ def testParseGTestOutput_crash(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ CRASHED ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+ def testParseGTestOutput_errorCrash(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ERROR:blah] Currently running: FooTest.Bar',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(0, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+ def testParseGTestOutput_unknown(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(0, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType())
+
+ def testParseGTestOutput_nonterminalUnknown(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ RUN ] FooTest.Baz',
+ '[ OK ] FooTest.Baz (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(2, len(actual))
+
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(0, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType())
+
+ self.assertEquals('FooTest.Baz', actual[1].GetName())
+ self.assertEquals(1, actual[1].GetDuration())
+ self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType())
+
+ def testParseGTestOutput_deathTestCrashOk(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ CRASHED ]',
+ '[ OK ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+ def testParseGTestXML_none(self):
+ actual = gtest_test_instance.ParseGTestXML(None)
+ self.assertEquals([], actual)
+
+ def testTestNameWithoutDisabledPrefix_disabled(self):
+ test_name_list = [
+ 'A.DISABLED_B',
+ 'DISABLED_A.B',
+ 'DISABLED_A.DISABLED_B',
+ ]
+ for test_name in test_name_list:
+ actual = gtest_test_instance \
+ .TestNameWithoutDisabledPrefix(test_name)
+ expected = 'A.B'
+ self.assertEquals(expected, actual)
+
+ def testTestNameWithoutDisabledPrefix_flaky(self):
+ test_name_list = [
+ 'A.FLAKY_B',
+ 'FLAKY_A.B',
+ 'FLAKY_A.FLAKY_B',
+ ]
+ for test_name in test_name_list:
+ actual = gtest_test_instance \
+ .TestNameWithoutDisabledPrefix(test_name)
+ expected = 'A.B'
+ self.assertEquals(expected, actual)
+
+ def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self):
+ test_name = 'A.B'
+ actual = gtest_test_instance \
+ .TestNameWithoutDisabledPrefix(test_name)
+ expected = 'A.B'
+ self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/instrumentation/__init__.py b/deps/v8/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000000..aa78e9ec92
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# AndroidJUnitRunner outputs status code -3 to indicate a skipped test.
+STATUS_CODE_SKIP = -3
+
+# AndroidJUnitRunner outputs -4 to indicate a failed assumption
+# "A test for which an assumption fails should not generate a test
+# case failure"
+# http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html
+STATUS_CODE_ASSUMPTION_FAILURE = -4
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
+
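The line regex splits each recognized line into a header and a value; for example:

    _INSTR_LINE_RE.match('INSTRUMENTATION_STATUS: test=testMethod').groups()
    # -> ('STATUS', 'test=testMethod')
    _INSTR_LINE_RE.match('INSTRUMENTATION_STATUS_CODE: 0').groups()
    # -> ('STATUS_CODE', '0')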
+
+class InstrumentationParser(object):
+
+ def __init__(self, stream):
+ """An incremental parser for the output of Android instrumentation tests.
+
+ Example:
+
+ stream = adb.IterShell('am instrument -r ...')
+ parser = InstrumentationParser(stream)
+
+ for code, bundle in parser.IterStatus():
+ # do something with each instrumentation status
+ print 'status:', code, bundle
+
+ # do something with the final instrumentation result
+ code, bundle = parser.GetResult()
+ print 'result:', code, bundle
+
+ Args:
+ stream: a sequence of lines as produced by the raw output of an
+ instrumentation test (e.g. by |am instrument -r|).
+ """
+ self._stream = stream
+ self._code = None
+ self._bundle = None
+
+ def IterStatus(self):
+ """Iterate over statuses as they are produced by the instrumentation test.
+
+ Yields:
+ A tuple (code, bundle) for each instrumentation status found in the
+ output.
+ """
+ def join_bundle_values(bundle):
+ for key in bundle:
+ bundle[key] = '\n'.join(bundle[key])
+ return bundle
+
+ bundle = {'STATUS': {}, 'RESULT': {}}
+ header = None
+ key = None
+ for line in self._stream:
+ m = _INSTR_LINE_RE.match(line)
+ if m:
+ header, value = m.groups()
+ key = None
+ if header in ['STATUS', 'RESULT'] and '=' in value:
+ key, value = value.split('=', 1)
+ bundle[header][key] = [value]
+ elif header == 'STATUS_CODE':
+ yield int(value), join_bundle_values(bundle['STATUS'])
+ bundle['STATUS'] = {}
+ elif header == 'CODE':
+ self._code = int(value)
+ else:
+ logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+ elif key is not None:
+ bundle[header][key].append(line)
+
+ self._bundle = join_bundle_values(bundle['RESULT'])
+
+ def GetResult(self):
+ """Return the final instrumentation result.
+
+ Returns:
+ A pair (code, bundle) with the final instrumentation result. The |code|
+ may be None if no instrumentation result was found in the output.
+
+ Raises:
+ AssertionError if attempting to get the instrumentation result before
+ exhausting |IterStatus| first.
+ """
+ assert self._bundle is not None, (
+ 'The IterStatus generator must be exhausted before reading the final'
+ ' instrumentation result.')
+ return self._code, self._bundle
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000000..092d10fc93
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
+class InstrumentationParserTest(unittest.TestCase):
+
+ def testInstrumentationParser_nothing(self):
+ parser = instrumentation_parser.InstrumentationParser([''])
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ self.assertEqual(None, code)
+ self.assertEqual({}, bundle)
+ self.assertEqual([], statuses)
+
+ def testInstrumentationParser_noMatchingStarts(self):
+ raw_output = [
+ '',
+ 'this.is.a.test.package.TestClass:.',
+ 'Test result for =.',
+ 'Time: 1.234',
+ '',
+ 'OK (1 test)',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ self.assertEqual(None, code)
+ self.assertEqual({}, bundle)
+ self.assertEqual([], statuses)
+
+ def testInstrumentationParser_resultAndCode(self):
+ raw_output = [
+ 'INSTRUMENTATION_RESULT: shortMsg=foo bar',
+ 'INSTRUMENTATION_RESULT: longMsg=a foo',
+ 'walked into',
+ 'a bar',
+ 'INSTRUMENTATION_CODE: -1',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ self.assertEqual(-1, code)
+ self.assertEqual(
+ {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
+ self.assertEqual([], statuses)
+
+ def testInstrumentationParser_oneStatus(self):
+ raw_output = [
+ 'INSTRUMENTATION_STATUS: foo=1',
+ 'INSTRUMENTATION_STATUS: bar=hello',
+ 'INSTRUMENTATION_STATUS: world=false',
+ 'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+ 'INSTRUMENTATION_STATUS: test=testMethod',
+ 'INSTRUMENTATION_STATUS_CODE: 0',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+
+ expected = [
+ (0, {
+ 'foo': '1',
+ 'bar': 'hello',
+ 'world': 'false',
+ 'class': 'this.is.a.test.package.TestClass',
+ 'test': 'testMethod',
+ })
+ ]
+ self.assertEqual(expected, statuses)
+
+ def testInstrumentationParser_multiStatus(self):
+ raw_output = [
+ 'INSTRUMENTATION_STATUS: class=foo',
+ 'INSTRUMENTATION_STATUS: test=bar',
+ 'INSTRUMENTATION_STATUS_CODE: 1',
+ 'INSTRUMENTATION_STATUS: test_skipped=true',
+ 'INSTRUMENTATION_STATUS_CODE: 0',
+ 'INSTRUMENTATION_STATUS: class=hello',
+ 'INSTRUMENTATION_STATUS: test=world',
+ 'INSTRUMENTATION_STATUS: stack=',
+ 'foo/bar.py (27)',
+ 'hello/world.py (42)',
+ 'test/file.py (1)',
+ 'INSTRUMENTATION_STATUS_CODE: -1',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+
+ expected = [
+ (1, {'class': 'foo', 'test': 'bar',}),
+ (0, {'test_skipped': 'true'}),
+ (-1, {
+ 'class': 'hello',
+ 'test': 'world',
+ 'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
+ }),
+ ]
+ self.assertEqual(expected, statuses)
+
+ def testInstrumentationParser_statusResultAndCode(self):
+ raw_output = [
+ 'INSTRUMENTATION_STATUS: class=foo',
+ 'INSTRUMENTATION_STATUS: test=bar',
+ 'INSTRUMENTATION_STATUS_CODE: 1',
+ 'INSTRUMENTATION_RESULT: result=hello',
+ 'world',
+ '',
+ '',
+ 'INSTRUMENTATION_CODE: 0',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+
+ self.assertEqual(0, code)
+ self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
+ self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py
new file mode 100644
index 0000000000..98b9435efe
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -0,0 +1,944 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import logging
+import os
+import pickle
+import re
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_exception
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
+from pylib.symbols import deobfuscator
+from pylib.symbols import stack_symbolizer
+from pylib.utils import dexdump
+from pylib.utils import instrumentation_tracing
+from pylib.utils import proguard
+from pylib.utils import shared_preference_utils
+from pylib.utils import test_filter
+
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import unittest_util # pylint: disable=import-error
+
+# Ref: http://developer.android.com/reference/android/app/Activity.html
+_ACTIVITY_RESULT_CANCELED = 0
+_ACTIVITY_RESULT_OK = -1
+
+_COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter'
+_DEFAULT_ANNOTATIONS = [
+ 'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest', 'IntegrationTest']
+_EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [
+ 'DisabledTest', 'FlakyTest', 'Manual']
+_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS +
+ _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS)
+
+# These test methods are inherited from the android.test base test classes
+# and are permitted to lack a size annotation. For more, please check
+# https://developer.android.com/reference/android/test/AndroidTestCase.html
+# https://developer.android.com/reference/android/test/ServiceTestCase.html
+_TEST_WITHOUT_SIZE_ANNOTATIONS = [
+ 'testAndroidTestCaseSetupProperly', 'testServiceTestCaseSetUpProperly']
+
+_EXTRA_DRIVER_TEST_LIST = (
+ 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestList')
+_EXTRA_DRIVER_TEST_LIST_FILE = (
+ 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestListFile')
+_EXTRA_DRIVER_TARGET_PACKAGE = (
+ 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetPackage')
+_EXTRA_DRIVER_TARGET_CLASS = (
+ 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetClass')
+_EXTRA_TIMEOUT_SCALE = (
+ 'org.chromium.test.driver.OnDeviceInstrumentationDriver.TimeoutScale')
+_TEST_LIST_JUNIT4_RUNNERS = [
+ 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner']
+
+_SKIP_PARAMETERIZATION = 'SkipCommandLineParameterization'
+_COMMANDLINE_PARAMETERIZATION = 'CommandLineParameter'
+_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE)
+_PICKLE_FORMAT_VERSION = 12
+
+
+class MissingSizeAnnotationError(test_exception.TestException):
+ def __init__(self, class_name):
+ super(MissingSizeAnnotationError, self).__init__(class_name +
+ ': Test method is missing required size annotation. Add one of: ' +
+ ', '.join('@' + a for a in _VALID_ANNOTATIONS))
+
+
+class TestListPickleException(test_exception.TestException):
+ pass
+
+
+# TODO(jbudorick): Make these private class methods of
+# InstrumentationTestInstance once the instrumentation junit3_runner_class is
+# deprecated.
+def ParseAmInstrumentRawOutput(raw_output):
+ """Parses the output of an |am instrument -r| call.
+
+ Args:
+ raw_output: the output of an |am instrument -r| call as a list of lines
+ Returns:
+ A 3-tuple containing:
+ - the instrumentation code as an integer
+ - the instrumentation result as a list of lines
+ - the instrumentation statuses received as a list of 2-tuples
+ containing:
+ - the status code as an integer
+ - the bundle dump as a dict mapping string keys to a list of
+ strings, one for each line.
+ """
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ return (code, bundle, statuses)
+
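To make the parse concrete, here is a hedged sketch of representative `am instrument -r` output and the 3-tuple it yields; the raw lines follow the standard Android instrumentation format, and the expected values follow the docstring above rather than behavior verified here.

```python
# Illustrative input; not captured from a real device.
raw_output = [
    'INSTRUMENTATION_STATUS: class=org.chromium.FooTest',
    'INSTRUMENTATION_STATUS: test=testBar',
    'INSTRUMENTATION_STATUS_CODE: 1',   # test started
    'INSTRUMENTATION_STATUS: class=org.chromium.FooTest',
    'INSTRUMENTATION_STATUS: test=testBar',
    'INSTRUMENTATION_STATUS_CODE: 0',   # test passed
    'INSTRUMENTATION_CODE: -1',         # overall result (_ACTIVITY_RESULT_OK)
]
code, bundle, statuses = ParseAmInstrumentRawOutput(raw_output)
# Expected: code == -1, bundle == {} (no INSTRUMENTATION_RESULT lines), and
# statuses == [(1, {'class': 'org.chromium.FooTest', 'test': 'testBar'}),
#              (0, {'class': 'org.chromium.FooTest', 'test': 'testBar'})]
# (single-line bundle values shown as plain strings, as consumed by
# GenerateTestResults below).
```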
+
+def GenerateTestResults(
+ result_code, result_bundle, statuses, start_ms, duration_ms, device_abi,
+ symbolizer):
+ """Generate test results from |statuses|.
+
+ Args:
+ result_code: The overall status code as an integer.
+ result_bundle: The summary bundle dump as a dict.
+ statuses: A list of 2-tuples containing:
+ - the status code as an integer
+ - the bundle dump as a dict mapping string keys to string values
+      Note that this is the same as the third item in the 3-tuple returned by
+      |ParseAmInstrumentRawOutput|.
+ start_ms: The start time of the test in milliseconds.
+ duration_ms: The duration of the test in milliseconds.
+ device_abi: The device_abi, which is needed for symbolization.
+    symbolizer: The symbolizer used to symbolize native stack traces.
+
+ Returns:
+ A list containing an instance of InstrumentationTestResult for each test
+ parsed.
+ """
+
+ results = []
+
+ current_result = None
+
+ for status_code, bundle in statuses:
+ test_class = bundle.get('class', '')
+ test_method = bundle.get('test', '')
+ if test_class and test_method:
+ test_name = '%s#%s' % (test_class, test_method)
+ else:
+ continue
+
+ if status_code == instrumentation_parser.STATUS_CODE_START:
+ if current_result:
+ results.append(current_result)
+ current_result = test_result.InstrumentationTestResult(
+ test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
+ else:
+ if status_code == instrumentation_parser.STATUS_CODE_OK:
+ if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'):
+ current_result.SetType(base_test_result.ResultType.SKIP)
+ elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+ current_result.SetType(base_test_result.ResultType.PASS)
+ elif status_code == instrumentation_parser.STATUS_CODE_SKIP:
+ current_result.SetType(base_test_result.ResultType.SKIP)
+ elif status_code == instrumentation_parser.STATUS_CODE_ASSUMPTION_FAILURE:
+ current_result.SetType(base_test_result.ResultType.SKIP)
+ else:
+ if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
+ instrumentation_parser.STATUS_CODE_FAILURE):
+ logging.error('Unrecognized status code %d. Handling as an error.',
+ status_code)
+ current_result.SetType(base_test_result.ResultType.FAIL)
+ if 'stack' in bundle:
+ if symbolizer and device_abi:
+ current_result.SetLog(
+ '%s\n%s' % (
+ bundle['stack'],
+ '\n'.join(symbolizer.ExtractAndResolveNativeStackTraces(
+ bundle['stack'], device_abi))))
+ else:
+ current_result.SetLog(bundle['stack'])
+
+ if current_result:
+ if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+ crashed = (result_code == _ACTIVITY_RESULT_CANCELED
+ and any(_NATIVE_CRASH_RE.search(l)
+ for l in result_bundle.itervalues()))
+ if crashed:
+ current_result.SetType(base_test_result.ResultType.CRASH)
+
+ results.append(current_result)
+
+ return results
+
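The status-code state machine above is easiest to see on a concrete sequence. A minimal sketch, mirroring the unit tests added later in this patch (status codes per instrumentation_parser: 1 start, 0 ok, -2 failure, -3 skip):

```python
# One test that starts and then reports OK yields a single PASS result.
statuses = [
    (1, {'class': 'test.package.TestClass', 'test': 'testMethod'}),
    (0, {'class': 'test.package.TestClass', 'test': 'testMethod'}),
]
results = GenerateTestResults(None, None, statuses, 0, 1000, None, None)
assert len(results) == 1
assert results[0].GetType() == base_test_result.ResultType.PASS
```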
+
+def FilterTests(tests, filter_str=None, annotations=None,
+ excluded_annotations=None):
+  """Filters a list of tests.
+
+  Args:
+    tests: a list of tests, e.g. [
+        {'annotations': {}, 'class': 'com.example.TestA', 'method': 'test1'},
+        {'annotations': {}, 'class': 'com.example.TestB', 'method': 'test2'}]
+    filter_str: googletest-style filter string.
+    annotations: a dict of wanted annotations for test methods.
+    excluded_annotations: a dict of annotations to exclude.
+
+  Returns:
+    A list of filtered tests.
+ """
+ def gtest_filter(t):
+ if not filter_str:
+ return True
+ # Allow fully-qualified name as well as an omitted package.
+ unqualified_class_test = {
+ 'class': t['class'].split('.')[-1],
+ 'method': t['method']
+ }
+ names = [
+ GetTestName(t, sep='.'),
+ GetTestName(unqualified_class_test, sep='.'),
+ GetUniqueTestName(t, sep='.')
+ ]
+
+ if t['is_junit4']:
+ names += [
+ GetTestNameWithoutParameterPostfix(t, sep='.'),
+ GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
+ ]
+
+ pattern_groups = filter_str.split('-')
+ if len(pattern_groups) > 1:
+ negative_filter = pattern_groups[1]
+ if unittest_util.FilterTestNames(names, negative_filter):
+ return []
+
+ positive_filter = pattern_groups[0]
+ return unittest_util.FilterTestNames(names, positive_filter)
+
+ def annotation_filter(all_annotations):
+ if not annotations:
+ return True
+ return any_annotation_matches(annotations, all_annotations)
+
+ def excluded_annotation_filter(all_annotations):
+ if not excluded_annotations:
+ return True
+ return not any_annotation_matches(excluded_annotations,
+ all_annotations)
+
+ def any_annotation_matches(filter_annotations, all_annotations):
+ return any(
+ ak in all_annotations
+ and annotation_value_matches(av, all_annotations[ak])
+ for ak, av in filter_annotations)
+
+ def annotation_value_matches(filter_av, av):
+ if filter_av is None:
+ return True
+ elif isinstance(av, dict):
+ return filter_av in av['value']
+ elif isinstance(av, list):
+ return filter_av in av
+ return filter_av == av
+
+ filtered_tests = []
+ for t in tests:
+ # Gtest filtering
+ if not gtest_filter(t):
+ continue
+
+ # Enforce that all tests declare their size.
+ if (not any(a in _VALID_ANNOTATIONS for a in t['annotations'])
+ and t['method'] not in _TEST_WITHOUT_SIZE_ANNOTATIONS):
+ raise MissingSizeAnnotationError(GetTestName(t))
+
+ if (not annotation_filter(t['annotations'])
+ or not excluded_annotation_filter(t['annotations'])):
+ continue
+
+ filtered_tests.append(t)
+
+ return filtered_tests
+
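A short usage sketch of the filter semantics, consistent with the unit tests later in this patch: a '-' splits the googletest-style string into a positive and a negative pattern group.

```python
tests = [
    {'annotations': {'SmallTest': None}, 'class': 'com.example.TestA',
     'method': 'test1', 'is_junit4': True},
    {'annotations': {'MediumTest': None}, 'class': 'com.example.TestB',
     'method': 'test2', 'is_junit4': True},
]
# Keep everything except com.example.TestA.test1.
kept = FilterTests(tests, filter_str='*-com.example.TestA.test1')
assert [t['method'] for t in kept] == ['test2']
```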
+
+# TODO(yolandyan): remove this once the tests are converted to junit4
+def GetAllTestsFromJar(test_jar):
+ pickle_path = '%s-proguard.pickle' % test_jar
+ try:
+ tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_jar))
+ except TestListPickleException as e:
+ logging.info('Could not get tests from pickle: %s', e)
+ logging.info('Getting tests from JAR via proguard.')
+ tests = _GetTestsFromProguard(test_jar)
+ SaveTestsToPickle(pickle_path, tests)
+ return tests
+
+
+def GetAllTestsFromApk(test_apk):
+ pickle_path = '%s-dexdump.pickle' % test_apk
+ try:
+ tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_apk))
+ except TestListPickleException as e:
+ logging.info('Could not get tests from pickle: %s', e)
+ logging.info('Getting tests from dex via dexdump.')
+ tests = _GetTestsFromDexdump(test_apk)
+ SaveTestsToPickle(pickle_path, tests)
+ return tests
+
+
+def GetTestsFromPickle(pickle_path, test_mtime):
+ if not os.path.exists(pickle_path):
+ raise TestListPickleException('%s does not exist.' % pickle_path)
+ if os.path.getmtime(pickle_path) <= test_mtime:
+ raise TestListPickleException('File is stale: %s' % pickle_path)
+
+ with open(pickle_path, 'r') as f:
+ pickle_data = pickle.load(f)
+ if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
+ raise TestListPickleException('PICKLE_FORMAT_VERSION has changed.')
+ return pickle_data['TEST_METHODS']
+
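GetAllTestsFromJar/GetAllTestsFromApk plus the two pickle helpers form a simple mtime-invalidated cache: the expensive proguard or dexdump listing is recomputed only when the jar or APK is newer than its pickle. A self-contained sketch of the same pattern, with hypothetical names:

```python
import os
import pickle


def cached_listing(source_path, compute):
  # Recompute only when the source is newer than the cached pickle,
  # mirroring GetTestsFromPickle/SaveTestsToPickle above.
  cache_path = source_path + '.cache.pickle'
  if (os.path.exists(cache_path)
      and os.path.getmtime(cache_path) > os.path.getmtime(source_path)):
    with open(cache_path, 'rb') as f:
      return pickle.load(f)
  result = compute(source_path)
  with open(cache_path, 'wb') as f:
    pickle.dump(result, f)
  return result
```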
+
+# TODO(yolandyan): remove this once the test listing from java runner lands
+@instrumentation_tracing.no_tracing
+def _GetTestsFromProguard(jar_path):
+ p = proguard.Dump(jar_path)
+ class_lookup = dict((c['class'], c) for c in p['classes'])
+
+ def is_test_class(c):
+ return c['class'].endswith('Test')
+
+ def is_test_method(m):
+ return m['method'].startswith('test')
+
+ def recursive_class_annotations(c):
+ s = c['superclass']
+ if s in class_lookup:
+ a = recursive_class_annotations(class_lookup[s])
+ else:
+ a = {}
+ a.update(c['annotations'])
+ return a
+
+ def stripped_test_class(c):
+ return {
+ 'class': c['class'],
+ 'annotations': recursive_class_annotations(c),
+ 'methods': [m for m in c['methods'] if is_test_method(m)],
+ 'superclass': c['superclass'],
+ }
+
+ return [stripped_test_class(c) for c in p['classes']
+ if is_test_class(c)]
+
+
+def _GetTestsFromDexdump(test_apk):
+ dump = dexdump.Dump(test_apk)
+ tests = []
+
+ def get_test_methods(methods):
+ return [
+ {
+ 'method': m,
+ # No annotation info is available from dexdump.
+ # Set MediumTest annotation for default.
+ 'annotations': {'MediumTest': None},
+ } for m in methods if m.startswith('test')]
+
+ for package_name, package_info in dump.iteritems():
+ for class_name, class_info in package_info['classes'].iteritems():
+ if class_name.endswith('Test'):
+ tests.append({
+ 'class': '%s.%s' % (package_name, class_name),
+ 'annotations': {},
+ 'methods': get_test_methods(class_info['methods']),
+ 'superclass': class_info['superclass'],
+ })
+ return tests
+
+
+def SaveTestsToPickle(pickle_path, tests):
+ pickle_data = {
+ 'VERSION': _PICKLE_FORMAT_VERSION,
+ 'TEST_METHODS': tests,
+ }
+ with open(pickle_path, 'w') as pickle_file:
+ pickle.dump(pickle_data, pickle_file)
+
+
+class MissingJUnit4RunnerException(test_exception.TestException):
+  """Raised when a JUnit4 runner is neither provided nor specified in the
+  test APK manifest."""
+
+ def __init__(self):
+ super(MissingJUnit4RunnerException, self).__init__(
+ 'JUnit4 runner is not provided or specified in test apk manifest.')
+
+
+def GetTestName(test, sep='#'):
+ """Gets the name of the given test.
+
+ Note that this may return the same name for more than one test, e.g. if a
+ test is being run multiple times with different parameters.
+
+ Args:
+ test: the instrumentation test dict.
+ sep: the character(s) that should join the class name and the method name.
+ Returns:
+ The test name as a string.
+ """
+ test_name = '%s%s%s' % (test['class'], sep, test['method'])
+  assert not any(c in test_name for c in ' *-:'), (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+ return test_name
+
+
+def GetTestNameWithoutParameterPostfix(
+ test, sep='#', parameterization_sep='__'):
+ """Gets the name of the given JUnit4 test without parameter postfix.
+
+  For most WebView JUnit4 javatests, each test is parameterized with
+  "__sandboxed_mode" to run in both non-sandboxed mode and sandboxed mode.
+
+ This function returns the name of the test without parameterization
+ so test filters can match both parameterized and non-parameterized tests.
+
+ Args:
+ test: the instrumentation test dict.
+ sep: the character(s) that should join the class name and the method name.
+    parameterization_sep: the character(s) that separate the method name and
+                          the method parameterization postfix.
+ Returns:
+ The test name without parameter postfix as a string.
+ """
+ name = GetTestName(test, sep=sep)
+ return name.split(parameterization_sep)[0]
+
+
+def GetUniqueTestName(test, sep='#'):
+ """Gets the unique name of the given test.
+
+ This will include text to disambiguate between tests for which GetTestName
+ would return the same name.
+
+ Args:
+ test: the instrumentation test dict.
+ sep: the character(s) that should join the class name and the method name.
+ Returns:
+ The unique test name as a string.
+ """
+ display_name = GetTestName(test, sep=sep)
+ if test.get('flags', [None])[0]:
+ sanitized_flags = [x.replace('-', '_') for x in test['flags']]
+ display_name = '%s_with_%s' % (display_name, '_'.join(sanitized_flags))
+
+  assert not any(c in display_name for c in ' *-:'), (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+
+ return display_name
+
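How the three name helpers relate, on a hypothetical parameterized test dict; the expected strings follow directly from the code above.

```python
test = {
    'class': 'org.chromium.FooTest',
    'method': 'testBar__sandboxed_mode',
    'flags': ['--site-per-process'],
}
GetTestName(test)
# -> 'org.chromium.FooTest#testBar__sandboxed_mode'
GetTestNameWithoutParameterPostfix(test)
# -> 'org.chromium.FooTest#testBar'
GetUniqueTestName(test)  # '-' in flags is sanitized to '_'
# -> 'org.chromium.FooTest#testBar__sandboxed_mode_with___site_per_process'
```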
+
+class InstrumentationTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, data_deps_delegate, error_func):
+ super(InstrumentationTestInstance, self).__init__()
+
+ self._additional_apks = []
+ self._apk_under_test = None
+ self._apk_under_test_incremental_install_json = None
+ self._package_info = None
+ self._suite = None
+ self._test_apk = None
+ self._test_apk_incremental_install_json = None
+ self._test_jar = None
+ self._test_package = None
+ self._junit3_runner_class = None
+ self._junit4_runner_class = None
+ self._junit4_runner_supports_listing = None
+ self._test_support_apk = None
+ self._initializeApkAttributes(args, error_func)
+
+ self._data_deps = None
+ self._data_deps_delegate = None
+ self._runtime_deps_path = None
+ self._initializeDataDependencyAttributes(args, data_deps_delegate)
+
+ self._annotations = None
+ self._excluded_annotations = None
+ self._test_filter = None
+ self._initializeTestFilterAttributes(args)
+
+ self._flags = None
+ self._use_apk_under_test_flags_file = False
+ self._initializeFlagAttributes(args)
+
+ self._driver_apk = None
+ self._driver_package = None
+ self._driver_name = None
+ self._initializeDriverAttributes()
+
+ self._screenshot_dir = None
+ self._timeout_scale = None
+ self._wait_for_java_debugger = None
+ self._initializeTestControlAttributes(args)
+
+ self._coverage_directory = None
+ self._initializeTestCoverageAttributes(args)
+
+ self._store_tombstones = False
+ self._symbolizer = None
+ self._enable_java_deobfuscation = False
+ self._deobfuscator = None
+ self._initializeLogAttributes(args)
+
+ self._edit_shared_prefs = []
+ self._initializeEditPrefsAttributes(args)
+
+ self._replace_system_package = None
+ self._initializeReplaceSystemPackageAttributes(args)
+
+ self._use_webview_provider = None
+ self._initializeUseWebviewProviderAttributes(args)
+
+ self._external_shard_index = args.test_launcher_shard_index
+ self._total_external_shards = args.test_launcher_total_shards
+
+ def _initializeApkAttributes(self, args, error_func):
+ if args.apk_under_test:
+ apk_under_test_path = args.apk_under_test
+ if not args.apk_under_test.endswith('.apk'):
+ apk_under_test_path = os.path.join(
+ constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+ '%s.apk' % args.apk_under_test)
+
+ # TODO(jbudorick): Move the realpath up to the argument parser once
+ # APK-by-name is no longer supported.
+ apk_under_test_path = os.path.realpath(apk_under_test_path)
+
+ if not os.path.exists(apk_under_test_path):
+ error_func('Unable to find APK under test: %s' % apk_under_test_path)
+
+ self._apk_under_test = apk_helper.ToHelper(apk_under_test_path)
+
+ if args.test_apk.endswith('.apk'):
+ self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
+ test_apk_path = args.test_apk
+ self._test_apk = apk_helper.ToHelper(args.test_apk)
+ else:
+ self._suite = args.test_apk
+ test_apk_path = os.path.join(
+ constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+ '%s.apk' % args.test_apk)
+
+ # TODO(jbudorick): Move the realpath up to the argument parser once
+ # APK-by-name is no longer supported.
+ test_apk_path = os.path.realpath(test_apk_path)
+
+ if not os.path.exists(test_apk_path):
+ error_func('Unable to find test APK: %s' % test_apk_path)
+
+ self._test_apk = apk_helper.ToHelper(test_apk_path)
+
+ self._apk_under_test_incremental_install_json = (
+ args.apk_under_test_incremental_install_json)
+ self._test_apk_incremental_install_json = (
+ args.test_apk_incremental_install_json)
+
+ if self._test_apk_incremental_install_json:
+ assert self._suite.endswith('_incremental')
+ self._suite = self._suite[:-len('_incremental')]
+
+ self._test_jar = args.test_jar
+ self._test_support_apk = apk_helper.ToHelper(os.path.join(
+ constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+ '%sSupport.apk' % self._suite))
+
+ if not os.path.exists(self._test_apk.path):
+ error_func('Unable to find test APK: %s' % self._test_apk.path)
+ if not self._test_jar:
+ logging.warning('Test jar not specified. Test runner will not have '
+ 'Java annotation info available. May not handle test '
+ 'timeouts correctly.')
+ elif not os.path.exists(self._test_jar):
+ error_func('Unable to find test JAR: %s' % self._test_jar)
+
+ self._test_package = self._test_apk.GetPackageName()
+ all_instrumentations = self._test_apk.GetAllInstrumentations()
+ all_junit3_runner_classes = [
+ x for x in all_instrumentations if ('0xffffffff' in x.get(
+ 'chromium-junit3', ''))]
+ all_junit4_runner_classes = [
+ x for x in all_instrumentations if ('0xffffffff' not in x.get(
+ 'chromium-junit3', ''))]
+
+ if len(all_junit3_runner_classes) > 1:
+ logging.warning('This test apk has more than one JUnit3 instrumentation')
+ if len(all_junit4_runner_classes) > 1:
+ logging.warning('This test apk has more than one JUnit4 instrumentation')
+
+ self._junit3_runner_class = (
+ all_junit3_runner_classes[0]['android:name']
+ if all_junit3_runner_classes else self.test_apk.GetInstrumentationName())
+
+ self._junit4_runner_class = (
+ all_junit4_runner_classes[0]['android:name']
+ if all_junit4_runner_classes else None)
+
+ if self._junit4_runner_class:
+ if self._test_apk_incremental_install_json:
+ self._junit4_runner_supports_listing = next(
+ (True for x in self._test_apk.GetAllMetadata()
+ if 'real-instr' in x[0] and x[1] in _TEST_LIST_JUNIT4_RUNNERS),
+ False)
+ else:
+ self._junit4_runner_supports_listing = (
+ self._junit4_runner_class in _TEST_LIST_JUNIT4_RUNNERS)
+
+ self._package_info = None
+ if self._apk_under_test:
+ package_under_test = self._apk_under_test.GetPackageName()
+ for package_info in constants.PACKAGE_INFO.itervalues():
+ if package_under_test == package_info.package:
+ self._package_info = package_info
+ break
+ if not self._package_info:
+ logging.warning('Unable to find package info for %s', self._test_package)
+
+ for apk in args.additional_apks:
+ if not os.path.exists(apk):
+ error_func('Unable to find additional APK: %s' % apk)
+ self._additional_apks = (
+ [apk_helper.ToHelper(x) for x in args.additional_apks])
+
+ def _initializeDataDependencyAttributes(self, args, data_deps_delegate):
+ self._data_deps = []
+ self._data_deps_delegate = data_deps_delegate
+ self._runtime_deps_path = args.runtime_deps_path
+
+ if not self._runtime_deps_path:
+ logging.warning('No data dependencies will be pushed.')
+
+ def _initializeTestFilterAttributes(self, args):
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+
+ def annotation_element(a):
+ a = a.split('=', 1)
+ return (a[0], a[1] if len(a) == 2 else None)
+
+ if args.annotation_str:
+ self._annotations = [
+ annotation_element(a) for a in args.annotation_str.split(',')]
+ elif not self._test_filter:
+ self._annotations = [
+ annotation_element(a) for a in _DEFAULT_ANNOTATIONS]
+ else:
+ self._annotations = []
+
+ if args.exclude_annotation_str:
+ self._excluded_annotations = [
+ annotation_element(a) for a in args.exclude_annotation_str.split(',')]
+ else:
+ self._excluded_annotations = []
+
+ requested_annotations = set(a[0] for a in self._annotations)
+ if not args.run_disabled:
+ self._excluded_annotations.extend(
+ annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS
+ if a not in requested_annotations)
+
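The local annotation_element helper turns each comma-separated --annotation entry into a (name, value) pair, where a missing value means "match regardless of value". A standalone copy for illustration:

```python
def annotation_element(a):  # copy of the local helper above
  a = a.split('=', 1)
  return (a[0], a[1] if len(a) == 2 else None)


pairs = [annotation_element(a) for a in 'Feature=Bar,SmallTest'.split(',')]
assert pairs == [('Feature', 'Bar'), ('SmallTest', None)]
# FilterTests treats such a list as an OR of annotation matchers.
```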
+ def _initializeFlagAttributes(self, args):
+ self._use_apk_under_test_flags_file = args.use_apk_under_test_flags_file
+ self._flags = ['--enable-test-intents']
+ if args.command_line_flags:
+ self._flags.extend(args.command_line_flags)
+ if args.device_flags_file:
+ with open(args.device_flags_file) as device_flags_file:
+ stripped_lines = (l.strip() for l in device_flags_file)
+ self._flags.extend(flag for flag in stripped_lines if flag)
+ if args.strict_mode and args.strict_mode != 'off':
+ self._flags.append('--strict-mode=' + args.strict_mode)
+
+ def _initializeDriverAttributes(self):
+ self._driver_apk = os.path.join(
+ constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+ 'OnDeviceInstrumentationDriver.apk')
+ if os.path.exists(self._driver_apk):
+ driver_apk = apk_helper.ApkHelper(self._driver_apk)
+ self._driver_package = driver_apk.GetPackageName()
+ self._driver_name = driver_apk.GetInstrumentationName()
+ else:
+ self._driver_apk = None
+
+ def _initializeTestControlAttributes(self, args):
+ self._screenshot_dir = args.screenshot_dir
+ self._timeout_scale = args.timeout_scale or 1
+ self._wait_for_java_debugger = args.wait_for_java_debugger
+
+ def _initializeTestCoverageAttributes(self, args):
+ self._coverage_directory = args.coverage_dir
+
+ def _initializeLogAttributes(self, args):
+ self._enable_java_deobfuscation = args.enable_java_deobfuscation
+ self._store_tombstones = args.store_tombstones
+ self._symbolizer = stack_symbolizer.Symbolizer(
+ self.apk_under_test.path if self.apk_under_test else None)
+
+ def _initializeEditPrefsAttributes(self, args):
+ if not hasattr(args, 'shared_prefs_file') or not args.shared_prefs_file:
+ return
+ if not isinstance(args.shared_prefs_file, str):
+      logging.warning('Given non-string for a filepath')
+ return
+ self._edit_shared_prefs = shared_preference_utils.ExtractSettingsFromJson(
+ args.shared_prefs_file)
+
+ def _initializeReplaceSystemPackageAttributes(self, args):
+ if (not hasattr(args, 'replace_system_package')
+ or not args.replace_system_package):
+ return
+ self._replace_system_package = args.replace_system_package
+
+ def _initializeUseWebviewProviderAttributes(self, args):
+ if (not hasattr(args, 'use_webview_provider')
+ or not args.use_webview_provider):
+ return
+ self._use_webview_provider = args.use_webview_provider
+
+ @property
+ def additional_apks(self):
+ return self._additional_apks
+
+ @property
+ def apk_under_test(self):
+ return self._apk_under_test
+
+ @property
+ def apk_under_test_incremental_install_json(self):
+ return self._apk_under_test_incremental_install_json
+
+ @property
+ def coverage_directory(self):
+ return self._coverage_directory
+
+ @property
+ def driver_apk(self):
+ return self._driver_apk
+
+ @property
+ def driver_package(self):
+ return self._driver_package
+
+ @property
+ def driver_name(self):
+ return self._driver_name
+
+ @property
+ def edit_shared_prefs(self):
+ return self._edit_shared_prefs
+
+ @property
+ def external_shard_index(self):
+ return self._external_shard_index
+
+ @property
+ def flags(self):
+ return self._flags
+
+ @property
+ def junit3_runner_class(self):
+ return self._junit3_runner_class
+
+ @property
+ def junit4_runner_class(self):
+ return self._junit4_runner_class
+
+ @property
+ def junit4_runner_supports_listing(self):
+ return self._junit4_runner_supports_listing
+
+ @property
+ def package_info(self):
+ return self._package_info
+
+ @property
+ def replace_system_package(self):
+ return self._replace_system_package
+
+ @property
+ def use_webview_provider(self):
+ return self._use_webview_provider
+
+ @property
+ def screenshot_dir(self):
+ return self._screenshot_dir
+
+ @property
+ def store_tombstones(self):
+ return self._store_tombstones
+
+ @property
+ def suite(self):
+ return self._suite
+
+ @property
+ def symbolizer(self):
+ return self._symbolizer
+
+ @property
+ def test_apk(self):
+ return self._test_apk
+
+ @property
+ def test_apk_incremental_install_json(self):
+ return self._test_apk_incremental_install_json
+
+ @property
+ def test_jar(self):
+ return self._test_jar
+
+ @property
+ def test_support_apk(self):
+ return self._test_support_apk
+
+ @property
+ def test_package(self):
+ return self._test_package
+
+ @property
+ def timeout_scale(self):
+ return self._timeout_scale
+
+ @property
+ def total_external_shards(self):
+ return self._total_external_shards
+
+ @property
+ def use_apk_under_test_flags_file(self):
+ return self._use_apk_under_test_flags_file
+
+ @property
+ def wait_for_java_debugger(self):
+ return self._wait_for_java_debugger
+
+ #override
+ def TestType(self):
+ return 'instrumentation'
+
+ #override
+ def GetPreferredAbis(self):
+ ret = self._test_apk.GetAbis()
+ if not ret and self._apk_under_test:
+ ret = self._apk_under_test.GetAbis()
+ return ret
+
+ #override
+ def SetUp(self):
+ self._data_deps.extend(
+ self._data_deps_delegate(self._runtime_deps_path))
+ if self._enable_java_deobfuscation:
+ self._deobfuscator = deobfuscator.DeobfuscatorPool(
+ self.test_apk.path + '.mapping')
+
+ def GetDataDependencies(self):
+ return self._data_deps
+
+ def GetTests(self):
+ if self.test_jar:
+ raw_tests = GetAllTestsFromJar(self.test_jar)
+ else:
+ raw_tests = GetAllTestsFromApk(self.test_apk.path)
+ return self.ProcessRawTests(raw_tests)
+
+ def MaybeDeobfuscateLines(self, lines):
+ if not self._deobfuscator:
+ return lines
+ return self._deobfuscator.TransformLines(lines)
+
+ def ProcessRawTests(self, raw_tests):
+ inflated_tests = self._ParameterizeTestsWithFlags(
+ self._InflateTests(raw_tests))
+ if self._junit4_runner_class is None and any(
+ t['is_junit4'] for t in inflated_tests):
+ raise MissingJUnit4RunnerException()
+ filtered_tests = FilterTests(
+ inflated_tests, self._test_filter, self._annotations,
+ self._excluded_annotations)
+ if self._test_filter and not filtered_tests:
+ for t in inflated_tests:
+ logging.debug(' %s', GetUniqueTestName(t))
+ logging.warning('Unmatched Filter: %s', self._test_filter)
+ return filtered_tests
+
+ # pylint: disable=no-self-use
+ def _InflateTests(self, tests):
+ inflated_tests = []
+ for c in tests:
+ for m in c['methods']:
+ a = dict(c['annotations'])
+ a.update(m['annotations'])
+ inflated_tests.append({
+ 'class': c['class'],
+ 'method': m['method'],
+ 'annotations': a,
+ 'is_junit4': c['superclass'] == 'java.lang.Object'
+ })
+ return inflated_tests
+
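_InflateTests flattens each class dict into one dict per test method, merging method annotations over class annotations; is_junit4 relies on the heuristic that JUnit4-style classes extend java.lang.Object directly, while JUnit3 classes extend a TestCase. A minimal illustration:

```python
raw = [{
    'class': 'org.chromium.FooTest',
    'superclass': 'java.lang.Object',
    'annotations': {'Feature': {'value': ['Foo']}},
    'methods': [{'method': 'testBar', 'annotations': {'SmallTest': None}}],
}]
# _InflateTests(raw) yields:
# [{'class': 'org.chromium.FooTest',
#   'method': 'testBar',
#   'annotations': {'Feature': {'value': ['Foo']}, 'SmallTest': None},
#   'is_junit4': True}]
```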
+ def _ParameterizeTestsWithFlags(self, tests):
+ new_tests = []
+ for t in tests:
+ annotations = t['annotations']
+ parameters = None
+ if (annotations.get(_COMMANDLINE_PARAMETERIZATION)
+ and _SKIP_PARAMETERIZATION not in annotations):
+ parameters = annotations[_COMMANDLINE_PARAMETERIZATION]['value']
+ if parameters:
+ t['flags'] = [parameters[0]]
+ for p in parameters[1:]:
+ parameterized_t = copy.copy(t)
+ parameterized_t['flags'] = ['--%s' % p]
+ new_tests.append(parameterized_t)
+ return tests + new_tests
+
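Note the asymmetry in _ParameterizeTestsWithFlags: the first CommandLineParameter value is stored verbatim on the original test, and only the copies made for the remaining values get a '--' prefix; the unit tests in this patch rely on the first value being the empty string. A sketch of the expansion:

```python
t = {
    'annotations': {'CommandLineParameter':
                    {'value': ['', 'enable-features=abc']}},
    'class': 'org.chromium.test.SampleTest',
    'method': 'testMethod1',
    'is_junit4': True,
}
# After _ParameterizeTestsWithFlags([t]):
#   t['flags'] == ['']                 # first value, used verbatim
#   the appended copy has flags == ['--enable-features=abc']
```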
+ def GetDriverEnvironmentVars(
+ self, test_list=None, test_list_file_path=None):
+ env = {
+ _EXTRA_DRIVER_TARGET_PACKAGE: self.test_package,
+ _EXTRA_DRIVER_TARGET_CLASS: self.junit3_runner_class,
+ _EXTRA_TIMEOUT_SCALE: self._timeout_scale,
+ }
+
+ if test_list:
+ env[_EXTRA_DRIVER_TEST_LIST] = ','.join(test_list)
+
+ if test_list_file_path:
+ env[_EXTRA_DRIVER_TEST_LIST_FILE] = (
+ os.path.basename(test_list_file_path))
+
+ return env
+
+ @staticmethod
+ def ParseAmInstrumentRawOutput(raw_output):
+ return ParseAmInstrumentRawOutput(raw_output)
+
+ @staticmethod
+ def GenerateTestResults(
+ result_code, result_bundle, statuses, start_ms, duration_ms,
+ device_abi, symbolizer):
+ return GenerateTestResults(result_code, result_bundle, statuses,
+ start_ms, duration_ms, device_abi, symbolizer)
+
+ #override
+ def TearDown(self):
+ self.symbolizer.CleanUp()
+ if self._deobfuscator:
+ self._deobfuscator.Close()
+ self._deobfuscator = None
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
new file mode 100755
index 0000000000..78446d1527
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -0,0 +1,972 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for instrumentation_test_instance."""
+
+# pylint: disable=protected-access
+
+import collections
+import tempfile
+import unittest
+
+from pylib.base import base_test_result
+from pylib.constants import host_paths
+from pylib.instrumentation import instrumentation_test_instance
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+_INSTRUMENTATION_TEST_INSTANCE_PATH = (
+ 'pylib.instrumentation.instrumentation_test_instance.%s')
+
+class InstrumentationTestInstanceTest(unittest.TestCase):
+
+ def setUp(self):
+ options = mock.Mock()
+ options.tool = ''
+
+ @staticmethod
+ def createTestInstance():
+ c = _INSTRUMENTATION_TEST_INSTANCE_PATH % 'InstrumentationTestInstance'
+ with mock.patch('%s._initializeApkAttributes' % c), (
+ mock.patch('%s._initializeDataDependencyAttributes' % c)), (
+ mock.patch('%s._initializeTestFilterAttributes' % c)), (
+ mock.patch('%s._initializeFlagAttributes' % c)), (
+ mock.patch('%s._initializeDriverAttributes' % c)), (
+ mock.patch('%s._initializeTestControlAttributes' % c)), (
+ mock.patch('%s._initializeTestCoverageAttributes' % c)):
+ return instrumentation_test_instance.InstrumentationTestInstance(
+ mock.MagicMock(), mock.MagicMock(), lambda s: None)
+
+ _FlagAttributesArgs = collections.namedtuple(
+ '_FlagAttributesArgs',
+ [
+ 'command_line_flags',
+ 'device_flags_file',
+ 'strict_mode',
+ 'use_apk_under_test_flags_file'
+ ])
+
+ def createFlagAttributesArgs(
+ self, command_line_flags=None, device_flags_file=None,
+ strict_mode=None, use_apk_under_test_flags_file=False):
+ return self._FlagAttributesArgs(
+ command_line_flags, device_flags_file, strict_mode,
+ use_apk_under_test_flags_file)
+
+ def test_initializeFlagAttributes_commandLineFlags(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar'])
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+ def test_initializeFlagAttributes_deviceFlagsFile(self):
+ o = self.createTestInstance()
+ with tempfile.NamedTemporaryFile() as flags_file:
+ flags_file.write('\n'.join(['--foo', '--bar']))
+ flags_file.flush()
+
+ args = self.createFlagAttributesArgs(device_flags_file=flags_file.name)
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+ def test_initializeFlagAttributes_strictModeOn(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(strict_mode='on')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on'])
+
+ def test_initializeFlagAttributes_strictModeOff(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(strict_mode='off')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents'])
+
+ def testGetTests_noFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod1',
+ 'is_junit4': True,
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod2',
+ 'is_junit4': True,
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'method': 'testMethod1',
+ 'is_junit4': True,
+ },
+ ]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_simpleGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_simpleGtestUnqualifiedNameFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = 'SampleTest.testMethod1'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_parameterizedTestGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1__sandboxed_mode',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod1',
+ 'is_junit4': True,
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod1__sandboxed_mode',
+ 'is_junit4': True,
+ },
+ ]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_wildcardGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = 'org.chromium.test.SampleTest2.*'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_negativeGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod2',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = '*-org.chromium.test.SampleTest.testMethod1'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_annotationFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('SmallTest', None)]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_excludedAnnotationFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': False,
+ 'method': 'testMethod2',
+ },
+ ]
+
+ o._excluded_annotations = [('SmallTest', None)]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_annotationSimpleValueFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None,
+ 'TestValue': '1',
+ },
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'MediumTest': None,
+ 'TestValue': '2',
+ },
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None,
+ 'TestValue': '3',
+ },
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ 'TestValue': '1',
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': False,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('TestValue', '1')]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_annotationDictValueFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('Feature', 'Bar')]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTestName(self):
+ test = {
+ 'annotations': {
+ 'RunWith': {'value': 'class J4Runner'},
+ 'SmallTest': {},
+ 'Test': {'expected': 'class org.junit.Test$None',
+ 'timeout': '0'},
+ 'UiThreadTest': {}},
+ 'class': 'org.chromium.TestA',
+ 'is_junit4': True,
+ 'method': 'testSimple'}
+ unqualified_class_test = {
+ 'class': test['class'].split('.')[-1],
+ 'method': test['method']
+ }
+
+ self.assertEquals(
+ instrumentation_test_instance.GetTestName(test, sep='.'),
+ 'org.chromium.TestA.testSimple')
+ self.assertEquals(
+ instrumentation_test_instance.GetTestName(
+ unqualified_class_test, sep='.'),
+ 'TestA.testSimple')
+
+ def testGetUniqueTestName(self):
+ test = {
+ 'annotations': {
+ 'RunWith': {'value': 'class J4Runner'},
+ 'SmallTest': {},
+ 'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+ 'UiThreadTest': {}},
+ 'class': 'org.chromium.TestA',
+ 'flags': ['enable_features=abc'],
+ 'is_junit4': True,
+ 'method': 'testSimple'}
+ self.assertEquals(
+ instrumentation_test_instance.GetUniqueTestName(
+ test, sep='.'),
+ 'org.chromium.TestA.testSimple_with_enable_features=abc')
+
+ def testGetTestNameWithoutParameterPostfix(self):
+ test = {
+ 'annotations': {
+ 'RunWith': {'value': 'class J4Runner'},
+ 'SmallTest': {},
+ 'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+ 'UiThreadTest': {}},
+ 'class': 'org.chromium.TestA__sandbox_mode',
+ 'flags': 'enable_features=abc',
+ 'is_junit4': True,
+ 'method': 'testSimple'}
+ unqualified_class_test = {
+ 'class': test['class'].split('.')[-1],
+ 'method': test['method']
+ }
+ self.assertEquals(
+ instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+ test, sep='.'),
+ 'org.chromium.TestA')
+ self.assertEquals(
+ instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+ unqualified_class_test, sep='.'),
+ 'TestA')
+
+ def testGetTests_multipleAnnotationValuesRequested(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Baz']},
+ 'MediumTest': None,
+ },
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Baz']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': False,
+ 'method': 'testMethod2',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': False,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGenerateTestResults_noStatus(self):
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, [], 0, 1000, None, None)
+ self.assertEqual([], results)
+
+ def testGenerateTestResults_testPassed(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+ def testGenerateTestResults_testSkipped_true(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'test_skipped': 'true',
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+ def testGenerateTestResults_testSkipped_false(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'test_skipped': 'false',
+ }),
+ (0, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+ def testGenerateTestResults_testFailed(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (-2, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+
+ def testGenerateTestResults_testUnknownException(self):
+ stacktrace = 'long\nstacktrace'
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (-1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ 'stack': stacktrace,
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+ self.assertEqual(stacktrace, results[0].GetLog())
+
+ def testGenerateJUnitTestResults_testSkipped_true(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (-3, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+ def testCommandLineParameterization(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'CommandLineParameter': {
+ 'value': ['', 'enable-features=abc']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'SmallTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': [''],
+ 'is_junit4': True,
+ 'method': 'testMethod1'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': [''],
+ 'is_junit4': True,
+ 'method': 'testMethod2'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'SmallTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': ['--enable-features=abc'],
+ 'is_junit4': True,
+ 'method': 'testMethod1'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': ['--enable-features=abc'],
+ 'is_junit4': True,
+ 'method': 'testMethod2'}]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testCommandLineParameterization_skipped(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'CommandLineParameter': {
+ 'value': ['', 'enable-features=abc']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None,
+ 'SkipCommandLineParameterization': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'SkipCommandLineParameterization': None,
+ 'SmallTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': [''],
+ 'is_junit4': True,
+ 'method': 'testMethod2'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': ['--enable-features=abc'],
+ 'is_junit4': True,
+ 'method': 'testMethod2'}]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+ self.assertEquals(actual_tests, expected_tests)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/instrumentation/json_perf_parser.py b/deps/v8/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000000..c647890ba3
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+ """Summarizes TraceEvent JSON data for performance metrics.
+
+ Example JSON Inputs (More tags can be added but these are required):
+ Measuring Duration:
+ [
+ { "cat": "Java",
+ "ts": 10000000000,
+ "ph": "S",
+ "name": "TestTrace"
+ },
+ { "cat": "Java",
+ "ts": 10000004000,
+ "ph": "F",
+ "name": "TestTrace"
+ },
+ ...
+ ]
+
+ Measuring Call Frequency (FPS):
+ [
+ { "cat": "Java",
+ "ts": 10000000000,
+ "ph": "I",
+ "name": "TestTraceFPS"
+ },
+ { "cat": "Java",
+ "ts": 10000004000,
+ "ph": "I",
+ "name": "TestTraceFPS"
+ },
+ ...
+ ]
+
+ Args:
+    json_data: A list of dictionaries, each representing a JSON object.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ A dictionary of result data with the following tags:
+ min: The minimum value tracked.
+ max: The maximum value tracked.
+ average: The average of all the values tracked.
+ count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+          begin/end tags).
+ category: The passed in category filter.
+ name: The passed in name filter.
+ data_points: A list of all of the times used to generate this data.
+ units: The units for the values being reported.
+
+ Raises:
+ Exception: if entry contains invalid data.
+ """
+
+ def EntryFilter(entry):
+ return entry['cat'] == 'Java' and entry['name'] == name
+ filtered_entries = [j for j in json_data if EntryFilter(j)]
+
+ result = {}
+
+ result['min'] = -1
+ result['max'] = -1
+ result['average'] = 0
+ result['count'] = 0
+ result['type'] = 'Unknown'
+ result['category'] = 'Java'
+ result['name'] = name
+ result['data_points'] = []
+ result['units'] = ''
+
+ total_sum = 0
+
+ last_val = 0
+ val_type = None
+ for entry in filtered_entries:
+ if not val_type:
+ if 'mem' in entry:
+ val_type = 'mem'
+
+ def GetVal(entry):
+ return entry['mem']
+
+ result['units'] = 'kb'
+ elif 'ts' in entry:
+ val_type = 'ts'
+
+ def GetVal(entry):
+ return float(entry['ts']) / 1000.0
+
+ result['units'] = 'ms'
+ else:
+ raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if val_type not in entry:
+ raise Exception('Entry did not contain expected value type "%s" '
+ 'information: %s' % (val_type, entry))
+ val = GetVal(entry)
+ if (entry['ph'] == 'S' and
+ (result['type'] == 'Unknown' or result['type'] == 'Span')):
+ result['type'] = 'Span'
+ last_val = val
+ elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+ (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+ result['type'] == 'Instant'))):
+ if last_val > 0:
+ delta = val - last_val
+ if result['min'] == -1 or result['min'] > delta:
+ result['min'] = delta
+ if result['max'] == -1 or result['max'] < delta:
+ result['max'] = delta
+ total_sum += delta
+ result['count'] += 1
+ result['data_points'].append(delta)
+ if entry['ph'] == 'I':
+ result['type'] = 'Instant'
+ last_val = val
+ if result['count'] > 0:
+ result['average'] = total_sum / result['count']
+
+ return result
+
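A worked example using the span events from the docstring above: the 'S'/'F' pair yields one duration sample of (10000004000 - 10000000000) / 1000.0 = 4.0 ms.

```python
events = [
    {'cat': 'Java', 'ts': 10000000000, 'ph': 'S', 'name': 'TestTrace'},
    {'cat': 'Java', 'ts': 10000004000, 'ph': 'F', 'name': 'TestTrace'},
]
info = GetAverageRunInfo(events, 'TestTrace')
assert info['type'] == 'Span'
assert info['units'] == 'ms'
assert info['count'] == 1
assert info['average'] == 4.0
```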
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+ """Returns the results from GetAverageRunInfo using a JSON string.
+
+ Args:
+ json_string: The string containing JSON.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ See GetAverageRunInfo Returns section.
+ """
+ return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+ """Returns the results from GetAverageRunInfo using a JSON file.
+
+ Args:
+ json_file: The path to a JSON file.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ See GetAverageRunInfo Returns section.
+ """
+ with open(json_file, 'r') as f:
+ data = f.read()
+ perf = json.loads(data)
+
+ return GetAverageRunInfo(perf, name)
diff --git a/deps/v8/build/android/pylib/instrumentation/render_test.html.jinja b/deps/v8/build/android/pylib/instrumentation/render_test.html.jinja
new file mode 100644
index 0000000000..81b85b78e3
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/render_test.html.jinja
@@ -0,0 +1,40 @@
+<html>
+<head>
+ <title>{{ test_name }}</title>
+ <script>
+ function toggleZoom() {
+ for (const img of document.getElementsByTagName("img")) {
+ if (img.hasAttribute('style')) {
+ img.removeAttribute('style');
+ } else {
+ img.style.width = '100%';
+ }
+ }
+ }
+ </script>
+</head>
+<body>
+ <a href="https://cs.chromium.org/search/?q={{ test_name }}&m=100&type=cs">Link to Golden (in repo)</a><br />
+ <a download="{{ test_name }}" href="{{ failure_link }}">Download Failure Image (right click and 'Save link as')</a>
+ <table>
+ <thead>
+ <tr>
+ <th>Failure</th>
+ <th>Golden</th>
+ <th>Diff</th>
+ </tr>
+ </thead>
+ <tbody style="vertical-align: top">
+ <tr onclick="toggleZoom()">
+ <td><img src="{{ failure_link }}" style="width: 100%" /></td>
+ {% if golden_link %}
+ <td><img src="{{ golden_link }}" style="width: 100%" /></td>
+ <td><img src="{{ diff_link }}" style="width: 100%" /></td>
+ {% else %}
+ <td>No Golden Image.</td>
+ {% endif %}
+ </tr>
+ </tbody>
+ </table>
+</body>
+</html>
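For context, a minimal sketch of rendering this template with jinja2; the harness that actually feeds it lives elsewhere in the test runner, and the file paths below are hypothetical.

```python
import jinja2

with open('render_test.html.jinja') as f:
  template = jinja2.Template(f.read())

html = template.render(
    test_name='RenderTest.testFoo',
    failure_link='failure/RenderTest.testFoo.png',
    golden_link='golden/RenderTest.testFoo.png',
    diff_link='diff/RenderTest.testFoo.png')
```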
diff --git a/deps/v8/build/android/pylib/instrumentation/test_result.py b/deps/v8/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000000..24e80a8e5f
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+ """Result information for a single instrumentation test."""
+
+ def __init__(self, full_name, test_type, start_date, dur, log=''):
+ """Construct an InstrumentationTestResult object.
+
+ Args:
+ full_name: Full name of the test.
+ test_type: Type of the test result as defined in ResultType.
+      start_date: Time in milliseconds at which the test began running.
+ dur: Duration of the test run in milliseconds.
+ log: A string listing any errors.
+ """
+ super(InstrumentationTestResult, self).__init__(
+ full_name, test_type, dur, log)
+    name_pieces = full_name.rsplit('#', 1)
+ if len(name_pieces) > 1:
+ self._test_name = name_pieces[1]
+ self._class_name = name_pieces[0]
+ else:
+ self._class_name = full_name
+ self._test_name = full_name
+ self._start_date = start_date
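+
+
+# Example (name is hypothetical): a full_name of 'org.chromium.FooTest#testBar'
+# yields _class_name 'org.chromium.FooTest' and _test_name 'testBar'.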
diff --git a/deps/v8/build/android/pylib/junit/__init__.py b/deps/v8/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/junit/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/junit/junit_test_instance.py b/deps/v8/build/android/pylib/junit/junit_test_instance.py
new file mode 100644
index 0000000000..f258cbd7bb
--- /dev/null
+++ b/deps/v8/build/android/pylib/junit/junit_test_instance.py
@@ -0,0 +1,80 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.utils import test_filter
+
+
+class JunitTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, _):
+ super(JunitTestInstance, self).__init__()
+
+ self._android_manifest_path = args.android_manifest_path
+ self._coverage_dir = args.coverage_dir
+ self._debug_socket = args.debug_socket
+ self._jacoco = args.jacoco
+ self._package_filter = args.package_filter
+ self._package_name = args.package_name
+ self._resource_zips = args.resource_zips
+ self._robolectric_runtime_deps_dir = args.robolectric_runtime_deps_dir
+ self._runner_filter = args.runner_filter
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+ self._test_suite = args.test_suite
+
+ #override
+ def TestType(self):
+ return 'junit'
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
+
+ @property
+ def android_manifest_path(self):
+ return self._android_manifest_path
+
+ @property
+ def coverage_dir(self):
+ return self._coverage_dir
+
+ @property
+ def jacoco(self):
+ return self._jacoco
+
+ @property
+ def debug_socket(self):
+ return self._debug_socket
+
+ @property
+ def package_filter(self):
+ return self._package_filter
+
+ @property
+ def package_name(self):
+ return self._package_name
+
+ @property
+ def resource_zips(self):
+ return self._resource_zips
+
+ @property
+ def robolectric_runtime_deps_dir(self):
+ return self._robolectric_runtime_deps_dir
+
+ @property
+ def runner_filter(self):
+ return self._runner_filter
+
+ @property
+ def test_filter(self):
+ return self._test_filter
+
+ @property
+ def suite(self):
+ return self._test_suite
diff --git a/deps/v8/build/android/pylib/linker/__init__.py b/deps/v8/build/android/pylib/linker/__init__.py
new file mode 100644
index 0000000000..9228df89b0
--- /dev/null
+++ b/deps/v8/build/android/pylib/linker/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/linker/linker_test_instance.py b/deps/v8/build/android/pylib/linker/linker_test_instance.py
new file mode 100644
index 0000000000..5f19db9678
--- /dev/null
+++ b/deps/v8/build/android/pylib/linker/linker_test_instance.py
@@ -0,0 +1,51 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.linker import test_case
+from pylib.utils import test_filter
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import unittest_util
+
+
+class LinkerTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args):
+ super(LinkerTestInstance, self).__init__()
+ self._test_apk = args.test_apk
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+
+ @property
+ def test_apk(self):
+ return self._test_apk
+
+ @property
+ def test_filter(self):
+ return self._test_filter
+
+ def GetTests(self):
+ tests = [
+ test_case.LinkerSharedRelroTest(is_low_memory=False),
+ test_case.LinkerSharedRelroTest(is_low_memory=True)
+ ]
+
+ if self._test_filter:
+ filtered_names = unittest_util.FilterTestNames(
+ (t.qualified_name for t in tests), self._test_filter)
+ tests = [
+ t for t in tests
+ if t.qualified_name in filtered_names]
+
+ return tests
+
+ def SetUp(self):
+ pass
+
+ def TearDown(self):
+ pass
+
+ def TestType(self):
+ return 'linker'
diff --git a/deps/v8/build/android/pylib/linker/test_case.py b/deps/v8/build/android/pylib/linker/test_case.py
new file mode 100644
index 0000000000..871da89add
--- /dev/null
+++ b/deps/v8/build/android/pylib/linker/test_case.py
@@ -0,0 +1,215 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for linker-specific test cases.
+
+ The custom dynamic linker can only be tested through a custom test case
+ for various technical reasons:
+
+ - It's an 'invisible feature', i.e. it doesn't expose a new API or
+ behaviour, all it does is save RAM when loading native libraries.
+
+ - Checking that it works correctly requires several things that do not
+ fit the existing GTest-based and instrumentation-based tests:
+
+ - Native test code needs to be run in both the browser and renderer
+ process at the same time just after loading native libraries, in
+ a completely asynchronous way.
+
+ - Each test case requires restarting a whole new application process
+ with a different command-line.
+
+ - Enabling test support in the Linker code requires building a special
+ APK with a flag to activate special test-only support code in the
+ Linker code itself.
+
+ Host-driven tests have also been tried, but since they're really
+ sub-classes of instrumentation tests, they didn't work well either.
+
+ To build and run the linker tests, do the following:
+
+ ninja -C out/Debug chromium_linker_test_apk
+ out/Debug/bin/run_chromium_linker_test_apk
+
+"""
+# pylint: disable=R0201
+
+import logging
+import re
+
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib.base import base_test_result
+
+
+ResultType = base_test_result.ResultType
+
+_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
+_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
+_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'
+
+# Logcat filters used during each test. Only the 'chromium' one is really
+# needed, but the logs are added to the TestResult in case of error, and
+# it is handy to have the others as well when troubleshooting.
+_LOGCAT_FILTERS = ['*:s', 'chromium:v', 'cr_chromium:v',
+ 'cr_ChromiumAndroidLinker:v', 'cr_LibraryLoader:v',
+ 'cr_LinkerTest:v']
+#_LOGCAT_FILTERS = ['*:v'] ## DEBUG
+
+# Regular expression used to match status lines in logcat.
+_RE_BROWSER_STATUS_LINE = re.compile(r' BROWSER_LINKER_TEST: (FAIL|SUCCESS)$')
+_RE_RENDERER_STATUS_LINE = re.compile(r' RENDERER_LINKER_TEST: (FAIL|SUCCESS)$')
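+# e.g. a logcat line ending in ' BROWSER_LINKER_TEST: SUCCESS' matches the
+# browser status expression above.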
+
+def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
+ """Force-start an activity and wait up to |timeout| seconds until the full
+ linker test status lines appear in the logcat, recorded through |device|.
+ Args:
+ device: A DeviceUtils instance.
+    timeout: Timeout in seconds.
+ Returns:
+ A (status, logs) tuple, where status is a ResultType constant, and logs
+    is the final logcat output as a string.
+ """
+
+ # 1. Start recording logcat with appropriate filters.
+ with device.GetLogcatMonitor(filter_specs=_LOGCAT_FILTERS) as logmon:
+
+ # 2. Force-start activity.
+ device.StartActivity(
+ intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
+ force_stop=True)
+
+ # 3. Wait up to |timeout| seconds until the test status is in the logcat.
+ result = ResultType.PASS
+ try:
+ browser_match = logmon.WaitFor(_RE_BROWSER_STATUS_LINE, timeout=timeout)
+ logging.debug('Found browser match: %s', browser_match.group(0))
+ renderer_match = logmon.WaitFor(_RE_RENDERER_STATUS_LINE,
+ timeout=timeout)
+ logging.debug('Found renderer match: %s', renderer_match.group(0))
+ if (browser_match.group(1) != 'SUCCESS'
+ or renderer_match.group(1) != 'SUCCESS'):
+ result = ResultType.FAIL
+ except device_errors.CommandTimeoutError:
+ result = ResultType.TIMEOUT
+
+ logcat = device.adb.Logcat(dump=True)
+
+ logmon.Close()
+ return result, '\n'.join(logcat)
+
+
+class LibraryLoadMap(dict):
+ """A helper class to pretty-print a map of library names to load addresses."""
+ def __str__(self):
+    items = ['\'%s\': 0x%x' % (name, address)
+             for (name, address) in self.iteritems()]
+ return '{%s}' % (', '.join(items))
+
+ def __repr__(self):
+ return 'LibraryLoadMap(%s)' % self.__str__()
+
+
+class AddressList(list):
+ """A helper class to pretty-print a list of load addresses."""
+ def __str__(self):
+ items = ['0x%x' % address for address in self]
+ return '[%s]' % (', '.join(items))
+
+ def __repr__(self):
+ return 'AddressList(%s)' % self.__str__()
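+
+# Examples (values are hypothetical):
+#   str(LibraryLoadMap({'libfoo.so': 0x7f00a000})) -> "{'libfoo.so': 0x7f00a000}"
+#   str(AddressList([0x1000, 0x2000])) -> '[0x1000, 0x2000]'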
+
+
+class LinkerTestCaseBase(object):
+ """Base class for linker test cases."""
+
+ def __init__(self, is_low_memory=False):
+ """Create a test case.
+ Args:
+ is_low_memory: True to simulate a low-memory device, False otherwise.
+ """
+ test_suffix = 'ForLinker'
+ self.is_low_memory = is_low_memory
+ if is_low_memory:
+ test_suffix += 'LowMemoryDevice'
+ else:
+ test_suffix += 'RegularDevice'
+ class_name = self.__class__.__name__
+ self.qualified_name = '%s.%s' % (class_name, test_suffix)
+ self.tagged_name = self.qualified_name
+
+ def _RunTest(self, _device):
+ """Run the test, must be overriden.
+ Args:
+ _device: A DeviceUtils interface.
+ Returns:
+ A (status, log) tuple, where <status> is a ResultType constant, and <log>
+ is the logcat output captured during the test in case of error, or None
+ in case of success.
+ """
+ return ResultType.FAIL, 'Unimplemented _RunTest() method!'
+
+ def Run(self, device):
+ """Run the test on a given device.
+ Args:
+      device: A DeviceUtils instance for the target device.
+    Returns:
+      A base_test_result.BaseTestResult instance.
+ """
+ margin = 8
+ print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name)
+ logging.info('Running linker test: %s', self.tagged_name)
+
+ command_line_flags = ''
+ if self.is_low_memory:
+ command_line_flags += ' --low-memory-device'
+ device.WriteFile(_COMMAND_LINE_FILE, command_line_flags)
+
+ # Run the test.
+ status, logs = self._RunTest(device)
+
+ result_text = 'OK'
+ if status == ResultType.FAIL:
+ result_text = 'FAILED'
+ elif status == ResultType.TIMEOUT:
+ result_text = 'TIMEOUT'
+ print '[ %*s ] %s' % (margin, result_text, self.tagged_name)
+
+ return base_test_result.BaseTestResult(self.tagged_name, status, log=logs)
+
+
+ def __str__(self):
+ return self.tagged_name
+
+ def __repr__(self):
+ return self.tagged_name
+
+
+class LinkerSharedRelroTest(LinkerTestCaseBase):
+ """A linker test case to check the status of shared RELRO sections.
+
+ The core of the checks performed here are pretty simple:
+
+ - Clear the logcat and start recording with an appropriate set of filters.
+ - Create the command-line appropriate for the test-case.
+ - Start the activity (always forcing a cold start).
+ - Every second, look at the current content of the filtered logcat lines
+ and look for instances of the following:
+
+ BROWSER_LINKER_TEST: <status>
+ RENDERER_LINKER_TEST: <status>
+
+ where <status> can be either FAIL or SUCCESS. These lines can appear
+ in any order in the logcat. Once both browser and renderer status are
+ found, stop the loop. Otherwise timeout after 30 seconds.
+
+  Note that there can be other lines beginning with BROWSER_LINKER_TEST:
+  and RENDERER_LINKER_TEST:, but these are not followed by a <status> code.
+
+  - The test case passes if the <status> for both the browser and renderer
+    processes is SUCCESS. Otherwise it's a failure.
+ """
+ def _RunTest(self, device):
+ # Wait up to 30 seconds until the linker test status is in the logcat.
+ return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
diff --git a/deps/v8/build/android/pylib/local/__init__.py b/deps/v8/build/android/pylib/local/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/local/device/__init__.py b/deps/v8/build/android/pylib/local/device/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/local/device/local_device_environment.py b/deps/v8/build/android/pylib/local/device/local_device_environment.py
new file mode 100644
index 0000000000..4d7aa82ad0
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_environment.py
@@ -0,0 +1,300 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import datetime
+import functools
+import logging
+import os
+import shutil
+import tempfile
+import threading
+
+import devil_chromium
+from devil import base_error
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import logcat_monitor
+from devil.android.sdk import adb_wrapper
+from devil.utils import file_utils
+from devil.utils import parallelizer
+from pylib import constants
+from pylib.base import environment
+from pylib.utils import instrumentation_tracing
+from py_trace_event import trace_event
+
+
+LOGCAT_FILTERS = [
+ 'chromium:v',
+ 'cr_*:v',
+ 'DEBUG:I',
+ 'StrictMode:D',
+]
+
+
+def _DeviceCachePath(device):
+ file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+ return os.path.join(constants.GetOutDirectory(), file_name)
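+# e.g. for a device whose serial is 0123456789ABCDEF (hypothetical), this
+# returns <output dir>/device_cache_0123456789ABCDEF.json.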
+
+
+def handle_shard_failures(f):
+ """A decorator that handles device failures for per-device functions.
+
+ Args:
+ f: the function being decorated. The function must take at least one
+ argument, and that argument must be the device.
+ """
+ return handle_shard_failures_with(None)(f)
+
+
+# TODO(jbudorick): Refactor this to work as a decorator or context manager.
+def handle_shard_failures_with(on_failure):
+ """A decorator that handles device failures for per-device functions.
+
+ This calls on_failure in the event of a failure.
+
+ Args:
+ f: the function being decorated. The function must take at least one
+ argument, and that argument must be the device.
+ on_failure: A binary function to call on failure.
+ """
+ def decorator(f):
+ @functools.wraps(f)
+ def wrapper(dev, *args, **kwargs):
+ try:
+ return f(dev, *args, **kwargs)
+ except device_errors.CommandTimeoutError:
+ logging.exception('Shard timed out: %s(%s)', f.__name__, str(dev))
+ except device_errors.DeviceUnreachableError:
+ logging.exception('Shard died: %s(%s)', f.__name__, str(dev))
+ except base_error.BaseError:
+ logging.exception('Shard failed: %s(%s)', f.__name__, str(dev))
+ except SystemExit:
+ logging.exception('Shard killed: %s(%s)', f.__name__, str(dev))
+ raise
+ if on_failure:
+ on_failure(dev, f.__name__)
+ return None
+
+ return wrapper
+
+ return decorator
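+
+
+# Usage sketch (the function name is hypothetical):
+#   @handle_shard_failures_with(on_failure=env.BlacklistDevice)
+#   def prepare_device(device):
+#     ...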
+
+
+class LocalDeviceEnvironment(environment.Environment):
+
+ def __init__(self, args, output_manager, _error_func):
+ super(LocalDeviceEnvironment, self).__init__(output_manager)
+ self._blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+ self._device_serials = args.test_devices
+ self._devices_lock = threading.Lock()
+ self._devices = None
+ self._concurrent_adb = args.enable_concurrent_adb
+ self._enable_device_cache = args.enable_device_cache
+ self._logcat_monitors = []
+ self._logcat_output_dir = args.logcat_output_dir
+ self._logcat_output_file = args.logcat_output_file
+ self._max_tries = 1 + args.num_retries
+ self._preferred_abis = None
+ self._recover_devices = args.recover_devices
+ self._skip_clear_data = args.skip_clear_data
+ self._tool_name = args.tool
+ self._trace_output = None
+ if hasattr(args, 'trace_output'):
+ self._trace_output = args.trace_output
+ self._trace_all = None
+ if hasattr(args, 'trace_all'):
+ self._trace_all = args.trace_all
+
+ devil_chromium.Initialize(
+ output_directory=constants.GetOutDirectory(),
+ adb_path=args.adb_path)
+
+ # Some things such as Forwarder require ADB to be in the environment path.
+ adb_dir = os.path.dirname(adb_wrapper.AdbWrapper.GetAdbPath())
+ if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+ os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH']
+
+ #override
+ def SetUp(self):
+ if self.trace_output and self._trace_all:
+ to_include = [r"pylib\..*", r"devil\..*", "__main__"]
+ to_exclude = ["logging"]
+ instrumentation_tracing.start_instrumenting(self.trace_output, to_include,
+ to_exclude)
+ elif self.trace_output:
+ self.EnableTracing()
+
+ # Must be called before accessing |devices|.
+ def SetPreferredAbis(self, abis):
+ assert self._devices is None
+ self._preferred_abis = abis
+
+ def _InitDevices(self):
+ device_arg = []
+ if self._device_serials:
+ device_arg = self._device_serials
+
+ self._devices = device_utils.DeviceUtils.HealthyDevices(
+ self._blacklist,
+ retries=5,
+ enable_usb_resets=True,
+ enable_device_files_cache=self._enable_device_cache,
+ default_retries=self._max_tries - 1,
+ device_arg=device_arg,
+ abis=self._preferred_abis)
+
+ if self._logcat_output_file:
+ self._logcat_output_dir = tempfile.mkdtemp()
+
+ @handle_shard_failures_with(on_failure=self.BlacklistDevice)
+ def prepare_device(d):
+ d.WaitUntilFullyBooted()
+
+ if self._enable_device_cache:
+ cache_path = _DeviceCachePath(d)
+ if os.path.exists(cache_path):
+ logging.info('Using device cache: %s', cache_path)
+ with open(cache_path) as f:
+ d.LoadCacheData(f.read())
+ # Delete cached file so that any exceptions cause it to be cleared.
+ os.unlink(cache_path)
+
+ if self._logcat_output_dir:
+ logcat_file = os.path.join(
+ self._logcat_output_dir,
+ '%s_%s' % (d.adb.GetDeviceSerial(),
+ datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S')))
+ monitor = logcat_monitor.LogcatMonitor(
+ d.adb, clear=True, output_file=logcat_file)
+ self._logcat_monitors.append(monitor)
+ monitor.Start()
+
+ self.parallel_devices.pMap(prepare_device)
+
+ @property
+ def blacklist(self):
+ return self._blacklist
+
+ @property
+ def concurrent_adb(self):
+ return self._concurrent_adb
+
+ @property
+ def devices(self):
+ # Initialize lazily so that host-only tests do not fail when no devices are
+ # attached.
+ if self._devices is None:
+ self._InitDevices()
+ return self._devices
+
+ @property
+ def max_tries(self):
+ return self._max_tries
+
+ @property
+ def parallel_devices(self):
+ return parallelizer.SyncParallelizer(self.devices)
+
+ @property
+ def recover_devices(self):
+ return self._recover_devices
+
+ @property
+ def skip_clear_data(self):
+ return self._skip_clear_data
+
+ @property
+ def tool(self):
+ return self._tool_name
+
+ @property
+ def trace_output(self):
+ return self._trace_output
+
+ #override
+ def TearDown(self):
+ if self.trace_output and self._trace_all:
+ instrumentation_tracing.stop_instrumenting()
+ elif self.trace_output:
+ self.DisableTracing()
+
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
+ if not self._devices:
+ return
+
+ @handle_shard_failures_with(on_failure=self.BlacklistDevice)
+ def tear_down_device(d):
+ # Write the cache even when not using it so that it will be ready the
+ # first time that it is enabled. Writing it every time is also necessary
+ # so that an invalid cache can be flushed just by disabling it for one
+ # run.
+ cache_path = _DeviceCachePath(d)
+ if os.path.exists(os.path.dirname(cache_path)):
+ with open(cache_path, 'w') as f:
+ f.write(d.DumpCacheData())
+ logging.info('Wrote device cache: %s', cache_path)
+ else:
+ logging.warning(
+ 'Unable to write device cache as %s directory does not exist',
+ os.path.dirname(cache_path))
+
+ self.parallel_devices.pMap(tear_down_device)
+
+ for m in self._logcat_monitors:
+ try:
+ m.Stop()
+ m.Close()
+ _, temp_path = tempfile.mkstemp()
+ with open(m.output_file, 'r') as infile:
+ with open(temp_path, 'w') as outfile:
+ for line in infile:
+ outfile.write('Device(%s) %s' % (m.adb.GetDeviceSerial(), line))
+ shutil.move(temp_path, m.output_file)
+ except base_error.BaseError:
+ logging.exception('Failed to stop logcat monitor for %s',
+ m.adb.GetDeviceSerial())
+ except IOError:
+ logging.exception('Failed to locate logcat for device %s',
+ m.adb.GetDeviceSerial())
+
+ if self._logcat_output_file:
+ file_utils.MergeFiles(
+ self._logcat_output_file,
+ [m.output_file for m in self._logcat_monitors
+ if os.path.exists(m.output_file)])
+ shutil.rmtree(self._logcat_output_dir)
+
+ def BlacklistDevice(self, device, reason='local_device_failure'):
+ device_serial = device.adb.GetDeviceSerial()
+ if self._blacklist:
+ self._blacklist.Extend([device_serial], reason=reason)
+ with self._devices_lock:
+ self._devices = [d for d in self._devices if str(d) != device_serial]
+ logging.error('Device %s blacklisted: %s', device_serial, reason)
+ if not self._devices:
+ raise device_errors.NoDevicesError(
+ 'All devices were blacklisted due to errors')
+
+ @staticmethod
+ def DisableTracing():
+ if not trace_event.trace_is_enabled():
+ logging.warning('Tracing is not running.')
+ else:
+ trace_event.trace_disable()
+
+ def EnableTracing(self):
+ if trace_event.trace_is_enabled():
+ logging.warning('Tracing is already running.')
+ else:
+ trace_event.trace_enable(self._trace_output)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_gtest_run.py b/deps/v8/build/android/pylib/local/device/local_device_gtest_run.py
new file mode 100644
index 0000000000..76d3e1bb9b
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_gtest_run.py
@@ -0,0 +1,635 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import itertools
+import logging
+import os
+import posixpath
+import shutil
+import time
+
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import logcat_monitor
+from devil.android import ports
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+from pylib.local import local_test_server_spawner
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.utils import google_storage_helper
+from pylib.utils import logdog_helper
+from py_trace_event import trace_event
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+_MAX_INLINE_FLAGS_LENGTH = 50 # Arbitrarily chosen.
+_EXTRA_COMMAND_LINE_FILE = (
+ 'org.chromium.native_test.NativeTest.CommandLineFile')
+_EXTRA_COMMAND_LINE_FLAGS = (
+ 'org.chromium.native_test.NativeTest.CommandLineFlags')
+_EXTRA_STDOUT_FILE = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+ '.StdoutFile')
+_EXTRA_TEST = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+ '.Test')
+_EXTRA_TEST_LIST = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+ '.TestList')
+
+_MAX_SHARD_SIZE = 256
+_SECONDS_TO_NANOS = int(1e9)
+
+# The amount of time a test executable may run before it gets killed.
+_TEST_TIMEOUT_SECONDS = 30*60
+
+# Tests that use SpawnedTestServer must run the LocalTestServerSpawner on the
+# host machine.
+# TODO(jbudorick): Move this up to the test instance if the net test server is
+# handled outside of the APK for the remote_device environment.
+_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [
+ 'components_browsertests', 'content_unittests', 'content_browsertests',
+ 'net_unittests', 'services_unittests', 'unit_tests'
+]
+
+# No-op context manager. If we used Python 3, we could change this to
+# contextlib.ExitStack()
+class _NullContextManager(object):
+ def __enter__(self):
+ pass
+ def __exit__(self, *args):
+ pass
+
+
+def _GenerateSequentialFileNames(filename):
+ """Infinite generator of names: 'name.ext', 'name_1.ext', 'name_2.ext', ..."""
+ yield filename
+ base, ext = os.path.splitext(filename)
+ for i in itertools.count(1):
+ yield '%s_%d%s' % (base, i, ext)
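+# e.g. _GenerateSequentialFileNames('log.txt') yields 'log.txt', 'log_1.txt',
+# 'log_2.txt', ...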
+
+
+def _ExtractTestsFromFilter(gtest_filter):
+ """Returns the list of tests specified by the given filter.
+
+ Returns:
+ None if the device should be queried for the test list instead.
+ """
+ # Empty means all tests, - means exclude filter.
+ if not gtest_filter or '-' in gtest_filter:
+ return None
+
+ patterns = gtest_filter.split(':')
+ # For a single pattern, allow it even if it has a wildcard so long as the
+ # wildcard comes at the end and there is at least one . to prove the scope is
+ # not too large.
+  # This heuristic is not guaranteed to be faster, but it normally is.
+ if len(patterns) == 1 and patterns[0].endswith('*'):
+ no_suffix = patterns[0].rstrip('*')
+ if '*' not in no_suffix and '.' in no_suffix:
+ return patterns
+
+ if '*' in gtest_filter:
+ return None
+ return patterns
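+
+
+# Illustrative results (filter strings are hypothetical):
+#   _ExtractTestsFromFilter('A.b:A.c')   -> ['A.b', 'A.c']
+#   _ExtractTestsFromFilter('FooTest.*') -> ['FooTest.*'] (one trailing
+#                                           wildcard is allowed)
+#   _ExtractTestsFromFilter('-A.b')      -> None (query the device instead)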
+
+
+class _ApkDelegate(object):
+ def __init__(self, test_instance, tool):
+ self._activity = test_instance.activity
+ self._apk_helper = test_instance.apk_helper
+ self._test_apk_incremental_install_json = (
+ test_instance.test_apk_incremental_install_json)
+ self._package = test_instance.package
+ self._runner = test_instance.runner
+ self._permissions = test_instance.permissions
+ self._suite = test_instance.suite
+ self._component = '%s/%s' % (self._package, self._runner)
+ self._extras = test_instance.extras
+ self._wait_for_java_debugger = test_instance.wait_for_java_debugger
+ self._tool = tool
+
+ def GetTestDataRoot(self, device):
+ # pylint: disable=no-self-use
+ return posixpath.join(device.GetExternalStoragePath(),
+ 'chromium_tests_root')
+
+ def Install(self, device):
+ if self._test_apk_incremental_install_json:
+ installer.Install(device, self._test_apk_incremental_install_json,
+ apk=self._apk_helper, permissions=self._permissions)
+ else:
+ device.Install(
+ self._apk_helper,
+ allow_downgrade=True,
+ reinstall=True,
+ permissions=self._permissions)
+
+ def ResultsDirectory(self, device):
+ return device.GetApplicationDataDirectory(self._package)
+
+ def Run(self, test, device, flags=None, **kwargs):
+ extras = dict(self._extras)
+
+ if ('timeout' in kwargs
+ and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras):
+ # Make sure the instrumentation doesn't kill the test before the
+ # scripts do. The provided timeout value is in seconds, but the
+ # instrumentation deals with nanoseconds because that's how Android
+ # handles time.
+ extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int(
+ kwargs['timeout'] * _SECONDS_TO_NANOS)
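+      # e.g. a 60 second timeout becomes 60 * 1e9 = 60,000,000,000 ns.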
+
+ # pylint: disable=redefined-variable-type
+ command_line_file = _NullContextManager()
+ if flags:
+ if len(flags) > _MAX_INLINE_FLAGS_LENGTH:
+ command_line_file = device_temp_file.DeviceTempFile(device.adb)
+ device.WriteFile(command_line_file.name, '_ %s' % flags)
+ extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name
+ else:
+ extras[_EXTRA_COMMAND_LINE_FLAGS] = flags
+
+ test_list_file = _NullContextManager()
+ if test:
+ if len(test) > 1:
+ test_list_file = device_temp_file.DeviceTempFile(device.adb)
+ device.WriteFile(test_list_file.name, '\n'.join(test))
+ extras[_EXTRA_TEST_LIST] = test_list_file.name
+ else:
+ extras[_EXTRA_TEST] = test[0]
+ # pylint: enable=redefined-variable-type
+
+ stdout_file = device_temp_file.DeviceTempFile(
+ device.adb, dir=device.GetExternalStoragePath(), suffix='.gtest_out')
+ extras[_EXTRA_STDOUT_FILE] = stdout_file.name
+
+ if self._wait_for_java_debugger:
+ cmd = ['am', 'set-debug-app', '-w', self._package]
+ device.RunShellCommand(cmd, check_return=True)
+ logging.warning('*' * 80)
+ logging.warning('Waiting for debugger to attach to process: %s',
+ self._package)
+ logging.warning('*' * 80)
+
+ with command_line_file, test_list_file, stdout_file:
+ try:
+ device.StartInstrumentation(
+ self._component, extras=extras, raw=False, **kwargs)
+ except device_errors.CommandFailedError:
+ logging.exception('gtest shard failed.')
+ except device_errors.CommandTimeoutError:
+ logging.exception('gtest shard timed out.')
+ except device_errors.DeviceUnreachableError:
+ logging.exception('gtest shard device unreachable.')
+ except Exception:
+ device.ForceStop(self._package)
+ raise
+ # TODO(jbudorick): Remove this after resolving crbug.com/726880
+ logging.info(
+ '%s size on device: %s',
+ stdout_file.name, device.StatPath(stdout_file.name).get('st_size', 0))
+ return device.ReadFile(stdout_file.name).splitlines()
+
+ def PullAppFiles(self, device, files, directory):
+ device_dir = device.GetApplicationDataDirectory(self._package)
+ host_dir = os.path.join(directory, str(device))
+ for f in files:
+ device_file = posixpath.join(device_dir, f)
+ host_file = os.path.join(host_dir, *f.split(posixpath.sep))
+ for host_file in _GenerateSequentialFileNames(host_file):
+ if not os.path.exists(host_file):
+ break
+ device.PullFile(device_file, host_file)
+
+ def Clear(self, device):
+ device.ClearApplicationState(self._package, permissions=self._permissions)
+
+
+class _ExeDelegate(object):
+ def __init__(self, tr, dist_dir, tool):
+ self._host_dist_dir = dist_dir
+ self._exe_file_name = os.path.basename(dist_dir)[:-len('__dist')]
+ self._device_dist_dir = posixpath.join(
+ constants.TEST_EXECUTABLE_DIR, os.path.basename(dist_dir))
+ self._test_run = tr
+ self._tool = tool
+
+ def GetTestDataRoot(self, device):
+ # pylint: disable=no-self-use
+ # pylint: disable=unused-argument
+ return posixpath.join(constants.TEST_EXECUTABLE_DIR, 'chromium_tests_root')
+
+ def Install(self, device):
+ # TODO(jbudorick): Look into merging this with normal data deps pushing if
+ # executables become supported on nonlocal environments.
+ device.PushChangedFiles([(self._host_dist_dir, self._device_dist_dir)],
+ delete_device_stale=True)
+
+ def ResultsDirectory(self, device):
+ # pylint: disable=no-self-use
+ # pylint: disable=unused-argument
+ return constants.TEST_EXECUTABLE_DIR
+
+ def Run(self, test, device, flags=None, **kwargs):
+ tool = self._test_run.GetTool(device).GetTestWrapper()
+ if tool:
+ cmd = [tool]
+ else:
+ cmd = []
+ cmd.append(posixpath.join(self._device_dist_dir, self._exe_file_name))
+
+ if test:
+ cmd.append('--gtest_filter=%s' % ':'.join(test))
+ if flags:
+ # TODO(agrieve): This won't work if multiple flags are passed.
+ cmd.append(flags)
+ cwd = constants.TEST_EXECUTABLE_DIR
+
+ env = {
+ 'LD_LIBRARY_PATH': self._device_dist_dir
+ }
+
+ if self._tool != 'asan':
+ env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS
+
+ try:
+ gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+ external = device.GetExternalStoragePath()
+ env['GCOV_PREFIX'] = '%s/gcov' % external
+ env['GCOV_PREFIX_STRIP'] = gcov_strip_depth
+ except (device_errors.CommandFailedError, KeyError):
+ pass
+
+ # Executable tests return a nonzero exit code on test failure, which is
+ # fine from the test runner's perspective; thus check_return=False.
+ output = device.RunShellCommand(
+ cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs)
+ return output
+
+ def PullAppFiles(self, device, files, directory):
+ pass
+
+ def Clear(self, device):
+ device.KillAll(self._exe_file_name, blocking=True, timeout=30, quiet=True)
+
+
+class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
+
+ def __init__(self, env, test_instance):
+ assert isinstance(env, local_device_environment.LocalDeviceEnvironment)
+ assert isinstance(test_instance, gtest_test_instance.GtestTestInstance)
+ super(LocalDeviceGtestRun, self).__init__(env, test_instance)
+
+ # pylint: disable=redefined-variable-type
+ if self._test_instance.apk:
+ self._delegate = _ApkDelegate(self._test_instance, env.tool)
+ elif self._test_instance.exe_dist_dir:
+ self._delegate = _ExeDelegate(self, self._test_instance.exe_dist_dir,
+ self._env.tool)
+ if self._test_instance.isolated_script_test_perf_output:
+ self._test_perf_output_filenames = _GenerateSequentialFileNames(
+ self._test_instance.isolated_script_test_perf_output)
+ else:
+ self._test_perf_output_filenames = itertools.repeat(None)
+ # pylint: enable=redefined-variable-type
+ self._crashes = set()
+ self._servers = collections.defaultdict(list)
+
+ #override
+ def TestPackage(self):
+ return self._test_instance.suite
+
+ #override
+ def SetUp(self):
+ @local_device_environment.handle_shard_failures_with(
+ on_failure=self._env.BlacklistDevice)
+ @trace_event.traced
+ def individual_device_set_up(device, host_device_tuples):
+ def install_apk(dev):
+ # Install test APK.
+ self._delegate.Install(dev)
+
+ def push_test_data(dev):
+ # Push data dependencies.
+ device_root = self._delegate.GetTestDataRoot(dev)
+ host_device_tuples_substituted = [
+ (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+ for h, d in host_device_tuples]
+ dev.PushChangedFiles(
+ host_device_tuples_substituted,
+ delete_device_stale=True,
+ # Some gtest suites, e.g. unit_tests, have data dependencies that
+ # can take longer than the default timeout to push. See
+ # crbug.com/791632 for context.
+ timeout=600)
+ if not host_device_tuples:
+ dev.RemovePath(device_root, force=True, recursive=True, rename=True)
+ dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+ def init_tool_and_start_servers(dev):
+ tool = self.GetTool(dev)
+ tool.CopyFiles(dev)
+ tool.SetupEnvironment()
+
+ self._servers[str(dev)] = []
+ if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER:
+ self._servers[str(dev)].append(
+ local_test_server_spawner.LocalTestServerSpawner(
+ ports.AllocateTestServerPort(), dev, tool))
+
+ for s in self._servers[str(dev)]:
+ s.SetUp()
+
+ def bind_crash_handler(step, dev):
+ return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+ steps = [
+ bind_crash_handler(s, device)
+ for s in (install_apk, push_test_data, init_tool_and_start_servers)]
+ if self._env.concurrent_adb:
+ reraiser_thread.RunAsync(steps)
+ else:
+ for step in steps:
+ step()
+
+ self._env.parallel_devices.pMap(
+ individual_device_set_up,
+ self._test_instance.GetDataDependencies())
+
+ #override
+ def _ShouldShard(self):
+ return True
+
+ #override
+ def _CreateShards(self, tests):
+    # self._crashes tracks tests that have previously crashed. A crash can
+    # prevent the tests that follow it in the same shard from running, so
+    # each such testcase gets a shard of its own, allowing the other tests
+    # to run.
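+    # e.g. with 2 devices and tests [t0..t5], the round-robin split below
+    # yields [t0, t2, t4] and [t1, t3, t5], each then chunked into pieces of
+    # at most _MAX_SHARD_SIZE tests.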
+ device_count = len(self._env.devices)
+ shards = []
+
+ # Add shards with only one suspect testcase.
+ shards += [[crash] for crash in self._crashes if crash in tests]
+
+ # Delete suspect testcase from tests.
+    tests = [test for test in tests if test not in self._crashes]
+
+ for i in xrange(0, device_count):
+ unbounded_shard = tests[i::device_count]
+ shards += [unbounded_shard[j:j+_MAX_SHARD_SIZE]
+ for j in xrange(0, len(unbounded_shard), _MAX_SHARD_SIZE)]
+ return shards
+
+ #override
+ def _GetTests(self):
+ if self._test_instance.extract_test_list_from_filter:
+ # When the exact list of tests to run is given via command-line (e.g. when
+ # locally iterating on a specific test), skip querying the device (which
+ # takes ~3 seconds).
+ tests = _ExtractTestsFromFilter(self._test_instance.gtest_filter)
+ if tests:
+ return tests
+
+ # Even when there's only one device, it still makes sense to retrieve the
+ # test list so that tests can be split up and run in batches rather than all
+ # at once (since test output is not streamed).
+ @local_device_environment.handle_shard_failures_with(
+ on_failure=self._env.BlacklistDevice)
+ def list_tests(dev):
+ timeout = 30
+ retries = 1
+ if self._test_instance.wait_for_java_debugger:
+ timeout = None
+
+ flags = [
+ f for f in self._test_instance.flags
+ if f not in ['--wait-for-debugger', '--wait-for-java-debugger']
+ ]
+ flags.append('--gtest_list_tests')
+
+ # TODO(crbug.com/726880): Remove retries when no longer necessary.
+ for i in range(0, retries+1):
+ logging.info('flags:')
+ for f in flags:
+ logging.info(' %s', f)
+
+ raw_test_list = crash_handler.RetryOnSystemCrash(
+ lambda d: self._delegate.Run(
+ None, d, flags=' '.join(flags), timeout=timeout),
+ device=dev)
+ tests = gtest_test_instance.ParseGTestListTests(raw_test_list)
+ if not tests:
+ logging.info('No tests found. Output:')
+ for l in raw_test_list:
+ logging.info(' %s', l)
+ logging.info('Logcat:')
+ for line in dev.adb.Logcat(dump=True):
+ logging.info(line)
+ dev.adb.Logcat(clear=True)
+ if i < retries:
+ logging.info('Retrying...')
+ else:
+ break
+ return tests
+
+ # Query all devices in case one fails.
+ test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+ # If all devices failed to list tests, raise an exception.
+ # Check that tl is not None and is not empty.
+ if all(not tl for tl in test_lists):
+ raise device_errors.CommandFailedError(
+ 'Failed to list tests on any device')
+ tests = list(sorted(set().union(*[set(tl) for tl in test_lists if tl])))
+ tests = self._test_instance.FilterTests(tests)
+ tests = self._ApplyExternalSharding(
+ tests, self._test_instance.external_shard_index,
+ self._test_instance.total_external_shards)
+ return tests
+
+ def _UploadTestArtifacts(self, device, test_artifacts_dir):
+ # TODO(jbudorick): Reconcile this with the output manager once
+ # https://codereview.chromium.org/2933993002/ lands.
+ if test_artifacts_dir:
+ with tempfile_ext.NamedTemporaryDirectory() as test_artifacts_host_dir:
+ device.PullFile(test_artifacts_dir.name, test_artifacts_host_dir)
+ with tempfile_ext.NamedTemporaryDirectory() as temp_zip_dir:
+ zip_base_name = os.path.join(temp_zip_dir, 'test_artifacts')
+ test_artifacts_zip = shutil.make_archive(
+ zip_base_name, 'zip', test_artifacts_host_dir)
+ link = google_storage_helper.upload(
+ google_storage_helper.unique_name(
+ 'test_artifacts', device=device),
+ test_artifacts_zip,
+ bucket='%s/test_artifacts' % (
+ self._test_instance.gs_test_artifacts_bucket))
+ logging.info('Uploading test artifacts to %s.', link)
+ return link
+ return None
+
+ #override
+ def _RunTest(self, device, test):
+ # Run the test.
+ timeout = (self._test_instance.shard_timeout
+ * self.GetTool(device).GetTimeoutScale())
+ if self._test_instance.wait_for_java_debugger:
+ timeout = None
+ if self._test_instance.store_tombstones:
+ tombstones.ClearAllTombstones(device)
+ test_perf_output_filename = next(self._test_perf_output_filenames)
+
+ with device_temp_file.DeviceTempFile(
+ adb=device.adb,
+ dir=self._delegate.ResultsDirectory(device),
+ suffix='.xml') as device_tmp_results_file:
+ with contextlib_ext.Optional(
+ device_temp_file.NamedDeviceTemporaryDirectory(
+ adb=device.adb, dir='/sdcard/'),
+ self._test_instance.gs_test_artifacts_bucket) as test_artifacts_dir:
+ with (contextlib_ext.Optional(
+ device_temp_file.DeviceTempFile(
+ adb=device.adb, dir=self._delegate.ResultsDirectory(device)),
+ test_perf_output_filename)) as isolated_script_test_perf_output:
+
+ flags = list(self._test_instance.flags)
+ if self._test_instance.enable_xml_result_parsing:
+ flags.append('--gtest_output=xml:%s' % device_tmp_results_file.name)
+
+ if self._test_instance.gs_test_artifacts_bucket:
+ flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name)
+
+ if test_perf_output_filename:
+ flags.append('--isolated_script_test_perf_output=%s'
+ % isolated_script_test_perf_output.name)
+
+ logging.info('flags:')
+ for f in flags:
+ logging.info(' %s', f)
+
+ stream_name = 'logcat_%s_%s_%s' % (
+ hash(tuple(test)),
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+ device.serial)
+
+ with self._env.output_manager.ArchivedTempfile(
+ stream_name, 'logcat') as logcat_file:
+ with logcat_monitor.LogcatMonitor(
+ device.adb,
+ filter_specs=local_device_environment.LOGCAT_FILTERS,
+ output_file=logcat_file.name) as logmon:
+ with contextlib_ext.Optional(
+ trace_event.trace(str(test)),
+ self._env.trace_output):
+ output = self._delegate.Run(
+ test, device, flags=' '.join(flags),
+ timeout=timeout, retries=0)
+ logmon.Close()
+
+ if logcat_file.Link():
+ logging.info('Logcat saved to %s', logcat_file.Link())
+
+ if self._test_instance.enable_xml_result_parsing:
+ try:
+ gtest_xml = device.ReadFile(
+ device_tmp_results_file.name,
+ as_root=True)
+ except device_errors.CommandFailedError as e:
+ logging.warning(
+ 'Failed to pull gtest results XML file %s: %s',
+ device_tmp_results_file.name,
+ str(e))
+ gtest_xml = None
+
+ if test_perf_output_filename:
+ try:
+ device.PullFile(isolated_script_test_perf_output.name,
+ test_perf_output_filename)
+ except device_errors.CommandFailedError as e:
+ logging.warning(
+ 'Failed to pull chartjson results %s: %s',
+ isolated_script_test_perf_output.name, str(e))
+
+ test_artifacts_url = self._UploadTestArtifacts(device,
+ test_artifacts_dir)
+
+ for s in self._servers[str(device)]:
+ s.Reset()
+ if self._test_instance.app_files:
+ self._delegate.PullAppFiles(device, self._test_instance.app_files,
+ self._test_instance.app_file_dir)
+ if not self._env.skip_clear_data:
+ self._delegate.Clear(device)
+
+ for l in output:
+ logging.info(l)
+
+ # Parse the output.
+ # TODO(jbudorick): Transition test scripts away from parsing stdout.
+ if self._test_instance.enable_xml_result_parsing:
+ results = gtest_test_instance.ParseGTestXML(gtest_xml)
+ else:
+ results = gtest_test_instance.ParseGTestOutput(
+ output, self._test_instance.symbolizer, device.product_cpu_abi)
+
+ tombstones_url = None
+ for r in results:
+ if logcat_file:
+ r.SetLink('logcat', logcat_file.Link())
+
+ if self._test_instance.gs_test_artifacts_bucket:
+ r.SetLink('test_artifacts', test_artifacts_url)
+
+ if r.GetType() == base_test_result.ResultType.CRASH:
+ self._crashes.add(r.GetName())
+ if self._test_instance.store_tombstones:
+ if not tombstones_url:
+ resolved_tombstones = tombstones.ResolveTombstones(
+ device,
+ resolve_all_tombstones=True,
+ include_stack_symbols=False,
+ wipe_tombstones=True)
+ stream_name = 'tombstones_%s_%s' % (
+ time.strftime('%Y%m%dT%H%M%S', time.localtime()),
+ device.serial)
+ tombstones_url = logdog_helper.text(
+ stream_name, '\n'.join(resolved_tombstones))
+ r.SetLink('tombstones', tombstones_url)
+
+ tests_stripped_disabled_prefix = set()
+ for t in test:
+ tests_stripped_disabled_prefix.add(
+ gtest_test_instance.TestNameWithoutDisabledPrefix(t))
+ not_run_tests = tests_stripped_disabled_prefix.difference(
+ set(r.GetName() for r in results))
+ return results, list(not_run_tests) if results else None
+
+ #override
+ def TearDown(self):
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
+ @local_device_environment.handle_shard_failures
+ @trace_event.traced
+ def individual_device_tear_down(dev):
+ for s in self._servers.get(str(dev), []):
+ s.TearDown()
+
+ tool = self.GetTool(dev)
+ tool.CleanUpEnvironment()
+
+ self._env.parallel_devices.pMap(individual_device_tear_down)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py
new file mode 100644
index 0000000000..4332e74972
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -0,0 +1,965 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import copy
+import hashlib
+import json
+import logging
+import os
+import posixpath
+import re
+import sys
+import time
+
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import flag_changer
+from devil.android.sdk import shared_prefs
+from devil.android import logcat_monitor
+from devil.android.tools import system_app
+from devil.android.tools import webview_app
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import output_manager
+from pylib.constants import host_paths
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.output import remote_output_manager
+from pylib.utils import instrumentation_tracing
+from pylib.utils import shared_preference_utils
+
+from py_trace_event import trace_event
+from py_trace_event import trace_time
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+
+with host_paths.SysPath(
+ os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0):
+ import jinja2 # pylint: disable=import-error
+ import markupsafe # pylint: disable=import-error,unused-import
+
+
+_JINJA_TEMPLATE_DIR = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation')
+_JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja'
+
+_TAG = 'test_runner_py'
+
+TIMEOUT_ANNOTATIONS = [
+ ('Manual', 10 * 60 * 60),
+ ('IntegrationTest', 30 * 60),
+ ('External', 10 * 60),
+ ('EnormousTest', 10 * 60),
+ ('LargeTest', 5 * 60),
+ ('MediumTest', 3 * 60),
+ ('SmallTest', 1 * 60),
+]
+
+LOGCAT_FILTERS = ['*:e', 'chromium:v', 'cr_*:v', 'DEBUG:I',
+ 'StrictMode:D', '%s:I' % _TAG]
+
+EXTRA_SCREENSHOT_FILE = (
+ 'org.chromium.base.test.ScreenshotOnFailureStatement.ScreenshotFile')
+
+EXTRA_UI_CAPTURE_DIR = (
+ 'org.chromium.base.test.util.Screenshooter.ScreenshotDir')
+
+EXTRA_TRACE_FILE = ('org.chromium.base.test.BaseJUnit4ClassRunner.TraceFile')
+
+_EXTRA_TEST_LIST = (
+ 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.TestList')
+
+FEATURE_ANNOTATION = 'Feature'
+RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest'
+
+# This needs to be kept in sync with formatting in |RenderUtils.imageName|
+RE_RENDER_IMAGE_NAME = re.compile(
+ r'(?P<test_class>\w+)\.'
+ r'(?P<description>[-\w]+)\.'
+ r'(?P<device_model_sdk>[-\w]+)\.png')
+
+@contextlib.contextmanager
+def _LogTestEndpoints(device, test_name):
+ device.RunShellCommand(
+ ['log', '-p', 'i', '-t', _TAG, 'START %s' % test_name],
+ check_return=True)
+ try:
+ yield
+ finally:
+ device.RunShellCommand(
+ ['log', '-p', 'i', '-t', _TAG, 'END %s' % test_name],
+ check_return=True)
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner
+# is deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+ # Dismiss any error dialogs. Limit the number in case we have an error
+ # loop or we are failing to dismiss.
+ try:
+ for _ in xrange(10):
+ package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1)
+ if not package:
+ return False
+ # Assume test package convention of ".test" suffix
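+      # (e.g. a crash reported for 'com.example.app' matches the test package
+      # name 'com.example.app.test'; names are hypothetical).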
+ if package in package_name:
+ return True
+ except device_errors.CommandFailedError:
+ logging.exception('Error while attempting to dismiss crash dialog.')
+ return False
+
+
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+ r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+class LocalDeviceInstrumentationTestRun(
+ local_device_test_run.LocalDeviceTestRun):
+ def __init__(self, env, test_instance):
+ super(LocalDeviceInstrumentationTestRun, self).__init__(
+ env, test_instance)
+ self._flag_changers = {}
+ self._replace_package_contextmanager = None
+ self._shared_prefs_to_restore = []
+ self._use_webview_contextmanager = None
+
+ #override
+ def TestPackage(self):
+ return self._test_instance.suite
+
+ #override
+ def SetUp(self):
+ @local_device_environment.handle_shard_failures_with(
+ self._env.BlacklistDevice)
+ @trace_event.traced
+ def individual_device_set_up(device, host_device_tuples):
+ steps = []
+
+ if self._test_instance.replace_system_package:
+ @trace_event.traced
+ def replace_package(dev):
+ # We need the context manager to be applied before modifying any
+ # shared preference files in case the replacement APK needs to be
+ # set up, and it needs to be applied while the test is running.
+ # Thus, it needs to be applied early during setup, but must still be
+ # applied during _RunTest, which isn't possible using 'with' without
+ # applying the context manager up in test_runner. Instead, we
+ # manually invoke its __enter__ and __exit__ methods in setup and
+ # teardown.
+ self._replace_package_contextmanager = system_app.ReplaceSystemApp(
+ dev, self._test_instance.replace_system_package.package,
+ self._test_instance.replace_system_package.replacement_apk)
+ # Pylint is not smart enough to realize that this field has
+ # an __enter__ method, and will complain loudly.
+ # pylint: disable=no-member
+ self._replace_package_contextmanager.__enter__()
+ # pylint: enable=no-member
+
+ steps.append(replace_package)
+
+ if self._test_instance.use_webview_provider:
+ @trace_event.traced
+ def use_webview_provider(dev):
+ # We need the context manager to be applied before modifying any
+ # shared preference files in case the replacement APK needs to be
+ # set up, and it needs to be applied while the test is running.
+ # Thus, it needs to be applied early during setup, but must still be
+ # applied during _RunTest, which isn't possible using 'with' without
+ # applying the context manager up in test_runner. Instead, we
+ # manually invoke its __enter__ and __exit__ methods in setup and
+ # teardown.
+ self._use_webview_contextmanager = webview_app.UseWebViewProvider(
+ dev, self._test_instance.use_webview_provider)
+ # Pylint is not smart enough to realize that this field has
+ # an __enter__ method, and will complain loudly.
+ # pylint: disable=no-member
+ self._use_webview_contextmanager.__enter__()
+ # pylint: enable=no-member
+
+ steps.append(use_webview_provider)
+
+ def install_helper(apk, permissions):
+ @instrumentation_tracing.no_tracing
+ @trace_event.traced("apk_path")
+ def install_helper_internal(d, apk_path=apk.path):
+ # pylint: disable=unused-argument
+ d.Install(apk, permissions=permissions)
+ return install_helper_internal
+
+ def incremental_install_helper(apk, json_path, permissions):
+ @trace_event.traced("apk_path")
+ def incremental_install_helper_internal(d, apk_path=apk.path):
+ # pylint: disable=unused-argument
+ installer.Install(d, json_path, apk=apk, permissions=permissions)
+ return incremental_install_helper_internal
+
+ if self._test_instance.apk_under_test:
+ permissions = self._test_instance.apk_under_test.GetPermissions()
+ if self._test_instance.apk_under_test_incremental_install_json:
+ steps.append(incremental_install_helper(
+ self._test_instance.apk_under_test,
+ self._test_instance.
+ apk_under_test_incremental_install_json,
+ permissions))
+ else:
+ steps.append(install_helper(self._test_instance.apk_under_test,
+ permissions))
+
+ permissions = self._test_instance.test_apk.GetPermissions()
+ if self._test_instance.test_apk_incremental_install_json:
+ steps.append(incremental_install_helper(
+ self._test_instance.test_apk,
+ self._test_instance.
+ test_apk_incremental_install_json,
+ permissions))
+ else:
+ steps.append(install_helper(self._test_instance.test_apk,
+ permissions))
+
+ steps.extend(install_helper(apk, None)
+ for apk in self._test_instance.additional_apks)
+
+ @trace_event.traced
+ def set_debug_app(dev):
+ # Set debug app in order to enable reading command line flags on user
+ # builds
+ package_name = None
+ if self._test_instance.apk_under_test:
+ package_name = self._test_instance.apk_under_test.GetPackageName()
+ elif self._test_instance.test_apk:
+ package_name = self._test_instance.test_apk.GetPackageName()
+ else:
+ logging.error("Couldn't set debug app: no package name found")
+ return
+ cmd = ['am', 'set-debug-app', '--persistent']
+ if self._test_instance.wait_for_java_debugger:
+ cmd.append('-w')
+ cmd.append(package_name)
+ dev.RunShellCommand(cmd, check_return=True)
+
+ @trace_event.traced
+ def edit_shared_prefs(dev):
+ for setting in self._test_instance.edit_shared_prefs:
+ shared_pref = shared_prefs.SharedPrefs(
+ dev, setting['package'], setting['filename'],
+ use_encrypted_path=setting.get('supports_encrypted_path', False))
+ pref_to_restore = copy.copy(shared_pref)
+ pref_to_restore.Load()
+ self._shared_prefs_to_restore.append(pref_to_restore)
+
+ shared_preference_utils.ApplySharedPreferenceSetting(
+ shared_pref, setting)
+
+ @trace_event.traced
+ def set_vega_permissions(dev):
+ # Normally, installation of VrCore automatically grants storage
+ # permissions. However, since VrCore is part of the system image on
+ # the Vega standalone headset, we don't install the APK as part of test
+ # setup. Instead, grant the permissions here so that it can take
+ # screenshots.
+ if dev.product_name == 'vega':
+ dev.GrantPermissions('com.google.vr.vrcore', [
+ 'android.permission.WRITE_EXTERNAL_STORAGE',
+ 'android.permission.READ_EXTERNAL_STORAGE'
+ ])
+
+ @instrumentation_tracing.no_tracing
+ def push_test_data(dev):
+ device_root = posixpath.join(dev.GetExternalStoragePath(),
+ 'chromium_tests_root')
+ host_device_tuples_substituted = [
+ (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+ for h, d in host_device_tuples]
+ logging.info('instrumentation data deps:')
+ for h, d in host_device_tuples_substituted:
+ logging.info('%r -> %r', h, d)
+ dev.PushChangedFiles(host_device_tuples_substituted,
+ delete_device_stale=True)
+ if not host_device_tuples_substituted:
+ dev.RunShellCommand(['rm', '-rf', device_root], check_return=True)
+ dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+ @trace_event.traced
+ def create_flag_changer(dev):
+ if self._test_instance.flags:
+ self._CreateFlagChangerIfNeeded(dev)
+ logging.debug('Attempting to set flags: %r',
+ self._test_instance.flags)
+ self._flag_changers[str(dev)].AddFlags(self._test_instance.flags)
+
+ valgrind_tools.SetChromeTimeoutScale(
+ dev, self._test_instance.timeout_scale)
+
+ steps += [
+ set_debug_app, edit_shared_prefs, push_test_data, create_flag_changer,
+ set_vega_permissions
+ ]
+
+ def bind_crash_handler(step, dev):
+ return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+ steps = [bind_crash_handler(s, device) for s in steps]
+
+ try:
+ if self._env.concurrent_adb:
+ reraiser_thread.RunAsync(steps)
+ else:
+ for step in steps:
+ step()
+ if self._test_instance.store_tombstones:
+ tombstones.ClearAllTombstones(device)
+ except device_errors.CommandFailedError:
+ # A bugreport can be large and take a while to generate, so only capture
+ # one if we're using a remote manager.
+ if isinstance(
+ self._env.output_manager,
+ remote_output_manager.RemoteOutputManager):
+ logging.error(
+ 'Error when setting up device for tests. Taking a bugreport for '
+ 'investigation. This may take a while...')
+ report_name = '%s.bugreport' % device.serial
+ with self._env.output_manager.ArchivedTempfile(
+ report_name, 'bug_reports') as report_file:
+ device.TakeBugReport(report_file.name)
+ logging.error('Bug report saved to %s', report_file.Link())
+ raise
+
+ self._env.parallel_devices.pMap(
+ individual_device_set_up,
+ self._test_instance.GetDataDependencies())
+ if self._test_instance.wait_for_java_debugger:
+ apk = self._test_instance.apk_under_test or self._test_instance.test_apk
+ logging.warning('*' * 80)
+ logging.warning('Waiting for debugger to attach to process: %s',
+ apk.GetPackageName())
+ logging.warning('*' * 80)
+
+ #override
+ def TearDown(self):
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
+ @local_device_environment.handle_shard_failures_with(
+ self._env.BlacklistDevice)
+ @trace_event.traced
+ def individual_device_tear_down(dev):
+ if str(dev) in self._flag_changers:
+ self._flag_changers[str(dev)].Restore()
+
+ # Remove package-specific configuration
+ dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True)
+
+ valgrind_tools.SetChromeTimeoutScale(dev, None)
+
+ # Restore any shared preference files that we stored during setup.
+      # This must run before the replace-package context manager exits, so
+      # that we don't have to special-case restoring files of replaced
+      # system apps.
+ for pref_to_restore in self._shared_prefs_to_restore:
+ pref_to_restore.Commit(force_commit=True)
+
+      # Context manager exit handlers are applied in the reverse order of
+      # their enter handlers.
+ if self._use_webview_contextmanager:
+ # See pylint-related comment above with __enter__()
+ # pylint: disable=no-member
+ self._use_webview_contextmanager.__exit__(*sys.exc_info())
+ # pylint: enable=no-member
+
+ if self._replace_package_contextmanager:
+ # See pylint-related comment above with __enter__()
+ # pylint: disable=no-member
+ self._replace_package_contextmanager.__exit__(*sys.exc_info())
+ # pylint: enable=no-member
+
+ self._env.parallel_devices.pMap(individual_device_tear_down)
+
+ def _CreateFlagChangerIfNeeded(self, device):
+ if str(device) not in self._flag_changers:
+ cmdline_file = 'test-cmdline-file'
+ if self._test_instance.use_apk_under_test_flags_file:
+ if self._test_instance.package_info:
+ cmdline_file = self._test_instance.package_info.cmdline_file
+ else:
+ logging.warning(
+ 'No PackageInfo found, falling back to using flag file %s',
+ cmdline_file)
+ self._flag_changers[str(device)] = flag_changer.FlagChanger(
+ device, cmdline_file)
+
+ #override
+ def _CreateShards(self, tests):
+ return tests
+
+ #override
+ def _GetTests(self):
+ if self._test_instance.junit4_runner_supports_listing:
+ raw_tests = self._GetTestsFromRunner()
+ tests = self._test_instance.ProcessRawTests(raw_tests)
+ else:
+ tests = self._test_instance.GetTests()
+ tests = self._ApplyExternalSharding(
+ tests, self._test_instance.external_shard_index,
+ self._test_instance.total_external_shards)
+ return tests
+
+ #override
+ def _GetUniqueTestName(self, test):
+ return instrumentation_test_instance.GetUniqueTestName(test)
+
+ #override
+ def _RunTest(self, device, test):
+ extras = {}
+
+ flags_to_add = []
+ test_timeout_scale = None
+ if self._test_instance.coverage_directory:
+ coverage_basename = '%s.ec' % ('%s_group' % test[0]['method']
+ if isinstance(test, list) else test['method'])
+ extras['coverage'] = 'true'
+ coverage_directory = os.path.join(
+ device.GetExternalStoragePath(), 'chrome', 'test', 'coverage')
+ coverage_device_file = os.path.join(
+ coverage_directory, coverage_basename)
+ extras['coverageFile'] = coverage_device_file
+ # Save screenshot if screenshot dir is specified (save locally) or if
+ # a GS bucket is passed (save in cloud).
+ screenshot_device_file = device_temp_file.DeviceTempFile(
+ device.adb, suffix='.png', dir=device.GetExternalStoragePath())
+ extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name
+
+ # Set up the screenshot directory. This needs to be done for each test so
+ # that we only get screenshots created by that test. It has to be on
+ # external storage since the default location doesn't allow file creation
+ # from the instrumentation test app on Android L and M.
+ ui_capture_dir = device_temp_file.NamedDeviceTemporaryDirectory(
+ device.adb,
+ dir=device.GetExternalStoragePath())
+ extras[EXTRA_UI_CAPTURE_DIR] = ui_capture_dir.name
+
+ if self._env.trace_output:
+ trace_device_file = device_temp_file.DeviceTempFile(
+ device.adb, suffix='.json', dir=device.GetExternalStoragePath())
+ extras[EXTRA_TRACE_FILE] = trace_device_file.name
+
+ if isinstance(test, list):
+ if not self._test_instance.driver_apk:
+ raise Exception('driver_apk does not exist. '
+ 'Please build it and try again.')
+ if any(t.get('is_junit4') for t in test):
+ raise Exception('driver apk does not support JUnit4 tests')
+
+ def name_and_timeout(t):
+ n = instrumentation_test_instance.GetTestName(t)
+ i = self._GetTimeoutFromAnnotations(t['annotations'], n)
+ return (n, i)
+
+ test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
+
+ test_name = ','.join(test_names)
+ test_display_name = test_name
+ target = '%s/%s' % (
+ self._test_instance.driver_package,
+ self._test_instance.driver_name)
+ extras.update(
+ self._test_instance.GetDriverEnvironmentVars(
+ test_list=test_names))
+ timeout = sum(timeouts)
+ else:
+ test_name = instrumentation_test_instance.GetTestName(test)
+ test_display_name = self._GetUniqueTestName(test)
+ if test['is_junit4']:
+ target = '%s/%s' % (
+ self._test_instance.test_package,
+ self._test_instance.junit4_runner_class)
+ else:
+ target = '%s/%s' % (
+ self._test_instance.test_package,
+ self._test_instance.junit3_runner_class)
+ extras['class'] = test_name
+ if 'flags' in test and test['flags']:
+ flags_to_add.extend(test['flags'])
+ timeout = self._GetTimeoutFromAnnotations(
+ test['annotations'], test_display_name)
+
+ test_timeout_scale = self._GetTimeoutScaleFromAnnotations(
+ test['annotations'])
+ if test_timeout_scale and test_timeout_scale != 1:
+ valgrind_tools.SetChromeTimeoutScale(
+ device, test_timeout_scale * self._test_instance.timeout_scale)
+
+ if self._test_instance.wait_for_java_debugger:
+ timeout = None
+ logging.info('preparing to run %s: %s', test_display_name, test)
+
+ render_tests_device_output_dir = None
+ if _IsRenderTest(test):
+ # TODO(mikecase): Add DeviceTempDirectory class and use that instead.
+ render_tests_device_output_dir = posixpath.join(
+ device.GetExternalStoragePath(),
+ 'render_test_output_dir')
+ flags_to_add.append('--render-test-output-dir=%s' %
+ render_tests_device_output_dir)
+
+ if flags_to_add:
+ self._CreateFlagChangerIfNeeded(device)
+ self._flag_changers[str(device)].PushFlags(add=flags_to_add)
+
+ time_ms = lambda: int(time.time() * 1e3)
+ start_ms = time_ms()
+
+ stream_name = 'logcat_%s_%s_%s' % (
+ test_name.replace('#', '.'),
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+ device.serial)
+
+ with ui_capture_dir:
+ with self._env.output_manager.ArchivedTempfile(
+ stream_name, 'logcat') as logcat_file:
+        logmon = None
+        try:
+ with logcat_monitor.LogcatMonitor(
+ device.adb,
+ filter_specs=local_device_environment.LOGCAT_FILTERS,
+ output_file=logcat_file.name,
+ transform_func=self._test_instance.MaybeDeobfuscateLines
+ ) as logmon:
+ with _LogTestEndpoints(device, test_name):
+ with contextlib_ext.Optional(
+ trace_event.trace(test_name),
+ self._env.trace_output):
+ output = device.StartInstrumentation(
+ target, raw=True, extras=extras, timeout=timeout, retries=0)
+        finally:
+          # If LogcatMonitor setup failed, |logmon| was never bound; guard
+          # so a NameError here doesn't mask the original exception.
+          if logmon:
+            logmon.Close()
+
+ if logcat_file.Link():
+ logging.info('Logcat saved to %s', logcat_file.Link())
+
+ duration_ms = time_ms() - start_ms
+
+ with contextlib_ext.Optional(
+ trace_event.trace('ProcessResults'),
+ self._env.trace_output):
+ output = self._test_instance.MaybeDeobfuscateLines(output)
+ # TODO(jbudorick): Make instrumentation tests output a JSON so this
+ # doesn't have to parse the output.
+ result_code, result_bundle, statuses = (
+ self._test_instance.ParseAmInstrumentRawOutput(output))
+ results = self._test_instance.GenerateTestResults(
+ result_code, result_bundle, statuses, start_ms, duration_ms,
+ device.product_cpu_abi, self._test_instance.symbolizer)
+
+ if self._env.trace_output:
+ self._SaveTraceData(trace_device_file, device, test['class'])
+
+ def restore_flags():
+ if flags_to_add:
+ self._flag_changers[str(device)].Restore()
+
+ def restore_timeout_scale():
+ if test_timeout_scale:
+ valgrind_tools.SetChromeTimeoutScale(
+ device, self._test_instance.timeout_scale)
+
+ def handle_coverage_data():
+ if self._test_instance.coverage_directory:
+ device.PullFile(coverage_directory,
+ self._test_instance.coverage_directory)
+ device.RunShellCommand(
+ 'rm -f %s' % posixpath.join(coverage_directory, '*'),
+ check_return=True, shell=True)
+
+ def handle_render_test_data():
+ if _IsRenderTest(test):
+ # Render tests do not cause test failure by default. So we have to
+ # check to see if any failure images were generated even if the test
+ # does not fail.
+ try:
+ self._ProcessRenderTestResults(
+ device, render_tests_device_output_dir, results)
+ finally:
+ device.RemovePath(render_tests_device_output_dir,
+ recursive=True, force=True)
+
+ def pull_ui_screen_captures():
+ screenshots = []
+ for filename in device.ListDirectory(ui_capture_dir.name):
+ if filename.endswith('.json'):
+ screenshots.append(pull_ui_screenshot(filename))
+ if screenshots:
+ json_archive_name = 'ui_capture_%s_%s.json' % (
+ test_name.replace('#', '.'),
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+ with self._env.output_manager.ArchivedTempfile(
+ json_archive_name, 'ui_capture', output_manager.Datatype.JSON
+ ) as json_archive:
+ json.dump(screenshots, json_archive)
+ for result in results:
+ result.SetLink('ui screenshot', json_archive.Link())
+
+ def pull_ui_screenshot(filename):
+ source_dir = ui_capture_dir.name
+ json_path = posixpath.join(source_dir, filename)
+ json_data = json.loads(device.ReadFile(json_path))
+ image_file_path = posixpath.join(source_dir, json_data['location'])
+ with self._env.output_manager.ArchivedTempfile(
+ json_data['location'], 'ui_capture', output_manager.Datatype.PNG
+ ) as image_archive:
+ device.PullFile(image_file_path, image_archive.name)
+ json_data['image_link'] = image_archive.Link()
+ return json_data
+
+ # While constructing the TestResult objects, we can parallelize several
+ # steps that involve ADB. These steps should NOT depend on any info in
+ # the results! Things such as whether the test CRASHED have not yet been
+ # determined.
+ post_test_steps = [restore_flags, restore_timeout_scale,
+ handle_coverage_data, handle_render_test_data,
+ pull_ui_screen_captures]
+ if self._env.concurrent_adb:
+ post_test_step_thread_group = reraiser_thread.ReraiserThreadGroup(
+ reraiser_thread.ReraiserThread(f) for f in post_test_steps)
+ post_test_step_thread_group.StartAll(will_block=True)
+ else:
+ for step in post_test_steps:
+ step()
+
+ for result in results:
+ if logcat_file:
+ result.SetLink('logcat', logcat_file.Link())
+
+ # Update the result name if the test used flags.
+ if flags_to_add:
+ for r in results:
+ if r.GetName() == test_name:
+ r.SetName(test_display_name)
+
+ # Add UNKNOWN results for any missing tests.
+ iterable_test = test if isinstance(test, list) else [test]
+ test_names = set(self._GetUniqueTestName(t) for t in iterable_test)
+ results_names = set(r.GetName() for r in results)
+ results.extend(
+ base_test_result.BaseTestResult(u, base_test_result.ResultType.UNKNOWN)
+ for u in test_names.difference(results_names))
+
+ # Update the result type if we detect a crash.
+ try:
+ if DidPackageCrashOnDevice(self._test_instance.test_package, device):
+ for r in results:
+ if r.GetType() == base_test_result.ResultType.UNKNOWN:
+ r.SetType(base_test_result.ResultType.CRASH)
+ except device_errors.CommandTimeoutError:
+ logging.warning('timed out when detecting/dismissing error dialogs')
+ # Attach screenshot to the test to help with debugging the dialog boxes.
+ self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+ results, 'dialog_box_screenshot')
+
+ # Handle failures by:
+ # - optionally taking a screenshot
+ # - logging the raw output at INFO level
+ # - clearing the application state while persisting permissions
+ if any(r.GetType() not in (base_test_result.ResultType.PASS,
+ base_test_result.ResultType.SKIP)
+ for r in results):
+ self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+ results, 'post_test_screenshot')
+
+ logging.info('detected failure in %s. raw output:', test_display_name)
+ for l in output:
+ logging.info(' %s', l)
+ if (not self._env.skip_clear_data
+ and self._test_instance.package_info):
+ permissions = (
+ self._test_instance.apk_under_test.GetPermissions()
+ if self._test_instance.apk_under_test
+ else None)
+ device.ClearApplicationState(self._test_instance.package_info.package,
+ permissions=permissions)
+ else:
+ logging.debug('raw output from %s:', test_display_name)
+ for l in output:
+ logging.debug(' %s', l)
+ if self._test_instance.store_tombstones:
+ tombstones_url = None
+ for result in results:
+ if result.GetType() == base_test_result.ResultType.CRASH:
+ if not tombstones_url:
+ resolved_tombstones = tombstones.ResolveTombstones(
+ device,
+ resolve_all_tombstones=True,
+ include_stack_symbols=False,
+ wipe_tombstones=True,
+ tombstone_symbolizer=self._test_instance.symbolizer)
+ tombstone_filename = 'tombstones_%s_%s' % (
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+ device.serial)
+ with self._env.output_manager.ArchivedTempfile(
+ tombstone_filename, 'tombstones') as tombstone_file:
+ tombstone_file.write('\n'.join(resolved_tombstones))
+ result.SetLink('tombstones', tombstone_file.Link())
+ if self._env.concurrent_adb:
+ post_test_step_thread_group.JoinAll()
+ return results, None
+
+ def _GetTestsFromRunner(self):
+ test_apk_path = self._test_instance.test_apk.path
+ pickle_path = '%s-runner.pickle' % test_apk_path
+ # For incremental APKs, the code doesn't live in the apk, so instead check
+ # the timestamp of the target's .stamp file.
+ if self._test_instance.test_apk_incremental_install_json:
+ with open(self._test_instance.test_apk_incremental_install_json) as f:
+ data = json.load(f)
+ out_dir = constants.GetOutDirectory()
+ test_mtime = max(
+ os.path.getmtime(os.path.join(out_dir, p)) for p in data['dex_files'])
+ else:
+ test_mtime = os.path.getmtime(test_apk_path)
+
+ try:
+ return instrumentation_test_instance.GetTestsFromPickle(
+ pickle_path, test_mtime)
+ except instrumentation_test_instance.TestListPickleException as e:
+ logging.info('Could not get tests from pickle: %s', e)
+ logging.info('Getting tests by having %s list them.',
+ self._test_instance.junit4_runner_class)
+ def list_tests(d):
+ def _run(dev):
+ with device_temp_file.DeviceTempFile(
+ dev.adb, suffix='.json',
+ dir=dev.GetExternalStoragePath()) as dev_test_list_json:
+ junit4_runner_class = self._test_instance.junit4_runner_class
+ test_package = self._test_instance.test_package
+ extras = {
+ 'log': 'true',
+ # Workaround for https://github.com/mockito/mockito/issues/922
+ 'notPackage': 'net.bytebuddy',
+ }
+ extras[_EXTRA_TEST_LIST] = dev_test_list_json.name
+ target = '%s/%s' % (test_package, junit4_runner_class)
+ timeout = 120
+ if self._test_instance.wait_for_java_debugger:
+ timeout = None
+ test_list_run_output = dev.StartInstrumentation(
+ target, extras=extras, retries=0, timeout=timeout)
+ if any(test_list_run_output):
+ logging.error('Unexpected output while listing tests:')
+ for line in test_list_run_output:
+ logging.error(' %s', line)
+ with tempfile_ext.NamedTemporaryDirectory() as host_dir:
+ host_file = os.path.join(host_dir, 'list_tests.json')
+ dev.PullFile(dev_test_list_json.name, host_file)
+            with open(host_file, 'r') as host_file_handle:
+              return json.load(host_file_handle)
+
+ return crash_handler.RetryOnSystemCrash(_run, d)
+
+ raw_test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+ # If all devices failed to list tests, raise an exception.
+ # Check that tl is not None and is not empty.
+ if all(not tl for tl in raw_test_lists):
+ raise device_errors.CommandFailedError(
+ 'Failed to list tests on any device')
+
+ # Get the first viable list of raw tests
+ raw_tests = [tl for tl in raw_test_lists if tl][0]
+
+ instrumentation_test_instance.SaveTestsToPickle(pickle_path, raw_tests)
+ return raw_tests
+
+ def _SaveTraceData(self, trace_device_file, device, test_class):
+ trace_host_file = self._env.trace_output
+
+ if device.FileExists(trace_device_file.name):
+ try:
+ java_trace_json = device.ReadFile(trace_device_file.name)
+ except IOError:
+ raise Exception('error pulling trace file from device')
+ finally:
+ trace_device_file.close()
+
+ process_name = '%s (device %s)' % (test_class, device.serial)
+ process_hash = int(hashlib.md5(process_name).hexdigest()[:6], 16)
+
+ java_trace = json.loads(java_trace_json)
+ java_trace.sort(key=lambda event: event['ts'])
+
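+    # $EPOCHREALTIME expands in mksh (Android's shell) to epoch seconds
+    # with a fractional part; convert to microseconds to match trace
+    # event timestamps.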
+ get_date_command = 'echo $EPOCHREALTIME'
+ device_time = device.RunShellCommand(get_date_command, single_line=True)
+ device_time = float(device_time) * 1e6
+ system_time = trace_time.Now()
+ time_difference = system_time - device_time
+
+ threads_to_add = set()
+ for event in java_trace:
+ # Ensure thread ID and thread name will be linked in the metadata.
+ threads_to_add.add((event['tid'], event['name']))
+
+ event['pid'] = process_hash
+
+ # Adjust time stamp to align with Python trace times (from
+ # trace_time.Now()).
+ event['ts'] += time_difference
+
+ for tid, thread_name in threads_to_add:
+ thread_name_metadata = {'pid': process_hash, 'tid': tid,
+ 'ts': 0, 'ph': 'M', 'cat': '__metadata',
+ 'name': 'thread_name',
+ 'args': {'name': thread_name}}
+ java_trace.append(thread_name_metadata)
+
+ process_name_metadata = {'pid': process_hash, 'tid': 0, 'ts': 0,
+ 'ph': 'M', 'cat': '__metadata',
+ 'name': 'process_name',
+ 'args': {'name': process_name}}
+ java_trace.append(process_name_metadata)
+
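+    # Keep the on-disk trace as an unterminated JSON array: strip this
+    # chunk's closing bracket (and its opening bracket when appending) so
+    # that successive tests concatenate into a single array.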
+ java_trace_json = json.dumps(java_trace)
+ java_trace_json = java_trace_json.rstrip(' ]')
+
+ with open(trace_host_file, 'r') as host_handle:
+ host_contents = host_handle.readline()
+
+ if host_contents:
+ java_trace_json = ',%s' % java_trace_json.lstrip(' [')
+
+ with open(trace_host_file, 'a') as host_handle:
+ host_handle.write(java_trace_json)
+
+ def _SaveScreenshot(self, device, screenshot_device_file, test_name, results,
+ link_name):
+ screenshot_filename = '%s-%s.png' % (
+ test_name, time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+ if device.FileExists(screenshot_device_file.name):
+ with self._env.output_manager.ArchivedTempfile(
+ screenshot_filename, 'screenshot',
+ output_manager.Datatype.PNG) as screenshot_host_file:
+ try:
+ device.PullFile(screenshot_device_file.name,
+ screenshot_host_file.name)
+ finally:
+ screenshot_device_file.close()
+ for result in results:
+ result.SetLink(link_name, screenshot_host_file.Link())
+
+ def _ProcessRenderTestResults(
+ self, device, render_tests_device_output_dir, results):
+
+ failure_images_device_dir = posixpath.join(
+ render_tests_device_output_dir, 'failures')
+ if not device.FileExists(failure_images_device_dir):
+ return
+
+ diff_images_device_dir = posixpath.join(
+ render_tests_device_output_dir, 'diffs')
+
+ golden_images_device_dir = posixpath.join(
+ render_tests_device_output_dir, 'goldens')
+
+ for failure_filename in device.ListDirectory(failure_images_device_dir):
+
+ with self._env.output_manager.ArchivedTempfile(
+ 'fail_%s' % failure_filename, 'render_tests',
+ output_manager.Datatype.PNG) as failure_image_host_file:
+ device.PullFile(
+ posixpath.join(failure_images_device_dir, failure_filename),
+ failure_image_host_file.name)
+ failure_link = failure_image_host_file.Link()
+
+ golden_image_device_file = posixpath.join(
+ golden_images_device_dir, failure_filename)
+ if device.PathExists(golden_image_device_file):
+ with self._env.output_manager.ArchivedTempfile(
+ 'golden_%s' % failure_filename, 'render_tests',
+ output_manager.Datatype.PNG) as golden_image_host_file:
+ device.PullFile(
+ golden_image_device_file, golden_image_host_file.name)
+ golden_link = golden_image_host_file.Link()
+ else:
+ golden_link = ''
+
+ diff_image_device_file = posixpath.join(
+ diff_images_device_dir, failure_filename)
+ if device.PathExists(diff_image_device_file):
+ with self._env.output_manager.ArchivedTempfile(
+ 'diff_%s' % failure_filename, 'render_tests',
+ output_manager.Datatype.PNG) as diff_image_host_file:
+ device.PullFile(
+ diff_image_device_file, diff_image_host_file.name)
+ diff_link = diff_image_host_file.Link()
+ else:
+ diff_link = ''
+
+ jinja2_env = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR),
+ trim_blocks=True)
+ template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
+ # pylint: disable=no-member
+ processed_template_output = template.render(
+ test_name=failure_filename,
+ failure_link=failure_link,
+ golden_link=golden_link,
+ diff_link=diff_link)
+
+ with self._env.output_manager.ArchivedTempfile(
+ '%s.html' % failure_filename, 'render_tests',
+ output_manager.Datatype.HTML) as html_results:
+ html_results.write(processed_template_output)
+ html_results.flush()
+ for result in results:
+ result.SetLink(failure_filename, html_results.Link())
+
+ #override
+ def _ShouldRetry(self, test, result):
+ # We've tried to disable retries in the past with mixed results.
+ # See crbug.com/619055 for historical context and crbug.com/797002
+ # for ongoing efforts.
+ del test, result
+ return True
+
+ #override
+ def _ShouldShard(self):
+ return True
+
+ @classmethod
+ def _GetTimeoutScaleFromAnnotations(cls, annotations):
+ try:
+ return int(annotations.get('TimeoutScale', {}).get('value', 1))
+ except ValueError as e:
+ logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e))
+ return 1
+
+ @classmethod
+ def _GetTimeoutFromAnnotations(cls, annotations, test_name):
+ for k, v in TIMEOUT_ANNOTATIONS:
+ if k in annotations:
+ timeout = v
+ break
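+    # The for/else runs the else branch only when the loop finishes without
+    # a break, i.e. when no timeout annotation matched.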
+ else:
+ logging.warning('Using default 1 minute timeout for %s', test_name)
+ timeout = 60
+
+ timeout *= cls._GetTimeoutScaleFromAnnotations(annotations)
+
+ return timeout
+
+
+def _IsRenderTest(test):
+ """Determines if a test or list of tests has a RenderTest amongst them."""
+ if not isinstance(test, list):
+ test = [test]
+  return any(RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+      FEATURE_ANNOTATION, {}).get('value', ()) for t in test)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
new file mode 100755
index 0000000000..fb96ee6bbd
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for local_device_instrumentation_test_run."""
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.base import mock_environment
+from pylib.base import mock_test_instance
+from pylib.local.device import local_device_instrumentation_test_run
+
+
+class LocalDeviceInstrumentationTestRunTest(unittest.TestCase):
+
+ # TODO(crbug.com/797002): Decide whether the _ShouldRetry hook is worth
+ # retaining and remove these tests if not.
+
+ def testShouldRetry_failure(self):
+ env = mock_environment.MockEnvironment()
+ ti = mock_test_instance.MockTestInstance()
+ obj = (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, ti))
+ test = {
+ 'annotations': {},
+ 'class': 'SadTest',
+ 'method': 'testFailure',
+ 'is_junit4': True,
+ }
+ result = base_test_result.BaseTestResult(
+ 'SadTest.testFailure', base_test_result.ResultType.FAIL)
+ self.assertTrue(obj._ShouldRetry(test, result))
+
+ def testShouldRetry_retryOnFailure(self):
+ env = mock_environment.MockEnvironment()
+ ti = mock_test_instance.MockTestInstance()
+ obj = (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, ti))
+ test = {
+ 'annotations': {'RetryOnFailure': None},
+ 'class': 'SadTest',
+ 'method': 'testRetryOnFailure',
+ 'is_junit4': True,
+ }
+ result = base_test_result.BaseTestResult(
+ 'SadTest.testRetryOnFailure', base_test_result.ResultType.FAIL)
+ self.assertTrue(obj._ShouldRetry(test, result))
+
+ def testShouldRetry_notRun(self):
+ env = mock_environment.MockEnvironment()
+ ti = mock_test_instance.MockTestInstance()
+ obj = (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, ti))
+ test = {
+ 'annotations': {},
+ 'class': 'SadTest',
+ 'method': 'testNotRun',
+ 'is_junit4': True,
+ }
+ result = base_test_result.BaseTestResult(
+ 'SadTest.testNotRun', base_test_result.ResultType.NOTRUN)
+ self.assertTrue(obj._ShouldRetry(test, result))
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py
new file mode 100644
index 0000000000..2a1520e003
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import sys
+import traceback
+
+from pylib.base import base_test_result
+from pylib.linker import test_case
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+
+
+class LinkerExceptionTestResult(base_test_result.BaseTestResult):
+ """Test result corresponding to a python exception in a host-custom test."""
+
+ def __init__(self, test_name, exc_info):
+ """Constructs a LinkerExceptionTestResult object.
+
+ Args:
+ test_name: name of the test which raised an exception.
+ exc_info: exception info, ostensibly from sys.exc_info().
+ """
+ exc_type, exc_value, exc_traceback = exc_info
+ trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+ exc_traceback))
+ log_msg = 'Exception:\n' + trace_info
+
+ super(LinkerExceptionTestResult, self).__init__(
+ test_name,
+ base_test_result.ResultType.FAIL,
+ log="%s %s" % (exc_type, log_msg))
+
+
+class LocalDeviceLinkerTestRun(local_device_test_run.LocalDeviceTestRun):
+
+ def _CreateShards(self, tests):
+ return tests
+
+ def _GetTests(self):
+ return self._test_instance.GetTests()
+
+ def _GetUniqueTestName(self, test):
+ return test.qualified_name
+
+ def _RunTest(self, device, test):
+ assert isinstance(test, test_case.LinkerTestCaseBase)
+
+ try:
+ result = test.Run(device)
+ except Exception: # pylint: disable=broad-except
+ logging.exception('Caught exception while trying to run test: ' +
+ test.tagged_name)
+ exc_info = sys.exc_info()
+ result = LinkerExceptionTestResult(test.tagged_name, exc_info)
+
+ return result, None
+
+ def SetUp(self):
+ @local_device_environment.handle_shard_failures_with(
+ on_failure=self._env.BlacklistDevice)
+ def individual_device_set_up(dev):
+ dev.Install(self._test_instance.test_apk)
+
+ self._env.parallel_devices.pMap(individual_device_set_up)
+
+ def _ShouldShard(self):
+ return True
+
+ def TearDown(self):
+ pass
+
+ def TestPackage(self):
+ pass
diff --git a/deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py
new file mode 100644
index 0000000000..fe178c8fdb
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.local.device import local_device_test_run
+
+
+_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
+
+class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun):
+ def __init__(self, env, test_instance):
+ super(LocalDeviceMonkeyTestRun, self).__init__(env, test_instance)
+
+ def TestPackage(self):
+ return 'monkey'
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def _RunTest(self, device, test):
+ device.ClearApplicationState(self._test_instance.package)
+
+ # Chrome crashes are not always caught by Monkey test runner.
+ # Launch Chrome and verify Chrome has the same PID before and after
+ # the test.
+ device.StartActivity(
+ intent.Intent(package=self._test_instance.package,
+ activity=self._test_instance.activity,
+ action='android.intent.action.MAIN'),
+ blocking=True, force_stop=True)
+ before_pids = device.GetPids(self._test_instance.package)
+
+ output = ''
+ if before_pids:
+ if len(before_pids.get(self._test_instance.package, [])) > 1:
+ raise Exception(
+ 'At most one instance of process %s expected but found pids: '
+ '%s' % (self._test_instance.package, before_pids))
+ output = '\n'.join(self._LaunchMonkeyTest(device))
+ after_pids = device.GetPids(self._test_instance.package)
+
+ crashed = True
+    if self._test_instance.package not in before_pids:
+      logging.error('Failed to start the process.')
+    elif self._test_instance.package not in after_pids:
+ logging.error('Process %s has died.',
+ before_pids[self._test_instance.package])
+ elif (before_pids[self._test_instance.package] !=
+ after_pids[self._test_instance.package]):
+ logging.error('Detected process restart %s -> %s',
+ before_pids[self._test_instance.package],
+ after_pids[self._test_instance.package])
+ else:
+ crashed = False
+
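+    # The monkey tool prints 'Events injected: <count>' once it completes
+    # the requested number of events; missing output or a detected process
+    # restart is treated as a failure.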
+ success_pattern = 'Events injected: %d' % self._test_instance.event_count
+ if success_pattern in output and not crashed:
+ result = base_test_result.BaseTestResult(
+ test, base_test_result.ResultType.PASS, log=output)
+ else:
+ result = base_test_result.BaseTestResult(
+ test, base_test_result.ResultType.FAIL, log=output)
+ if 'chrome' in self._test_instance.package:
+ logging.warning('Starting MinidumpUploadService...')
+ # TODO(jbudorick): Update this after upstreaming.
+ minidump_intent = intent.Intent(
+ action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+ package=self._test_instance.package,
+ activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+ try:
+ device.RunShellCommand(
+ ['am', 'startservice'] + minidump_intent.am_args,
+ as_root=True, check_return=True)
+ except device_errors.CommandFailedError:
+ logging.exception('Failed to start MinidumpUploadService')
+
+ return result, None
+
+ #override
+ def TearDown(self):
+ pass
+
+ #override
+ def _CreateShards(self, tests):
+ return tests
+
+ #override
+ def _ShouldShard(self):
+ # TODO(mikecase): Run Monkey test concurrently on each attached device.
+ return False
+
+ #override
+ def _GetTests(self):
+ return ['MonkeyTest']
+
+ def _LaunchMonkeyTest(self, device):
+ try:
+ cmd = ['monkey',
+ '-p', self._test_instance.package,
+ '--throttle', str(self._test_instance.throttle),
+ '-s', str(self._test_instance.seed),
+ '--monitor-native-crashes',
+ '--kill-process-after-error']
+ for category in self._test_instance.categories:
+ cmd.extend(['-c', category])
+ for _ in range(self._test_instance.verbose_count):
+ cmd.append('-v')
+ cmd.append(str(self._test_instance.event_count))
+ return device.RunShellCommand(
+ cmd, timeout=self._test_instance.timeout, check_return=True)
+ finally:
+ try:
+ # Kill the monkey test process on the device. If you manually
+ # interrupt the test run, this will prevent the monkey test from
+ # continuing to run.
+ device.KillAll('com.android.commands.monkey')
+ except device_errors.CommandFailedError:
+ pass
diff --git a/deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py
new file mode 100644
index 0000000000..bc828408a0
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py
@@ -0,0 +1,538 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import io
+import json
+import logging
+import os
+import pickle
+import shutil
+import tempfile
+import threading
+import time
+import zipfile
+
+from devil.android import battery_utils
+from devil.android import device_errors
+from devil.android import device_list
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.android.tools import device_recovery
+from devil.android.tools import device_status
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+from devil.utils import reraiser_thread
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.constants import host_paths
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from py_trace_event import trace_event
+from py_utils import contextlib_ext
+
+
+class HeartBeat(object):
+
+ def __init__(self, shard, wait_time=60*10):
+    """HeartBeat logger constructor.
+
+ Args:
+ shard: A perf test runner device shard.
+ wait_time: time to wait between heartbeat messages.
+ """
+ self._shard = shard
+ self._running = False
+ self._timer = None
+ self._wait_time = wait_time
+
+ def Start(self):
+ if not self._running:
+ self._timer = threading.Timer(self._wait_time, self._LogMessage)
+ self._timer.start()
+ self._running = True
+
+ def Stop(self):
+ if self._running:
+ self._timer.cancel()
+ self._running = False
+
+ def _LogMessage(self):
+ logging.info('Currently working on test %s', self._shard.current_test)
+ self._timer = threading.Timer(self._wait_time, self._LogMessage)
+ self._timer.start()
+
+
+class TestShard(object):
+ def __init__(self, env, test_instance, tests, retries=3, timeout=None):
+ logging.info('Create shard for the following tests:')
+ for t in tests:
+ logging.info(' %s', t)
+ self._current_test = None
+ self._env = env
+ self._heart_beat = HeartBeat(self)
+ self._index = None
+ self._output_dir = None
+ self._retries = retries
+ self._test_instance = test_instance
+ self._tests = tests
+ self._timeout = timeout
+
+ def _TestSetUp(self, test):
+ if (self._test_instance.collect_chartjson_data
+ or self._tests[test].get('archive_output_dir')):
+ self._output_dir = tempfile.mkdtemp()
+
+ self._current_test = test
+ self._heart_beat.Start()
+
+ def _RunSingleTest(self, test):
+ self._test_instance.WriteBuildBotJson(self._output_dir)
+
+ timeout = self._tests[test].get('timeout', self._timeout)
+ cmd = self._CreateCmd(test)
+ cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT)
+
+ self._LogTest(test, cmd, timeout)
+
+ try:
+ start_time = time.time()
+
+ with contextlib_ext.Optional(
+ trace_event.trace(test),
+ self._env.trace_output):
+ exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+ cmd, timeout, cwd=cwd, shell=True)
+ end_time = time.time()
+ chart_json_output = self._test_instance.ReadChartjsonOutput(
+ self._output_dir)
+ if exit_code == 0:
+ result_type = base_test_result.ResultType.PASS
+ else:
+ result_type = base_test_result.ResultType.FAIL
+ except cmd_helper.TimeoutError as e:
+ end_time = time.time()
+ exit_code = -1
+ output = e.output
+ chart_json_output = ''
+ result_type = base_test_result.ResultType.TIMEOUT
+ return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code,
+ output, chart_json_output, result_type)
+
+ def _CreateCmd(self, test):
+ cmd = []
+ if self._test_instance.dry_run:
+ cmd.append('echo')
+ cmd.append(self._tests[test]['cmd'])
+ if self._output_dir:
+ cmd.append('--output-dir=%s' % self._output_dir)
+ return ' '.join(self._ExtendCmd(cmd))
+
+ def _ExtendCmd(self, cmd): # pylint: disable=no-self-use
+ return cmd
+
+ def _LogTest(self, _test, _cmd, _timeout):
+ raise NotImplementedError
+
+ def _LogTestExit(self, test, exit_code, duration):
+ # pylint: disable=no-self-use
+ logging.info('%s : exit_code=%d in %d secs.', test, exit_code, duration)
+
+ def _ExtendPersistedResult(self, persisted_result):
+ raise NotImplementedError
+
+ def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code,
+ output, chart_json_output, result_type):
+ if exit_code is None:
+ exit_code = -1
+
+ self._LogTestExit(test, exit_code, end_time - start_time)
+
+ archive_bytes = (self._ArchiveOutputDir()
+ if self._tests[test].get('archive_output_dir')
+ else None)
+ persisted_result = {
+ 'name': test,
+ 'output': [output],
+ 'chartjson': chart_json_output,
+ 'archive_bytes': archive_bytes,
+ 'exit_code': exit_code,
+ 'result_type': result_type,
+ 'start_time': start_time,
+ 'end_time': end_time,
+ 'total_time': end_time - start_time,
+ 'cmd': cmd,
+ }
+ self._ExtendPersistedResult(persisted_result)
+ self._SaveResult(persisted_result)
+ return result_type
+
+ def _ArchiveOutputDir(self):
+ """Archive all files in the output dir, and return as compressed bytes."""
+ with io.BytesIO() as archive:
+ with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as contents:
+ num_files = 0
+ for absdir, _, files in os.walk(self._output_dir):
+ reldir = os.path.relpath(absdir, self._output_dir)
+ for filename in files:
+ src_path = os.path.join(absdir, filename)
+ # We use normpath to turn './file.txt' into just 'file.txt'.
+ dst_path = os.path.normpath(os.path.join(reldir, filename))
+ contents.write(src_path, dst_path)
+ num_files += 1
+ if num_files:
+ logging.info('%d files in the output dir were archived.', num_files)
+ else:
+ logging.warning('No files in the output dir. Archive is empty.')
+ return archive.getvalue()
+
+ @staticmethod
+ def _SaveResult(result):
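+    """Pickles |result|, appending output from any prior result of its name."""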
+ pickled = os.path.join(constants.PERF_OUTPUT_DIR, result['name'])
+ if os.path.exists(pickled):
+      with open(pickled, 'r') as f:
+ previous = pickle.load(f)
+ result['output'] = previous['output'] + result['output']
+    with open(pickled, 'w') as f:
+ pickle.dump(result, f)
+
+ def _TestTearDown(self):
+ if self._output_dir:
+ shutil.rmtree(self._output_dir, ignore_errors=True)
+ self._output_dir = None
+ self._heart_beat.Stop()
+ self._current_test = None
+
+ @property
+ def current_test(self):
+ return self._current_test
+
+
+class DeviceTestShard(TestShard):
+ def __init__(
+ self, env, test_instance, device, index, tests, retries=3, timeout=None):
+ super(DeviceTestShard, self).__init__(
+ env, test_instance, tests, retries, timeout)
+ self._battery = battery_utils.BatteryUtils(device) if device else None
+ self._device = device
+ self._index = index
+
+ @local_device_environment.handle_shard_failures
+ def RunTestsOnShard(self):
+ results = base_test_result.TestRunResults()
+ for test in self._tests:
+ tries_left = self._retries
+ result_type = None
+ while (result_type != base_test_result.ResultType.PASS
+ and tries_left > 0):
+ try:
+ self._TestSetUp(test)
+ result_type = self._RunSingleTest(test)
+ except device_errors.CommandTimeoutError:
+ result_type = base_test_result.ResultType.TIMEOUT
+ except (device_errors.CommandFailedError,
+ device_errors.DeviceUnreachableError):
+ logging.exception('Exception when executing %s.', test)
+ result_type = base_test_result.ResultType.FAIL
+ finally:
+ self._TestTearDown()
+ if result_type != base_test_result.ResultType.PASS:
+ try:
+ device_recovery.RecoverDevice(self._device, self._env.blacklist)
+ except device_errors.CommandTimeoutError:
+ logging.exception(
+ 'Device failed to recover after failing %s.', test)
+ tries_left -= 1
+
+ results.AddResult(base_test_result.BaseTestResult(test, result_type))
+ return results
+
+ def _LogTestExit(self, test, exit_code, duration):
+ logging.info('%s : exit_code=%d in %d secs on device %s',
+ test, exit_code, duration, str(self._device))
+
+ @trace_event.traced
+ def _TestSetUp(self, test):
+ if not self._device.IsOnline():
+ msg = 'Device %s is unresponsive.' % str(self._device)
+ raise device_errors.DeviceUnreachableError(msg)
+
+ logging.info('Charge level: %s%%',
+ str(self._battery.GetBatteryInfo().get('level')))
+ if self._test_instance.min_battery_level:
+ self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level)
+
+ logging.info('temperature: %s (0.1 C)',
+ str(self._battery.GetBatteryInfo().get('temperature')))
+ if self._test_instance.max_battery_temp:
+ self._battery.LetBatteryCoolToTemperature(
+ self._test_instance.max_battery_temp)
+
+ if not self._device.IsScreenOn():
+ self._device.SetScreen(True)
+
+ super(DeviceTestShard, self)._TestSetUp(test)
+
+ def _LogTest(self, test, cmd, timeout):
+ logging.debug("Running %s with command '%s' on shard %s with timeout %d",
+ test, cmd, str(self._index), timeout)
+
+ def _ExtendCmd(self, cmd):
+ cmd.extend(['--device=%s' % str(self._device)])
+ return cmd
+
+ def _ExtendPersistedResult(self, persisted_result):
+ persisted_result['host_test'] = False
+ persisted_result['device'] = str(self._device)
+
+ @trace_event.traced
+ def _TestTearDown(self):
+ try:
+ logging.info('Unmapping device ports for %s.', self._device)
+ forwarder.Forwarder.UnmapAllDevicePorts(self._device)
+ except Exception: # pylint: disable=broad-except
+ logging.exception('Exception when resetting ports.')
+ finally:
+ super(DeviceTestShard, self)._TestTearDown()
+
+
+class HostTestShard(TestShard):
+ def __init__(self, env, test_instance, tests, retries=3, timeout=None):
+ super(HostTestShard, self).__init__(
+ env, test_instance, tests, retries, timeout)
+
+ @local_device_environment.handle_shard_failures
+ def RunTestsOnShard(self):
+ results = base_test_result.TestRunResults()
+ for test in self._tests:
+ tries_left = self._retries + 1
+ result_type = None
+ while (result_type != base_test_result.ResultType.PASS
+ and tries_left > 0):
+ try:
+ self._TestSetUp(test)
+ result_type = self._RunSingleTest(test)
+ finally:
+ self._TestTearDown()
+ tries_left -= 1
+ results.AddResult(base_test_result.BaseTestResult(test, result_type))
+ return results
+
+ def _LogTest(self, test, cmd, timeout):
+ logging.debug("Running %s with command '%s' on host shard with timeout %d",
+ test, cmd, timeout)
+
+ def _ExtendPersistedResult(self, persisted_result):
+ persisted_result['host_test'] = True
+
+
+class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun):
+
+ _DEFAULT_TIMEOUT = 5 * 60 * 60 # 5 hours.
+ _CONFIG_VERSION = 1
+
+ def __init__(self, env, test_instance):
+ super(LocalDevicePerfTestRun, self).__init__(env, test_instance)
+ self._devices = None
+ self._env = env
+ self._no_device_tests = {}
+ self._test_buckets = []
+ self._test_instance = test_instance
+ self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT
+
+ #override
+ def SetUp(self):
+ if os.path.exists(constants.PERF_OUTPUT_DIR):
+ shutil.rmtree(constants.PERF_OUTPUT_DIR)
+ os.makedirs(constants.PERF_OUTPUT_DIR)
+
+ #override
+ def TearDown(self):
+ pass
+
+    # By the time this is called, one of these two must be set.
+ # From where this is called one of these two must be set.
+ if self._test_instance.single_step:
+ return {
+ 'version': self._CONFIG_VERSION,
+ 'steps': {
+ 'single_step': {
+ 'device_affinity': 0,
+ 'cmd': self._test_instance.single_step
+ },
+ }
+ }
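+    # A steps file is expected to follow the same shape (a sketch inferred
+    # from the single_step literal above):
+    #   {
+    #     "version": 1,
+    #     "steps": {
+    #       "some_test": {"device_affinity": 0, "cmd": "..."}
+    #     }
+    #   }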
+ if self._test_instance.steps:
+      with open(self._test_instance.steps, 'r') as f:
+ steps = json.load(f)
+ if steps['version'] != self._CONFIG_VERSION:
+ raise TestDictVersionError(
+ 'Version is expected to be %d but was %d' % (self._CONFIG_VERSION,
+ steps['version']))
+ return steps
+    raise PerfTestRunGetStepsError(
+        'Neither single_step nor steps set in test_instance.')
+
+ def _SplitTestsByAffinity(self):
+ # This splits tests by their device affinity so that the same tests always
+ # run on the same devices. This is important for perf tests since different
+ # devices might yield slightly different performance results.
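+    # For example, every step with device_affinity 0 lands in bucket 0 and
+    # always runs on the first device of the sorted device list.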
+ test_dict = self._GetStepsFromDict()
+ for test, test_config in sorted(test_dict['steps'].iteritems()):
+ try:
+ affinity = test_config.get('device_affinity')
+ if affinity is None:
+ self._no_device_tests[test] = test_config
+ else:
+ if len(self._test_buckets) < affinity + 1:
+ while len(self._test_buckets) != affinity + 1:
+ self._test_buckets.append(collections.OrderedDict())
+ self._test_buckets[affinity][test] = test_config
+ except KeyError:
+        logging.exception(
+            'Test config for %s is bad.\nConfig: %s', test, str(test_config))
+
+ @staticmethod
+ def _GetAllDevices(active_devices, devices_path):
+ try:
+ if devices_path:
+ devices = [device_utils.DeviceUtils(s)
+ for s in device_list.GetPersistentDeviceList(devices_path)]
+ if not devices and active_devices:
+ logging.warning('%s is empty. Falling back to active devices.',
+ devices_path)
+ devices = active_devices
+ else:
+        logging.warning('No known devices file path was passed; device '
+                        'affinity will not work properly without one.')
+ devices = active_devices
+ except IOError as e:
+ logging.error('Unable to find %s [%s]', devices_path, e)
+ devices = active_devices
+ return sorted(devices)
+
+ #override
+ def RunTests(self, results):
+ def run_no_devices_tests():
+ if not self._no_device_tests:
+ return []
+ s = HostTestShard(self._env, self._test_instance, self._no_device_tests,
+ retries=3, timeout=self._timeout)
+ return [s.RunTestsOnShard()]
+
+ def device_shard_helper(shard_id):
+ if device_status.IsBlacklisted(
+ str(self._devices[shard_id]), self._env.blacklist):
+ logging.warning('Device %s is not active. Will not create shard %s.',
+ str(self._devices[shard_id]), shard_id)
+ return None
+ s = DeviceTestShard(self._env, self._test_instance,
+ self._devices[shard_id], shard_id,
+ self._test_buckets[shard_id],
+ retries=self._env.max_tries, timeout=self._timeout)
+ return s.RunTestsOnShard()
+
+ def run_devices_tests():
+ if not self._test_buckets:
+ return []
+ if self._devices is None:
+ self._devices = self._GetAllDevices(
+ self._env.devices, self._test_instance.known_devices_file)
+
+ device_indices = range(min(len(self._devices), len(self._test_buckets)))
+ shards = parallelizer.Parallelizer(device_indices).pMap(
+ device_shard_helper)
+ return [x for x in shards.pGet(self._timeout) if x is not None]
+
+ # Affinitize the tests.
+ self._SplitTestsByAffinity()
+ if not self._test_buckets and not self._no_device_tests:
+ raise local_device_test_run.NoTestsError()
+ host_test_results, device_test_results = reraiser_thread.RunAsync(
+ [run_no_devices_tests, run_devices_tests])
+
+ # Ideally, results would be populated as early as possible, so that in the
+ # event of an exception or timeout, the caller will still have partially
+ # populated results. This looks like it can be done prior to dispatching
+ # tests, but will hold off on making this change unless it looks like it
+ # might provide utility.
+ results.extend(host_test_results + device_test_results)
+
+ # override
+ def TestPackage(self):
+ return 'perf'
+
+ # override
+ def _CreateShards(self, _tests):
+ raise NotImplementedError
+
+ # override
+ def _GetTests(self):
+ return self._test_buckets
+
+ # override
+ def _RunTest(self, _device, _test):
+ raise NotImplementedError
+
+ # override
+ def _ShouldShard(self):
+ return False
+
+
+class OutputJsonList(LocalDevicePerfTestRun):
+ # override
+ def SetUp(self):
+ pass
+
+ # override
+ def RunTests(self, results):
+ result_type = self._test_instance.OutputJsonList()
+ result = base_test_result.TestRunResults()
+ result.AddResult(
+ base_test_result.BaseTestResult('OutputJsonList', result_type))
+
+ # Ideally, results would be populated as early as possible, so that in the
+ # event of an exception or timeout, the caller will still have partially
+ # populated results.
+ results.append(result)
+
+ # override
+ def _CreateShards(self, _tests):
+ raise NotImplementedError
+
+ # override
+ def _RunTest(self, _device, _test):
+ raise NotImplementedError
+
+
+class PrintStep(LocalDevicePerfTestRun):
+ # override
+ def SetUp(self):
+ pass
+
+ # override
+ def RunTests(self, results):
+ result_type = self._test_instance.PrintTestOutput()
+ result = base_test_result.TestRunResults()
+ result.AddResult(
+ base_test_result.BaseTestResult('PrintStep', result_type))
+
+ # Ideally, results would be populated as early as possible, so that in the
+ # event of an exception or timeout, the caller will still have partially
+ # populated results.
+ results.append(result)
+
+ # override
+ def _CreateShards(self, _tests):
+ raise NotImplementedError
+
+ # override
+ def _RunTest(self, _device, _test):
+ raise NotImplementedError
+
+
+class TestDictVersionError(Exception):
+ pass
+
+
+class PerfTestRunGetStepsError(Exception):
+ pass
diff --git a/deps/v8/build/android/pylib/local/device/local_device_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 0000000000..62adfabfad
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,251 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import logging
+import posixpath
+import signal
+import thread
+import threading
+
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android.sdk import version_codes
+from devil.android.tools import device_recovery
+from devil.utils import signal_handler
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+from pylib.local.device import local_device_environment
+
+
+_SIGTERM_TEST_LOG = (
+ ' Suite execution terminated, probably due to swarming timeout.\n'
+ ' Your test may not have run.')
+
+
+def SubstituteDeviceRoot(device_path, device_root):
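+  """Returns device_path with falsy components replaced by device_root.
+
+  A None or empty device_path resolves to device_root itself; for a list,
+  each falsy element is replaced by device_root before the parts are joined
+  with posixpath, so [None, 'subpath'] with root '/fake/device/root' yields
+  '/fake/device/root/subpath'.
+  """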
+ if not device_path:
+ return device_root
+ elif isinstance(device_path, list):
+ return posixpath.join(*(p if p else device_root for p in device_path))
+ else:
+ return device_path
+
+
+class TestsTerminated(Exception):
+ pass
+
+
+class InvalidShardingSettings(Exception):
+ def __init__(self, shard_index, total_shards):
+ super(InvalidShardingSettings, self).__init__(
+ 'Invalid sharding settings. shard_index: %d total_shards: %d'
+ % (shard_index, total_shards))
+
+
+class LocalDeviceTestRun(test_run.TestRun):
+
+ def __init__(self, env, test_instance):
+ super(LocalDeviceTestRun, self).__init__(env, test_instance)
+ self._tools = {}
+ env.SetPreferredAbis(test_instance.GetPreferredAbis())
+
+ #override
+ def RunTests(self, results):
+ tests = self._GetTests()
+
+ exit_now = threading.Event()
+
+ @local_device_environment.handle_shard_failures
+ def run_tests_on_device(dev, tests, results):
+ for test in tests:
+        if exit_now.is_set():
+ thread.exit()
+
+ result = None
+ rerun = None
+ try:
+ result, rerun = crash_handler.RetryOnSystemCrash(
+ lambda d, t=test: self._RunTest(d, t),
+ device=dev)
+ if isinstance(result, base_test_result.BaseTestResult):
+ results.AddResult(result)
+ elif isinstance(result, list):
+ results.AddResults(result)
+ else:
+ raise Exception(
+ 'Unexpected result type: %s' % type(result).__name__)
+ except device_errors.CommandTimeoutError:
+ if isinstance(test, list):
+ results.AddResults(
+ base_test_result.BaseTestResult(
+ self._GetUniqueTestName(t),
+ base_test_result.ResultType.TIMEOUT)
+ for t in test)
+ else:
+ results.AddResult(
+ base_test_result.BaseTestResult(
+ self._GetUniqueTestName(test),
+ base_test_result.ResultType.TIMEOUT))
+ except Exception as e: # pylint: disable=broad-except
+ if isinstance(tests, test_collection.TestCollection):
+ rerun = test
+ if (isinstance(e, device_errors.DeviceUnreachableError)
+ or not isinstance(e, base_error.BaseError)):
+ # If we get a device error but believe the device is still
+ # reachable, attempt to continue using it. Otherwise, raise
+ # the exception and terminate this run_tests_on_device call.
+ raise
+ finally:
+ if isinstance(tests, test_collection.TestCollection):
+ if rerun:
+ tests.add(rerun)
+ tests.test_completed()
+
+ logging.info('Finished running tests on this device.')
+
+ def stop_tests(_signum, _frame):
+ logging.critical('Received SIGTERM. Stopping test execution.')
+ exit_now.set()
+ raise TestsTerminated()
+
+ try:
+ with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests):
+ tries = 0
+ while tries < self._env.max_tries and tests:
+ logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries)
+ if tries > 0 and self._env.recover_devices:
+ if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1
+ for d in self._env.devices):
+ logging.info(
+ 'Attempting to recover devices due to known issue on L MR1. '
+ 'See crbug.com/787056 for details.')
+ self._env.parallel_devices.pMap(
+ device_recovery.RecoverDevice, None)
+ elif tries + 1 == self._env.max_tries:
+ logging.info(
+ 'Attempting to recover devices prior to last test attempt.')
+ self._env.parallel_devices.pMap(
+ device_recovery.RecoverDevice, None)
+ logging.info('Will run %d tests on %d devices: %s',
+ len(tests), len(self._env.devices),
+ ', '.join(str(d) for d in self._env.devices))
+ for t in tests:
+ logging.debug(' %s', t)
+
+ try_results = base_test_result.TestRunResults()
+ test_names = (self._GetUniqueTestName(t) for t in tests)
+ try_results.AddResults(
+ base_test_result.BaseTestResult(
+ t, base_test_result.ResultType.NOTRUN)
+ for t in test_names if not t.endswith('*'))
+
+ # As soon as we know the names of the tests, we populate |results|.
+ # The tests in try_results will have their results updated by
+ # try_results.AddResult() as they are run.
+ results.append(try_results)
+
+ try:
+ if self._ShouldShard():
+ tc = test_collection.TestCollection(self._CreateShards(tests))
+ self._env.parallel_devices.pMap(
+ run_tests_on_device, tc, try_results).pGet(None)
+ else:
+ self._env.parallel_devices.pMap(
+ run_tests_on_device, tests, try_results).pGet(None)
+ except TestsTerminated:
+ for unknown_result in try_results.GetUnknown():
+ try_results.AddResult(
+ base_test_result.BaseTestResult(
+ unknown_result.GetName(),
+ base_test_result.ResultType.TIMEOUT,
+ log=_SIGTERM_TEST_LOG))
+ raise
+
+ tries += 1
+ tests = self._GetTestsToRetry(tests, try_results)
+
+ logging.info('FINISHED TRY #%d/%d', tries, self._env.max_tries)
+ if tests:
+ logging.info('%d failed tests remain.', len(tests))
+ else:
+ logging.info('All tests completed.')
+ except TestsTerminated:
+ pass
+
+ def _GetTestsToRetry(self, tests, try_results):
+
+ def is_failure_result(test_result):
+ if isinstance(test_result, list):
+ return any(is_failure_result(r) for r in test_result)
+ return (
+ test_result is None
+ or test_result.GetType() not in (
+ base_test_result.ResultType.PASS,
+ base_test_result.ResultType.SKIP))
+
+ all_test_results = {r.GetName(): r for r in try_results.GetAll()}
+
+ tests_and_names = ((t, self._GetUniqueTestName(t)) for t in tests)
+
+ tests_and_results = {}
+ for test, name in tests_and_names:
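+      # A test name ending in '*' is treated as an fnmatch pattern that may
+      # match several results at once.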
+ if name.endswith('*'):
+ tests_and_results[name] = (
+ test,
+ [r for n, r in all_test_results.iteritems()
+ if fnmatch.fnmatch(n, name)])
+ else:
+ tests_and_results[name] = (test, all_test_results.get(name))
+
+ failed_tests_and_results = (
+ (test, result) for test, result in tests_and_results.itervalues()
+ if is_failure_result(result)
+ )
+
+ return [t for t, r in failed_tests_and_results if self._ShouldRetry(t, r)]
+
+ def _ApplyExternalSharding(self, tests, shard_index, total_shards):
+ logging.info('Using external sharding settings. This is shard %d/%d',
+ shard_index, total_shards)
+
+ if total_shards < 0 or shard_index < 0 or total_shards <= shard_index:
+ raise InvalidShardingSettings(shard_index, total_shards)
+
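+    # Partition by hash of the unique test name. String hashes on Python 2
+    # are deterministic across processes (absent -R), so every shard
+    # computes the same assignment.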
+ return [
+ t for t in tests
+ if hash(self._GetUniqueTestName(t)) % total_shards == shard_index]
+
+ def GetTool(self, device):
+ if str(device) not in self._tools:
+ self._tools[str(device)] = valgrind_tools.CreateTool(
+ self._env.tool, device)
+ return self._tools[str(device)]
+
+ def _CreateShards(self, tests):
+ raise NotImplementedError
+
+ def _GetUniqueTestName(self, test):
+ # pylint: disable=no-self-use
+ return test
+
+ def _ShouldRetry(self, test, result):
+ # pylint: disable=no-self-use,unused-argument
+ return True
+
+ def _GetTests(self):
+ raise NotImplementedError
+
+ def _RunTest(self, device, test):
+ raise NotImplementedError
+
+ def _ShouldShard(self):
+ raise NotImplementedError
+
+
+class NoTestsError(Exception):
+ """Error for when no tests are found."""
diff --git a/deps/v8/build/android/pylib/local/device/local_device_test_run_test.py b/deps/v8/build/android/pylib/local/device/local_device_test_run_test.py
new file mode 100755
index 0000000000..525bf25200
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_test_run_test.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.constants import host_paths
+from pylib.local.device import local_device_test_run
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+class SubstituteDeviceRootTest(unittest.TestCase):
+
+ def testNoneDevicePath(self):
+ self.assertEquals(
+ '/fake/device/root',
+ local_device_test_run.SubstituteDeviceRoot(
+ None, '/fake/device/root'))
+
+ def testStringDevicePath(self):
+ self.assertEquals(
+ '/another/fake/device/path',
+ local_device_test_run.SubstituteDeviceRoot(
+ '/another/fake/device/path', '/fake/device/root'))
+
+ def testListWithNoneDevicePath(self):
+ self.assertEquals(
+ '/fake/device/root/subpath',
+ local_device_test_run.SubstituteDeviceRoot(
+ [None, 'subpath'], '/fake/device/root'))
+
+ def testListWithoutNoneDevicePath(self):
+ self.assertEquals(
+ '/another/fake/device/path',
+ local_device_test_run.SubstituteDeviceRoot(
+ ['/', 'another', 'fake', 'device', 'path'],
+ '/fake/device/root'))
+
+
+class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun):
+
+ # pylint: disable=abstract-method
+
+ def __init__(self):
+ super(TestLocalDeviceTestRun, self).__init__(
+ mock.MagicMock(), mock.MagicMock())
+
+
+class TestLocalDeviceNonStringTestRun(
+ local_device_test_run.LocalDeviceTestRun):
+
+ # pylint: disable=abstract-method
+
+ def __init__(self):
+ super(TestLocalDeviceNonStringTestRun, self).__init__(
+ mock.MagicMock(), mock.MagicMock())
+
+ def _GetUniqueTestName(self, test):
+ return test['name']
+
+
+class LocalDeviceTestRunTest(unittest.TestCase):
+
+ def testGetTestsToRetry_allTestsPassed(self):
+ results = [
+ base_test_result.BaseTestResult(
+ 'Test1', base_test_result.ResultType.PASS),
+ base_test_result.BaseTestResult(
+ 'Test2', base_test_result.ResultType.PASS),
+ ]
+
+ tests = [r.GetName() for r in results]
+ try_results = base_test_result.TestRunResults()
+ try_results.AddResults(results)
+
+ test_run = TestLocalDeviceTestRun()
+ tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+ self.assertEquals(0, len(tests_to_retry))
+
+ def testGetTestsToRetry_testFailed(self):
+ results = [
+ base_test_result.BaseTestResult(
+ 'Test1', base_test_result.ResultType.FAIL),
+ base_test_result.BaseTestResult(
+ 'Test2', base_test_result.ResultType.PASS),
+ ]
+
+ tests = [r.GetName() for r in results]
+ try_results = base_test_result.TestRunResults()
+ try_results.AddResults(results)
+
+ test_run = TestLocalDeviceTestRun()
+ tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+ self.assertEquals(1, len(tests_to_retry))
+ self.assertIn('Test1', tests_to_retry)
+
+ def testGetTestsToRetry_testUnknown(self):
+ results = [
+ base_test_result.BaseTestResult(
+ 'Test2', base_test_result.ResultType.PASS),
+ ]
+
+ tests = ['Test1'] + [r.GetName() for r in results]
+ try_results = base_test_result.TestRunResults()
+ try_results.AddResults(results)
+
+ test_run = TestLocalDeviceTestRun()
+ tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+ self.assertEquals(1, len(tests_to_retry))
+ self.assertIn('Test1', tests_to_retry)
+
+ def testGetTestsToRetry_wildcardFilter_allPass(self):
+ results = [
+ base_test_result.BaseTestResult(
+ 'TestCase.Test1', base_test_result.ResultType.PASS),
+ base_test_result.BaseTestResult(
+ 'TestCase.Test2', base_test_result.ResultType.PASS),
+ ]
+
+ tests = ['TestCase.*']
+ try_results = base_test_result.TestRunResults()
+ try_results.AddResults(results)
+
+ test_run = TestLocalDeviceTestRun()
+ tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+ self.assertEquals(0, len(tests_to_retry))
+
+ def testGetTestsToRetry_wildcardFilter_oneFails(self):
+ results = [
+ base_test_result.BaseTestResult(
+ 'TestCase.Test1', base_test_result.ResultType.PASS),
+ base_test_result.BaseTestResult(
+ 'TestCase.Test2', base_test_result.ResultType.FAIL),
+ ]
+
+ tests = ['TestCase.*']
+ try_results = base_test_result.TestRunResults()
+ try_results.AddResults(results)
+
+ test_run = TestLocalDeviceTestRun()
+ tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+ self.assertEquals(1, len(tests_to_retry))
+ self.assertIn('TestCase.*', tests_to_retry)
+
+ def testGetTestsToRetry_nonStringTests(self):
+ results = [
+ base_test_result.BaseTestResult(
+ 'TestCase.Test1', base_test_result.ResultType.PASS),
+ base_test_result.BaseTestResult(
+ 'TestCase.Test2', base_test_result.ResultType.FAIL),
+ ]
+
+ tests = [
+ {'name': 'TestCase.Test1'},
+ {'name': 'TestCase.Test2'},
+ ]
+ try_results = base_test_result.TestRunResults()
+ try_results.AddResults(results)
+
+ test_run = TestLocalDeviceNonStringTestRun()
+ tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
+ self.assertEquals(1, len(tests_to_retry))
+ self.assertIsInstance(tests_to_retry[0], dict)
+ self.assertEquals(tests[1], tests_to_retry[0])
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/local/local_test_server_spawner.py b/deps/v8/build/android/pylib/local/local_test_server_spawner.py
new file mode 100644
index 0000000000..6cd282e3a4
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/local_test_server_spawner.py
@@ -0,0 +1,100 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import time
+
+from devil.android import forwarder
+from devil.android import ports
+from pylib.base import test_server
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import chrome_test_server_spawner
+
+
+# The tests should not need more than one test server instance.
+MAX_TEST_SERVER_INSTANCES = 1
+
+
+def _WaitUntil(predicate, max_attempts=5):
+ """Blocks until the provided predicate (function) is true.
+
+ Returns:
+ Whether the provided predicate was satisfied once (before the timeout).
+ """
+ sleep_time_sec = 0.025
+ for _ in xrange(1, max_attempts):
+ if predicate():
+ return True
+ time.sleep(sleep_time_sec)
+ sleep_time_sec = min(1, sleep_time_sec * 2) # Don't wait more than 1 sec.
+ return False
+
+
+class PortForwarderAndroid(chrome_test_server_spawner.PortForwarder):
+ def __init__(self, device, tool):
+ self.device = device
+ self.tool = tool
+
+ def Map(self, port_pairs):
+ forwarder.Forwarder.Map(port_pairs, self.device, self.tool)
+
+ def GetDevicePortForHostPort(self, host_port):
+ return forwarder.Forwarder.DevicePortForHostPort(host_port)
+
+ def WaitHostPortAvailable(self, port):
+ return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+
+ def WaitPortNotAvailable(self, port):
+ return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
+
+ def WaitDevicePortReady(self, port):
+ return _WaitUntil(lambda: ports.IsDevicePortUsed(self.device, port))
+
+ def Unmap(self, device_port):
+ forwarder.Forwarder.UnmapDevicePort(device_port, self.device)
+
+
+class LocalTestServerSpawner(test_server.TestServer):
+
+ def __init__(self, port, device, tool):
+ super(LocalTestServerSpawner, self).__init__()
+ self._device = device
+ self._spawning_server = chrome_test_server_spawner.SpawningServer(
+ port, PortForwarderAndroid(device, tool), MAX_TEST_SERVER_INSTANCES)
+ self._tool = tool
+
+ @property
+ def server_address(self):
+ return self._spawning_server.server.server_address
+
+ @property
+ def port(self):
+ return self.server_address[1]
+
+ #override
+ def SetUp(self):
+ # See net/test/spawned_test_server/test_server_config.h for description of
+ # the fields in the config file.
+ test_server_config = json.dumps({
+ 'address': '127.0.0.1',
+ 'spawner_url_base': 'http://localhost:%d' % self.port
+ })
+ self._device.WriteFile(
+ '%s/net-test-server-config' % self._device.GetExternalStoragePath(),
+ test_server_config)
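+    # Forward the spawner's port so code running on the device can reach
+    # the spawning server running on the host.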
+ forwarder.Forwarder.Map(
+ [(self.port, self.port)], self._device, self._tool)
+ self._spawning_server.Start()
+
+ #override
+ def Reset(self):
+ self._spawning_server.CleanupState()
+
+ #override
+ def TearDown(self):
+ self.Reset()
+ self._spawning_server.Stop()
+ forwarder.Forwarder.UnmapDevicePort(self.port, self._device)
diff --git a/deps/v8/build/android/pylib/local/machine/__init__.py b/deps/v8/build/android/pylib/local/machine/__init__.py
new file mode 100644
index 0000000000..ca3e206fdd
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/machine/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/local/machine/local_machine_environment.py b/deps/v8/build/android/pylib/local/machine/local_machine_environment.py
new file mode 100644
index 0000000000..3752a8afbd
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/machine/local_machine_environment.py
@@ -0,0 +1,24 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import devil_chromium
+from pylib import constants
+from pylib.base import environment
+
+
+class LocalMachineEnvironment(environment.Environment):
+
+ def __init__(self, _args, output_manager, _error_func):
+ super(LocalMachineEnvironment, self).__init__(output_manager)
+
+ devil_chromium.Initialize(
+ output_directory=constants.GetOutDirectory())
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
diff --git a/deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py b/deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py
new file mode 100644
index 0000000000..dbfc505d81
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import zipfile
+
+from devil.utils import cmd_helper
+from devil.utils import reraiser_thread
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.constants import host_paths
+from pylib.results import json_results
+from py_utils import tempfile_ext
+
+
+class LocalMachineJunitTestRun(test_run.TestRun):
+ def __init__(self, env, test_instance):
+ super(LocalMachineJunitTestRun, self).__init__(env, test_instance)
+
+ #override
+ def TestPackage(self):
+ return self._test_instance.suite
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def RunTests(self, results):
+ with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+ json_file_path = os.path.join(temp_dir, 'results.json')
+
+ # Extract resources needed for test.
+ # TODO(mikecase): Investigate saving md5sums of zipfiles, and only
+ # extract zipfiles when they change.
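+      # Returns a thunk per zipfile so all extractions can run in
+      # parallel via RunAsync below.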
+ def extract_resource_zip(resource_zip, filename):
+ def helper():
+ extract_dest = os.path.join(temp_dir, filename)
+ with zipfile.ZipFile(resource_zip, 'r') as zf:
+ zf.extractall(extract_dest)
+ return extract_dest
+ return helper
+
+ resource_dirs = reraiser_thread.RunAsync(
+          extract_resource_zip(resource_zip, 'resources_%d' % index)
+          for index, resource_zip in enumerate(
+              self._test_instance.resource_zips))
+
+ java_script = os.path.join(
+ constants.GetOutDirectory(), 'bin', 'helper',
+ self._test_instance.suite)
+ command = [java_script]
+
+ # Add Jar arguments.
+ jar_args = ['-test-jars', self._test_instance.suite + '.jar',
+ '-json-results-file', json_file_path]
+ if self._test_instance.test_filter:
+ jar_args.extend(['-gtest-filter', self._test_instance.test_filter])
+ if self._test_instance.package_filter:
+ jar_args.extend(['-package-filter',
+ self._test_instance.package_filter])
+ if self._test_instance.runner_filter:
+ jar_args.extend(['-runner-filter', self._test_instance.runner_filter])
+ command.extend(['--jar-args', '"%s"' % ' '.join(jar_args)])
+
+ # Add JVM arguments.
+ jvm_args = ['-Drobolectric.dependency.dir=%s' %
+ self._test_instance.robolectric_runtime_deps_dir,
+ '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,]
+
+ if self._test_instance.android_manifest_path:
+ jvm_args += ['-Dchromium.robolectric.manifest=%s' %
+ self._test_instance.android_manifest_path]
+
+ if self._test_instance.package_name:
+ jvm_args += ['-Dchromium.robolectric.package.name=%s' %
+ self._test_instance.package_name]
+
+ if resource_dirs:
+ jvm_args += ['-Dchromium.robolectric.resource.dirs=%s' %
+ ':'.join(resource_dirs)]
+
+ if logging.getLogger().isEnabledFor(logging.INFO):
+ jvm_args += ['-Drobolectric.logging=stdout']
+
+ if self._test_instance.debug_socket:
+ jvm_args += ['-agentlib:jdwp=transport=dt_socket'
+ ',server=y,suspend=y,address=%s' %
+ self._test_instance.debug_socket]
+
+ if self._test_instance.coverage_dir:
+ if not os.path.exists(self._test_instance.coverage_dir):
+ os.makedirs(self._test_instance.coverage_dir)
+ elif not os.path.isdir(self._test_instance.coverage_dir):
+ raise Exception('--coverage-dir takes a directory, not file path.')
+ if self._test_instance.jacoco:
+ jacoco_coverage_file = os.path.join(
+ self._test_instance.coverage_dir,
+ '%s.exec' % self._test_instance.suite)
+ jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+ 'third_party', 'jacoco', 'lib',
+ 'jacocoagent.jar')
+ jacoco_args = '-javaagent:{}=destfile={},includes=org.chromium.*'
+ jvm_args.append(
+ jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
+ else:
+ jvm_args.append('-Demma.coverage.out.file=%s' % os.path.join(
+ self._test_instance.coverage_dir,
+ '%s.ec' % self._test_instance.suite))
+
+ if jvm_args:
+ command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])
+
+ cmd_helper.RunCmd(command)
+ try:
+ with open(json_file_path, 'r') as f:
+ results_list = json_results.ParseResultsFromJson(
+ json.loads(f.read()))
+ except IOError:
+ # In the case of a failure in the JUnit or Robolectric test runner
+ # the output json file may never be written.
+ results_list = [
+ base_test_result.BaseTestResult(
+ 'Test Runner Failure', base_test_result.ResultType.UNKNOWN)
+ ]
+
+ test_run_results = base_test_result.TestRunResults()
+ test_run_results.AddResults(results_list)
+ results.append(test_run_results)
+
+ #override
+ def TearDown(self):
+ pass
diff --git a/deps/v8/build/android/pylib/monkey/__init__.py b/deps/v8/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/monkey/__init__.py
diff --git a/deps/v8/build/android/pylib/monkey/monkey_test_instance.py b/deps/v8/build/android/pylib/monkey/monkey_test_instance.py
new file mode 100644
index 0000000000..10b11315bc
--- /dev/null
+++ b/deps/v8/build/android/pylib/monkey/monkey_test_instance.py
@@ -0,0 +1,72 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import random
+
+from pylib import constants
+from pylib.base import test_instance
+
+
+_SINGLE_EVENT_TIMEOUT = 100 # Milliseconds
+
+class MonkeyTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, _):
+ super(MonkeyTestInstance, self).__init__()
+
+ self._categories = args.categories
+ self._event_count = args.event_count
+ self._seed = args.seed or random.randint(1, 100)
+ self._throttle = args.throttle
+ self._verbose_count = args.verbose_count
+
+ self._package = constants.PACKAGE_INFO[args.browser].package
+ self._activity = constants.PACKAGE_INFO[args.browser].activity
+
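+    # Overall timeout: every event is budgeted its throttle delay plus a
+    # fixed per-event allowance, converted from milliseconds to seconds.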
+ self._timeout_s = (
+ self.event_count * (self.throttle + _SINGLE_EVENT_TIMEOUT)) / 1000
+
+ #override
+ def TestType(self):
+ return 'monkey'
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
+
+ @property
+ def activity(self):
+ return self._activity
+
+ @property
+ def categories(self):
+ return self._categories
+
+ @property
+ def event_count(self):
+ return self._event_count
+
+ @property
+ def package(self):
+ return self._package
+
+ @property
+ def seed(self):
+ return self._seed
+
+ @property
+ def throttle(self):
+ return self._throttle
+
+ @property
+ def timeout(self):
+ return self._timeout_s
+
+ @property
+ def verbose_count(self):
+ return self._verbose_count
diff --git a/deps/v8/build/android/pylib/output/__init__.py b/deps/v8/build/android/pylib/output/__init__.py
new file mode 100644
index 0000000000..a22a6ee39a
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/output/local_output_manager.py b/deps/v8/build/android/pylib/output/local_output_manager.py
new file mode 100644
index 0000000000..89becd7f71
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/local_output_manager.py
@@ -0,0 +1,45 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import time
+import os
+import shutil
+import urllib
+
+from pylib.base import output_manager
+
+
+class LocalOutputManager(output_manager.OutputManager):
+ """Saves and manages test output files locally in output directory.
+
+  Output files will be saved in {output_dir}/TEST_RESULTS_{timestamp}.
+ """
+
+ def __init__(self, output_dir):
+ super(LocalOutputManager, self).__init__()
+ timestamp = time.strftime(
+ '%Y_%m_%dT%H_%M_%S', time.localtime())
+ self._output_root = os.path.abspath(os.path.join(
+ output_dir, 'TEST_RESULTS_%s' % timestamp))
+
+ #override
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ return LocalArchivedFile(
+ out_filename, out_subdir, datatype, self._output_root)
+
+
+class LocalArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self, out_filename, out_subdir, datatype, out_root):
+ super(LocalArchivedFile, self).__init__(
+ out_filename, out_subdir, datatype)
+ self._output_path = os.path.join(out_root, out_subdir, out_filename)
+
+ def _Link(self):
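+    # Percent-encode the path so the file:// link stays valid even when
+    # the output directory contains spaces or other special characters.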
+ return 'file://%s' % urllib.quote(self._output_path)
+
+ def _Archive(self):
+ if not os.path.exists(os.path.dirname(self._output_path)):
+ os.makedirs(os.path.dirname(self._output_path))
+ shutil.copy(self.name, self._output_path)
diff --git a/deps/v8/build/android/pylib/output/local_output_manager_test.py b/deps/v8/build/android/pylib/output/local_output_manager_test.py
new file mode 100755
index 0000000000..12452a6616
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/local_output_manager_test.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import tempfile
+import shutil
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import local_output_manager
+
+
+class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+ def setUp(self):
+ self._output_dir = tempfile.mkdtemp()
+ self._output_manager = local_output_manager.LocalOutputManager(
+ self._output_dir)
+
+ def testUsableTempFile(self):
+ self.assertUsableTempFile(
+ self._output_manager._CreateArchivedFile(
+ 'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+ def tearDown(self):
+ shutil.rmtree(self._output_dir)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/output/noop_output_manager.py b/deps/v8/build/android/pylib/output/noop_output_manager.py
new file mode 100644
index 0000000000..d29a7432f9
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/noop_output_manager.py
@@ -0,0 +1,42 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import output_manager
+
+# TODO(jbudorick): This class is currently mostly unused.
+# Add a --bot-mode argument that all bots pass. If --bot-mode and
+# --local-output args are both not passed to test runner then use this
+# as the output manager impl.
+
+# pylint: disable=no-self-use
+
+class NoopOutputManager(output_manager.OutputManager):
+
+ def __init__(self):
+ super(NoopOutputManager, self).__init__()
+
+ #override
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ del out_filename, out_subdir, datatype
+ return NoopArchivedFile()
+
+
+class NoopArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self):
+ super(NoopArchivedFile, self).__init__(None, None, None)
+
+ def Link(self):
+ """NoopArchivedFiles are not retained."""
+ return ''
+
+ def _Link(self):
+ pass
+
+ def Archive(self):
+ """NoopArchivedFiles are not retained."""
+ pass
+
+ def _Archive(self):
+ pass
diff --git a/deps/v8/build/android/pylib/output/noop_output_manager_test.py b/deps/v8/build/android/pylib/output/noop_output_manager_test.py
new file mode 100755
index 0000000000..c735a0469a
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/noop_output_manager_test.py
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import noop_output_manager
+
+
+class NoopOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+ def setUp(self):
+ self._output_manager = noop_output_manager.NoopOutputManager()
+
+ def testUsableTempFile(self):
+ self.assertUsableTempFile(
+ self._output_manager._CreateArchivedFile(
+ 'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/output/remote_output_manager.py b/deps/v8/build/android/pylib/output/remote_output_manager.py
new file mode 100644
index 0000000000..9fdb4bf65f
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/remote_output_manager.py
@@ -0,0 +1,89 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+from pylib.base import output_manager
+from pylib.output import noop_output_manager
+from pylib.utils import logdog_helper
+from pylib.utils import google_storage_helper
+
+
+class RemoteOutputManager(output_manager.OutputManager):
+
+ def __init__(self, bucket):
+ """Uploads output files to Google Storage or LogDog.
+
+ Files will either be uploaded directly to Google Storage or LogDog
+ depending on the datatype.
+
+ Args
+ bucket: Bucket to use when saving to Google Storage.
+ """
+ super(RemoteOutputManager, self).__init__()
+ self._bucket = bucket
+
+ #override
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
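+    # Text output is streamed to LogDog when a LogDog client is available;
+    # other datatypes go to Google Storage, or are dropped when no bucket
+    # was provided.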
+ if datatype == output_manager.Datatype.TEXT:
+ try:
+ logdog_helper.get_logdog_client()
+ return LogdogArchivedFile(out_filename, out_subdir, datatype)
+ except RuntimeError:
+ return noop_output_manager.NoopArchivedFile()
+ else:
+ if self._bucket is None:
+ return noop_output_manager.NoopArchivedFile()
+ return GoogleStorageArchivedFile(
+ out_filename, out_subdir, datatype, self._bucket)
+
+
+class LogdogArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self, out_filename, out_subdir, datatype):
+ super(LogdogArchivedFile, self).__init__(out_filename, out_subdir, datatype)
+ self._stream_name = '%s_%s' % (out_subdir, out_filename)
+
+ def _Link(self):
+ return logdog_helper.get_viewer_url(self._stream_name)
+
+ def _Archive(self):
+ with open(self.name, 'r') as f:
+ logdog_helper.text(self._stream_name, f.read())
+
+
+class GoogleStorageArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self, out_filename, out_subdir, datatype, bucket):
+ super(GoogleStorageArchivedFile, self).__init__(
+ out_filename, out_subdir, datatype)
+ self._bucket = bucket
+ self._upload_path = None
+ self._content_addressed = None
+
+ def _PrepareArchive(self):
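+    # HTML/PNG/JSON files are content-addressed: the upload path is the
+    # SHA-1 of the file contents, so identical files deduplicate naturally.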
+ self._content_addressed = (self._datatype in (
+ output_manager.Datatype.HTML,
+ output_manager.Datatype.PNG,
+ output_manager.Datatype.JSON))
+ if self._content_addressed:
+ sha1 = hashlib.sha1()
+ with open(self.name, 'rb') as f:
+ sha1.update(f.read())
+ self._upload_path = sha1.hexdigest()
+ else:
+ self._upload_path = os.path.join(self._out_subdir, self._out_filename)
+
+ def _Link(self):
+ return google_storage_helper.get_url_link(
+ self._upload_path, self._bucket)
+
+ def _Archive(self):
+ if (self._content_addressed and
+ google_storage_helper.exists(self._upload_path, self._bucket)):
+ return
+
+ google_storage_helper.upload(
+ self._upload_path, self.name, self._bucket, content_type=self._datatype)
diff --git a/deps/v8/build/android/pylib/output/remote_output_manager_test.py b/deps/v8/build/android/pylib/output/remote_output_manager_test.py
new file mode 100755
index 0000000000..6917260dd7
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/remote_output_manager_test.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.constants import host_paths
+from pylib.output import remote_output_manager
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+@mock.patch('pylib.utils.google_storage_helper')
+class RemoteOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+ def setUp(self):
+ self._output_manager = remote_output_manager.RemoteOutputManager(
+ 'this-is-a-fake-bucket')
+
+ def testUsableTempFile(self, google_storage_helper_mock):
+ del google_storage_helper_mock
+ self.assertUsableTempFile(
+ self._output_manager._CreateArchivedFile(
+ 'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/perf/__init__.py b/deps/v8/build/android/pylib/perf/__init__.py
new file mode 100644
index 0000000000..9228df89b0
--- /dev/null
+++ b/deps/v8/build/android/pylib/perf/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/perf/perf_test_instance.py b/deps/v8/build/android/pylib/perf/perf_test_instance.py
new file mode 100644
index 0000000000..49d75e48e3
--- /dev/null
+++ b/deps/v8/build/android/pylib/perf/perf_test_instance.py
@@ -0,0 +1,239 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import pickle
+import re
+
+from devil import base_error
+from devil.utils import cmd_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.utils import test_filter
+
+
+_GIT_CR_POS_RE = re.compile(r'^Cr-Commit-Position: refs/heads/master@{#(\d+)}$')
+
+
+def _GetPersistedResult(test_name):
+ file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+ if not os.path.exists(file_name):
+ logging.error('File not found %s', file_name)
+ return None
+
+ with file(file_name, 'r') as f:
+ return pickle.load(f)
+
+
+def _GetChromiumRevision():
+ # pylint: disable=line-too-long
+ """Get the git hash and commit position of the chromium master branch.
+
+ See:
+ https://chromium.googlesource.com/chromium/tools/build/+/387e3cf3/scripts/slave/runtest.py#211
+
+ Returns:
+ A dictionary with 'revision' and 'commit_pos' keys.
+ """
+ # pylint: enable=line-too-long
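+  # The commit position is parsed from a commit-message trailer of the form:
+  #   Cr-Commit-Position: refs/heads/master@{#123456}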
+ status, output = cmd_helper.GetCmdStatusAndOutput(
+ ['git', 'log', '-n', '1', '--pretty=format:%H%n%B', 'HEAD'],
+ cwd=host_paths.DIR_SOURCE_ROOT)
+ revision = None
+ commit_pos = None
+ if not status:
+ lines = output.splitlines()
+ revision = lines[0]
+ for line in reversed(lines):
+ m = _GIT_CR_POS_RE.match(line.strip())
+ if m:
+ commit_pos = int(m.group(1))
+ break
+ return {'revision': revision, 'commit_pos': commit_pos}
+
+
+class PerfTestInstance(test_instance.TestInstance):
+ def __init__(self, args, _):
+ super(PerfTestInstance, self).__init__()
+
+ self._collect_chartjson_data = args.collect_chartjson_data
+ self._dry_run = args.dry_run
+ self._output_dir_archive_path = args.output_dir_archive_path
+ # TODO(rnephew): Get rid of this when everything uses
+ # --output-dir-archive-path
+ if self._output_dir_archive_path is None and args.get_output_dir_archive:
+ self._output_dir_archive_path = args.get_output_dir_archive
+ self._known_devices_file = args.known_devices_file
+ self._max_battery_temp = args.max_battery_temp
+ self._min_battery_level = args.min_battery_level
+ self._no_timeout = args.no_timeout
+ self._output_chartjson_data = args.output_chartjson_data
+ self._output_json_list = args.output_json_list
+ self._print_step = args.print_step
+ self._single_step = (
+ ' '.join(args.single_step_command) if args.single_step else None)
+ self._steps = args.steps
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+ self._write_buildbot_json = args.write_buildbot_json
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
+
+ def OutputJsonList(self):
+ try:
+ with file(self._steps, 'r') as i:
+ all_steps = json.load(i)
+
+ step_values = []
+ for k, v in all_steps['steps'].iteritems():
+ data = {'test': k, 'device_affinity': v['device_affinity']}
+
+ persisted_result = _GetPersistedResult(k)
+ if persisted_result:
+ data['start_time'] = persisted_result['start_time']
+ data['end_time'] = persisted_result['end_time']
+ data['total_time'] = persisted_result['total_time']
+ data['has_archive'] = persisted_result['archive_bytes'] is not None
+ step_values.append(data)
+
+ with file(self.output_json_list, 'w') as o:
+ o.write(json.dumps(step_values))
+ return base_test_result.ResultType.PASS
+ except KeyError:
+ logging.exception('Persistent results file missing key.')
+ return base_test_result.ResultType.FAIL
+
+ def PrintTestOutput(self):
+ """Helper method to print the output of previously executed test_name.
+
+ Test_name is passed from the command line as print_step
+
+ Returns:
+ exit code generated by the test step.
+ """
+ persisted_result = _GetPersistedResult(self._print_step)
+ if not persisted_result:
+ raise PersistentDataError('No data for test %s found.' % self._print_step)
+ logging.info('*' * 80)
+ logging.info('Output from:')
+ logging.info(persisted_result['cmd'])
+ logging.info('*' * 80)
+
+ output_formatted = ''
+ persisted_outputs = persisted_result['output']
+ for i in xrange(len(persisted_outputs)):
+ output_formatted += '\n\nOutput from run #%d:\n\n%s' % (
+ i, persisted_outputs[i])
+ print output_formatted
+
+ if self.output_chartjson_data:
+ with file(self.output_chartjson_data, 'w') as f:
+ f.write(persisted_result['chartjson'])
+
+ if self.output_dir_archive_path:
+ if persisted_result['archive_bytes'] is not None:
+ with file(self.output_dir_archive_path, 'wb') as f:
+ f.write(persisted_result['archive_bytes'])
+ else:
+ logging.error('The output dir was not archived.')
+ if persisted_result['exit_code'] == 0:
+ return base_test_result.ResultType.PASS
+ return base_test_result.ResultType.FAIL
+
+ #override
+ def TestType(self):
+ return 'perf'
+
+ @staticmethod
+ def ReadChartjsonOutput(output_dir):
+ if not output_dir:
+ return ''
+ json_output_path = os.path.join(output_dir, 'results-chart.json')
+ try:
+ with open(json_output_path) as f:
+ return f.read()
+ except IOError:
+ logging.exception('Exception when reading chartjson.')
+ logging.error('This usually means that telemetry did not run, so it could'
+ ' not generate the file. Please check the device running'
+ ' the test.')
+ return ''
+
+ def WriteBuildBotJson(self, output_dir):
+ """Write metadata about the buildbot environment to the output dir."""
+ if not output_dir or not self._write_buildbot_json:
+ return
+ data = {
+ 'chromium': _GetChromiumRevision(),
+ 'environment': dict(os.environ)
+ }
+ with open(os.path.join(output_dir, 'buildbot.json'), 'w') as f:
+ json.dump(data, f, sort_keys=True, separators=(',', ': '))
+
+ @property
+ def collect_chartjson_data(self):
+ return self._collect_chartjson_data
+
+ @property
+ def dry_run(self):
+ return self._dry_run
+
+ @property
+ def known_devices_file(self):
+ return self._known_devices_file
+
+ @property
+ def max_battery_temp(self):
+ return self._max_battery_temp
+
+ @property
+ def min_battery_level(self):
+ return self._min_battery_level
+
+ @property
+ def no_timeout(self):
+ return self._no_timeout
+
+ @property
+ def output_chartjson_data(self):
+ return self._output_chartjson_data
+
+ @property
+ def output_dir_archive_path(self):
+ return self._output_dir_archive_path
+
+ @property
+ def output_json_list(self):
+ return self._output_json_list
+
+ @property
+ def print_step(self):
+ return self._print_step
+
+ @property
+ def single_step(self):
+ return self._single_step
+
+ @property
+ def steps(self):
+ return self._steps
+
+ @property
+ def test_filter(self):
+ return self._test_filter
+
+
+class PersistentDataError(base_error.BaseError):
+ def __init__(self, message):
+ super(PersistentDataError, self).__init__(message)
+ self._is_infra_error = True
diff --git a/deps/v8/build/android/pylib/pexpect.py b/deps/v8/build/android/pylib/pexpect.py
new file mode 100644
index 0000000000..cf59fb0f6d
--- /dev/null
+++ b/deps/v8/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+ os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+ sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+ from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+ pass
diff --git a/deps/v8/build/android/pylib/restart_adbd.sh b/deps/v8/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000000..393b2ebac0
--- /dev/null
+++ b/deps/v8/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+ stop adbd
+ start adbd
+}
+
+restart &
diff --git a/deps/v8/build/android/pylib/results/__init__.py b/deps/v8/build/android/pylib/results/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
new file mode 100644
index 0000000000..5e5f83f2a2
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -0,0 +1,699 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# tools/blinkpy/web_tests/layout_package/json_results_generator.py
+# tools/blinkpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+ return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+ # FIXME: Kill this code once the server returns json instead of jsonp.
+ if HasJSONWrapper(json_content):
+ return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+ return json_content
+
+
+def WriteJSON(json_object, file_path, callback=None):
+ # Specify separators in order to get compact encoding.
+ json_string = json.dumps(json_object, separators=(',', ':'))
+ if callback:
+ json_string = callback + '(' + json_string + ');'
+ with open(file_path, 'w') as fp:
+ fp.write(json_string)
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+ """Flattens the trie of paths, prepending a prefix to each."""
+ result = {}
+ for name, data in trie.iteritems():
+ if prefix:
+ name = prefix + '/' + name
+
+ if len(data) and not 'results' in data:
+ result.update(ConvertTrieToFlatPaths(data, name))
+ else:
+ result[name] = data
+
+ return result
+
+
+def AddPathToTrie(path, value, trie):
+ """Inserts a single path and value into a directory trie structure."""
+ if not '/' in path:
+ trie[path] = value
+ return
+
+ directory, _, rest = path.partition('/')
+ if not directory in trie:
+ trie[directory] = {}
+ AddPathToTrie(rest, value, trie[directory])
+
+
+def TestTimingsTrie(individual_test_timings):
+ """Breaks a test name into dicts by directory
+
+ foo/bar/baz.html: 1ms
+ foo/bar/baz1.html: 3ms
+
+ becomes
+ foo: {
+ bar: {
+ baz.html: 1,
+ baz1.html: 3
+ }
+ }
+ """
+ trie = {}
+ for test_result in individual_test_timings:
+ test = test_result.test_name
+
+ AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+ return trie
+
+
+class TestResult(object):
+ """A simple class that represents a single test result."""
+
+ # Test modifier constants.
+ (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+ def __init__(self, test, failed=False, elapsed_time=0):
+ self.test_name = test
+ self.failed = failed
+ self.test_run_time = elapsed_time
+
+ test_name = test
+ try:
+ test_name = test.split('.')[1]
+ except IndexError:
+ _log.warn('Invalid test name: %s.', test)
+
+ if test_name.startswith('FAILS_'):
+ self.modifier = self.FAILS
+ elif test_name.startswith('FLAKY_'):
+ self.modifier = self.FLAKY
+ elif test_name.startswith('DISABLED_'):
+ self.modifier = self.DISABLED
+ else:
+ self.modifier = self.NONE
+
+ def Fixable(self):
+ return self.failed or self.modifier == self.DISABLED
+
+
+class JSONResultsGeneratorBase(object):
+ """A JSON results generator for generic tests."""
+
+ MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+ # Min time (seconds) that will be added to the JSON.
+ MIN_TIME = 1
+
+ # Note that in non-chromium tests those chars are used to indicate
+ # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+ PASS_RESULT = 'P'
+ SKIP_RESULT = 'X'
+ FAIL_RESULT = 'F'
+ FLAKY_RESULT = 'L'
+ NO_DATA_RESULT = 'N'
+
+ MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+ TestResult.DISABLED: SKIP_RESULT,
+ TestResult.FAILS: FAIL_RESULT,
+ TestResult.FLAKY: FLAKY_RESULT}
+
+ VERSION = 4
+ VERSION_KEY = 'version'
+ RESULTS = 'results'
+ TIMES = 'times'
+ BUILD_NUMBERS = 'buildNumbers'
+ TIME = 'secondsSinceEpoch'
+ TESTS = 'tests'
+
+ FIXABLE_COUNT = 'fixableCount'
+ FIXABLE = 'fixableCounts'
+ ALL_FIXABLE_COUNT = 'allFixableCount'
+
+ RESULTS_FILENAME = 'results.json'
+ TIMES_MS_FILENAME = 'times_ms.json'
+ INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+ # line too long pylint: disable=line-too-long
+ URL_FOR_TEST_LIST_JSON = (
+ 'https://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&'
+ 'master=%s')
+ # pylint: enable=line-too-long
+
+ def __init__(self, builder_name, build_name, build_number,
+ results_file_base_path, builder_base_url,
+ test_results_map, svn_repositories=None,
+ test_results_server=None,
+ test_type='',
+ master_name=''):
+ """Modifies the results.json file. Grabs it off the archive directory
+ if it is not found locally.
+
+    Args:
+ builder_name: the builder name (e.g. Webkit).
+ build_name: the build name (e.g. webkit-rel).
+ build_number: the build number.
+ results_file_base_path: Absolute path to the directory containing the
+ results json file.
+ builder_base_url: the URL where we have the archived test results.
+ If this is None no archived results will be retrieved.
+ test_results_map: A dictionary that maps test_name to TestResult.
+ svn_repositories: A (json_field_name, svn_path) pair for SVN
+ repositories that tests rely on. The SVN revision will be
+ included in the JSON with the given json_field_name.
+ test_results_server: server that hosts test results json.
+ test_type: test type string (e.g. 'layout-tests').
+ master_name: the name of the buildbot master.
+ """
+ self._builder_name = builder_name
+ self._build_name = build_name
+ self._build_number = build_number
+ self._builder_base_url = builder_base_url
+ self._results_directory = results_file_base_path
+
+ self._test_results_map = test_results_map
+ self._test_results = test_results_map.values()
+
+ self._svn_repositories = svn_repositories
+ if not self._svn_repositories:
+ self._svn_repositories = {}
+
+ self._test_results_server = test_results_server
+ self._test_type = test_type
+ self._master_name = master_name
+
+ self._archived_results = None
+
+ def GenerateJSONOutput(self):
+ json_object = self.GetJSON()
+ if json_object:
+ file_path = (
+ os.path.join(
+ self._results_directory,
+ self.INCREMENTAL_RESULTS_FILENAME))
+ WriteJSON(json_object, file_path)
+
+ def GenerateTimesMSFile(self):
+ times = TestTimingsTrie(self._test_results_map.values())
+ file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+ WriteJSON(times, file_path)
+
+ def GetJSON(self):
+ """Gets the results for the results.json file."""
+ results_json = {}
+
+ if not results_json:
+ results_json, error = self._GetArchivedJSONResults()
+ if error:
+ # If there was an error don't write a results.json
+ # file at all as it would lose all the information on the
+ # bot.
+ _log.error('Archive directory is inaccessible. Not '
+ 'modifying or clobbering the results.json '
+ 'file: ' + str(error))
+ return None
+
+ builder_name = self._builder_name
+ if results_json and builder_name not in results_json:
+ _log.debug('Builder name (%s) is not in the results.json file.',
+ builder_name)
+
+ self._ConvertJSONToCurrentVersion(results_json)
+
+ if builder_name not in results_json:
+ results_json[builder_name] = (
+ self._CreateResultsForBuilderJSON())
+
+ results_for_builder = results_json[builder_name]
+
+ if builder_name:
+ self._InsertGenericMetaData(results_for_builder)
+
+ self._InsertFailureSummaries(results_for_builder)
+
+ # Update the all failing tests with result type and time.
+ tests = results_for_builder[self.TESTS]
+ all_failing_tests = self._GetFailedTestNames()
+ all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+ for test in all_failing_tests:
+ self._InsertTestTimeAndResult(test, tests)
+
+ return results_json
+
+ def SetArchivedResults(self, archived_results):
+ self._archived_results = archived_results
+
+ def UploadJSONFiles(self, json_files):
+ """Uploads the given json_files to the test_results_server (if the
+ test_results_server is given)."""
+ if not self._test_results_server:
+ return
+
+ if not self._master_name:
+ _log.error(
+ '--test-results-server was set, but --master-name was not. Not '
+ 'uploading JSON files.')
+ return
+
+ _log.info('Uploading JSON files for builder: %s', self._builder_name)
+ attrs = [('builder', self._builder_name),
+ ('testtype', self._test_type),
+ ('master', self._master_name)]
+
+ files = [(json_file, os.path.join(self._results_directory, json_file))
+ for json_file in json_files]
+
+ url = 'https://%s/testfile/upload' % self._test_results_server
+ # Set uploading timeout in case appengine server is having problems.
+ # 120 seconds are more than enough to upload test results.
+ uploader = _FileUploader(url, 120)
+ try:
+ response = uploader.UploadAsMultipartFormData(files, attrs)
+ if response:
+ if response.code == 200:
+ _log.info('JSON uploaded.')
+ else:
+ _log.debug(
+ "JSON upload failed, %d: '%s'", response.code, response.read())
+ else:
+ _log.error('JSON upload failed; no response returned')
+ except Exception, err: # pylint: disable=broad-except
+ _log.error('Upload failed: %s', err)
+ return
+
+ def _GetTestTiming(self, test_name):
+ """Returns test timing data (elapsed time) in second
+ for the given test_name."""
+ if test_name in self._test_results_map:
+ # Floor for now to get time in seconds.
+ return int(self._test_results_map[test_name].test_run_time)
+ return 0
+
+ def _GetFailedTestNames(self):
+ """Returns a set of failed test names."""
+ return set([r.test_name for r in self._test_results if r.failed])
+
+ def _GetModifierChar(self, test_name):
+ """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+ PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+ for the given test_name.
+ """
+ if test_name not in self._test_results_map:
+ return self.__class__.NO_DATA_RESULT
+
+ test_result = self._test_results_map[test_name]
+ if test_result.modifier in self.MODIFIER_TO_CHAR.keys():
+ return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+ return self.__class__.PASS_RESULT
+
+ def _get_result_char(self, test_name):
+ """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+ PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+ for the given test_name.
+ """
+ if test_name not in self._test_results_map:
+ return self.__class__.NO_DATA_RESULT
+
+ test_result = self._test_results_map[test_name]
+ if test_result.modifier == TestResult.DISABLED:
+ return self.__class__.SKIP_RESULT
+
+ if test_result.failed:
+ return self.__class__.FAIL_RESULT
+
+ return self.__class__.PASS_RESULT
+
+ def _GetSVNRevision(self, in_directory):
+ """Returns the svn revision for the given directory.
+
+ Args:
+ in_directory: The directory where svn is to be run.
+ """
+ # This is overridden in flakiness_dashboard_results_uploader.py.
+ raise NotImplementedError()
+
+ def _GetArchivedJSONResults(self):
+ """Download JSON file that only contains test
+ name list from test-results server. This is for generating incremental
+ JSON so the file generated has info for tests that failed before but
+ pass or are skipped from current run.
+
+ Returns (archived_results, error) tuple where error is None if results
+ were successfully read.
+ """
+ results_json = {}
+ old_results = None
+ error = None
+
+ if not self._test_results_server:
+ return {}, None
+
+ results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+ (urllib2.quote(self._test_results_server),
+ urllib2.quote(self._builder_name),
+ self.RESULTS_FILENAME,
+ urllib2.quote(self._test_type),
+ urllib2.quote(self._master_name)))
+
+ # pylint: disable=redefined-variable-type
+ try:
+ # FIXME: We should talk to the network via a Host object.
+ results_file = urllib2.urlopen(results_file_url)
+ old_results = results_file.read()
+ except urllib2.HTTPError, http_error:
+ # A non-4xx status code means the bot is hosed for some reason
+ # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+ error = http_error
+ except urllib2.URLError, url_error:
+ error = url_error
+ # pylint: enable=redefined-variable-type
+
+ if old_results:
+ # Strip the prefix and suffix so we can get the actual JSON object.
+ old_results = StripJSONWrapper(old_results)
+
+ try:
+ results_json = json.loads(old_results)
+ except Exception: # pylint: disable=broad-except
+ _log.debug('results.json was not valid JSON. Clobbering.')
+ # The JSON file is not valid JSON. Just clobber the results.
+ results_json = {}
+ else:
+ _log.debug('Old JSON results do not exist. Starting fresh.')
+ results_json = {}
+
+ return results_json, error
+
+ def _InsertFailureSummaries(self, results_for_builder):
+ """Inserts aggregate pass/failure statistics into the JSON.
+ This method reads self._test_results and generates
+ FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+ Args:
+ results_for_builder: Dictionary containing the test results for a
+        single builder.
+ """
+ # Insert the number of tests that failed or skipped.
+ fixable_count = len([r for r in self._test_results if r.Fixable()])
+ self._InsertItemIntoRawList(results_for_builder,
+ fixable_count, self.FIXABLE_COUNT)
+
+ # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+ entry = {}
+ for test_name in self._test_results_map.iterkeys():
+ result_char = self._GetModifierChar(test_name)
+ entry[result_char] = entry.get(result_char, 0) + 1
+
+ # Insert the pass/skip/failure summary dictionary.
+ self._InsertItemIntoRawList(results_for_builder, entry,
+ self.FIXABLE)
+
+ # Insert the number of all the tests that are supposed to pass.
+ all_test_count = len(self._test_results)
+ self._InsertItemIntoRawList(results_for_builder,
+ all_test_count, self.ALL_FIXABLE_COUNT)
+
+ def _InsertItemIntoRawList(self, results_for_builder, item, key):
+ """Inserts the item into the list with the given key in the results for
+ this builder. Creates the list if no such list exists.
+
+ Args:
+ results_for_builder: Dictionary containing the test results for a
+        single builder.
+ item: Number or string to insert into the list.
+ key: Key in results_for_builder for the list to insert into.
+ """
+ if key in results_for_builder:
+ raw_list = results_for_builder[key]
+ else:
+ raw_list = []
+
+ raw_list.insert(0, item)
+ raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+ results_for_builder[key] = raw_list
+
+ def _InsertItemRunLengthEncoded(self, item, encoded_results):
+ """Inserts the item into the run-length encoded results.
+
+ Args:
+ item: String or number to insert.
+ encoded_results: run-length encoded results. An array of arrays, e.g.
+        [[3,'A'],[1,'Q']] encodes AAAQ.
+ """
+ if len(encoded_results) and item == encoded_results[0][1]:
+ num_results = encoded_results[0][0]
+ if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+ encoded_results[0][0] = num_results + 1
+ else:
+ # Use a list instead of a class for the run-length encoding since
+ # we want the serialized form to be concise.
+ encoded_results.insert(0, [1, item])
+
+ def _InsertGenericMetaData(self, results_for_builder):
+ """ Inserts generic metadata (such as version number, current time etc)
+ into the JSON.
+
+ Args:
+ results_for_builder: Dictionary containing the test results for
+        a single builder.
+ """
+ self._InsertItemIntoRawList(results_for_builder,
+ self._build_number, self.BUILD_NUMBERS)
+
+ # Include SVN revisions for the given repositories.
+ for (name, path) in self._svn_repositories:
+ # Note: for JSON file's backward-compatibility we use 'chrome' rather
+ # than 'chromium' here.
+ lowercase_name = name.lower()
+ if lowercase_name == 'chromium':
+ lowercase_name = 'chrome'
+ self._InsertItemIntoRawList(results_for_builder,
+ self._GetSVNRevision(path),
+ lowercase_name + 'Revision')
+
+ self._InsertItemIntoRawList(results_for_builder,
+ int(time.time()),
+ self.TIME)
+
+ def _InsertTestTimeAndResult(self, test_name, tests):
+ """ Insert a test item with its results to the given tests dictionary.
+
+    Args:
+      test_name: Name of the test to insert.
+      tests: Dictionary containing test result entries.
+ """
+
+ result = self._get_result_char(test_name)
+ test_time = self._GetTestTiming(test_name)
+
+ this_test = tests
+ for segment in test_name.split('/'):
+ if segment not in this_test:
+ this_test[segment] = {}
+ this_test = this_test[segment]
+
+ if not len(this_test):
+ self._PopulateResultsAndTimesJSON(this_test)
+
+ if self.RESULTS in this_test:
+ self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+ else:
+ this_test[self.RESULTS] = [[1, result]]
+
+ if self.TIMES in this_test:
+ self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+ else:
+ this_test[self.TIMES] = [[1, test_time]]
+
+ def _ConvertJSONToCurrentVersion(self, results_json):
+ """If the JSON does not match the current version, converts it to the
+ current version and adds in the new version number.
+ """
+ if self.VERSION_KEY in results_json:
+ archive_version = results_json[self.VERSION_KEY]
+ if archive_version == self.VERSION:
+ return
+ else:
+ archive_version = 3
+
+ # version 3->4
+ if archive_version == 3:
+ for results in results_json.values():
+ self._ConvertTestsToTrie(results)
+
+ results_json[self.VERSION_KEY] = self.VERSION
+
+ def _ConvertTestsToTrie(self, results):
+ if not self.TESTS in results:
+ return
+
+ test_results = results[self.TESTS]
+ test_results_trie = {}
+ for test in test_results.iterkeys():
+ single_test_result = test_results[test]
+ AddPathToTrie(test, single_test_result, test_results_trie)
+
+ results[self.TESTS] = test_results_trie
+
+ def _PopulateResultsAndTimesJSON(self, results_and_times):
+ results_and_times[self.RESULTS] = []
+ results_and_times[self.TIMES] = []
+ return results_and_times
+
+ def _CreateResultsForBuilderJSON(self):
+ results_for_builder = {}
+ results_for_builder[self.TESTS] = {}
+ return results_for_builder
+
+ def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+ """Removes items from the run-length encoded list after the final
+ item that exceeds the max number of builds to track.
+
+ Args:
+      encoded_list: run-length encoded results. An array of arrays, e.g.
+        [[3,'A'],[1,'Q']] encodes AAAQ.
+ """
+ num_builds = 0
+ index = 0
+ for result in encoded_list:
+ num_builds = num_builds + result[0]
+ index = index + 1
+ if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+ return encoded_list[:index]
+ return encoded_list
+
+ def _NormalizeResultsJSON(self, test, test_name, tests):
+ """ Prune tests where all runs pass or tests that no longer exist and
+ truncate all results to maxNumberOfBuilds.
+
+ Args:
+ test: ResultsAndTimes object for this test.
+ test_name: Name of the test.
+ tests: The JSON object with all the test results for this builder.
+ """
+ test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+ test[self.RESULTS])
+ test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+ test[self.TIMES])
+
+ is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+ self.PASS_RESULT)
+ is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+ self.NO_DATA_RESULT)
+ max_time = max([test_time[1] for test_time in test[self.TIMES]])
+
+ # Remove all passes/no-data from the results to reduce noise and
+ # filesize. If a test passes every run, but takes > MIN_TIME to run,
+ # don't throw away the data.
+ if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+ del tests[test_name]
+
+ # method could be a function pylint: disable=R0201
+ def _IsResultsAllOfType(self, results, result_type):
+ """Returns whether all the results are of the given type
+ (e.g. all passes)."""
+ return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+ def __init__(self, url, timeout_seconds):
+ self._url = url
+ self._timeout_seconds = timeout_seconds
+
+ def UploadAsMultipartFormData(self, files, attrs):
+ file_objs = []
+ for filename, path in files:
+      with open(path, 'rb') as fp:
+ file_objs.append(('file', filename, fp.read()))
+
+ # FIXME: We should use the same variable names for the formal and actual
+ # parameters.
+ content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+ return self._UploadData(content_type, data)
+
+ def _UploadData(self, content_type, data):
+ start = time.time()
+ end = start + self._timeout_seconds
+ while time.time() < end:
+ try:
+ request = urllib2.Request(self._url, data,
+ {'Content-Type': content_type})
+ return urllib2.urlopen(request)
+ except urllib2.HTTPError as e:
+        _log.warn('Received HTTP status %s loading "%s". '
+                  'Retrying in 10 seconds...', e.code, e.filename)
+ time.sleep(10)
+
+
+def _GetMIMEType(filename):
+ return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more
+# structured data.
+def _EncodeMultipartFormData(fields, files):
+ """Encode form fields for multipart/form-data.
+
+ Args:
+ fields: A sequence of (name, value) elements for regular form fields.
+ files: A sequence of (name, filename, value) elements for data to be
+ uploaded as files.
+ Returns:
+ (content_type, body) ready for httplib.HTTP instance.
+
+ Source:
+ http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+ """
+ BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+ CRLF = '\r\n'
+ lines = []
+
+ for key, value in fields:
+ lines.append('--' + BOUNDARY)
+ lines.append('Content-Disposition: form-data; name="%s"' % key)
+ lines.append('')
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ lines.append(value)
+
+ for key, filename, value in files:
+ lines.append('--' + BOUNDARY)
+ lines.append('Content-Disposition: form-data; name="%s"; '
+ 'filename="%s"' % (key, filename))
+ lines.append('Content-Type: %s' % _GetMIMEType(filename))
+ lines.append('')
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ lines.append(value)
+
+ lines.append('--' + BOUNDARY + '--')
+ lines.append('')
+ body = CRLF.join(lines)
+ content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+ return content_type, body
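+
+
+# Usage sketch (illustrative; field and file values are hypothetical):
+#   content_type, body = _EncodeMultipartFormData(
+#       [('builder', 'DUMMY_BUILDER')],
+#       [('file', 'results.json', '{}')])
+# content_type is 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+# and body holds one part per field/file, separated by that boundary.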
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
new file mode 100644
index 0000000000..d6aee057bf
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.results.flakiness_dashboard import json_results_generator
+
+
+class JSONGeneratorTest(unittest.TestCase):
+
+ def setUp(self):
+ self.builder_name = 'DUMMY_BUILDER_NAME'
+ self.build_name = 'DUMMY_BUILD_NAME'
+    self.build_number = 'DUMMY_BUILD_NUMBER'
+
+ # For archived results.
+ self._json = None
+ self._num_runs = 0
+ self._tests_set = set([])
+ self._test_timings = {}
+ self._failed_count_map = {}
+
+ self._PASS_count = 0
+ self._DISABLED_count = 0
+ self._FLAKY_count = 0
+ self._FAILS_count = 0
+ self._fixable_count = 0
+
+ self._orig_write_json = json_results_generator.WriteJSON
+
+ # unused arguments ... pylint: disable=W0613
+ def _WriteJSONStub(json_object, file_path, callback=None):
+ pass
+
+ json_results_generator.WriteJSON = _WriteJSONStub
+
+ def tearDown(self):
+ json_results_generator.WriteJSON = self._orig_write_json
+
+ def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
+ tests_set = set(passed_tests_list) | set(failed_tests_list)
+
+ DISABLED_tests = set([t for t in tests_set
+ if t.startswith('DISABLED_')])
+ FLAKY_tests = set([t for t in tests_set
+ if t.startswith('FLAKY_')])
+ FAILS_tests = set([t for t in tests_set
+ if t.startswith('FAILS_')])
+ PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)
+
+ failed_tests = set(failed_tests_list) - DISABLED_tests
+ failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    test_timings = {}
+    for i, test in enumerate(tests_set):
+      test_timings[test] = float(self._num_runs * 100 + i)
+
+ test_results_map = dict()
+ for test in tests_set:
+ test_results_map[test] = json_results_generator.TestResult(
+ test, failed=(test in failed_tests),
+ elapsed_time=test_timings[test])
+
+ generator = json_results_generator.JSONResultsGeneratorBase(
+ self.builder_name, self.build_name, self.build_number,
+ '',
+ None, # don't fetch past json results archive
+ test_results_map)
+
+ # Test incremental json results
+ incremental_json = generator.GetJSON()
+ self._VerifyJSONResults(
+ tests_set,
+ test_timings,
+ failed_count_map,
+ len(PASS_tests),
+ len(DISABLED_tests),
+ len(FLAKY_tests),
+ len(DISABLED_tests | failed_tests),
+ incremental_json,
+ 1)
+
+ # We don't verify the results here, but at least we make sure the code
+ # runs without errors.
+ generator.GenerateJSONOutput()
+ generator.GenerateTimesMSFile()
+
+ def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
+ PASS_count, DISABLED_count, FLAKY_count,
+ fixable_count, json_obj, num_runs):
+ # Aliasing to a short name for better access to its constants.
+ JRG = json_results_generator.JSONResultsGeneratorBase
+
+ self.assertIn(JRG.VERSION_KEY, json_obj)
+ self.assertIn(self.builder_name, json_obj)
+
+ buildinfo = json_obj[self.builder_name]
+ self.assertIn(JRG.FIXABLE, buildinfo)
+ self.assertIn(JRG.TESTS, buildinfo)
+ self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
+ self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)
+
+ if tests_set or DISABLED_count:
+ fixable = {}
+ for fixable_items in buildinfo[JRG.FIXABLE]:
+ for (result_type, count) in fixable_items.iteritems():
+ if result_type in fixable:
+ fixable[result_type] = fixable[result_type] + count
+ else:
+ fixable[result_type] = count
+
+ if PASS_count:
+ self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
+ else:
+ self.assertTrue(JRG.PASS_RESULT not in fixable or
+ fixable[JRG.PASS_RESULT] == 0)
+ if DISABLED_count:
+ self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
+ else:
+ self.assertTrue(JRG.SKIP_RESULT not in fixable or
+ fixable[JRG.SKIP_RESULT] == 0)
+ if FLAKY_count:
+ self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
+ else:
+ self.assertTrue(JRG.FLAKY_RESULT not in fixable or
+ fixable[JRG.FLAKY_RESULT] == 0)
+
+ if failed_count_map:
+ tests = buildinfo[JRG.TESTS]
+ for test_name in failed_count_map.iterkeys():
+ test = self._FindTestInTrie(test_name, tests)
+
+ failed = 0
+ for result in test[JRG.RESULTS]:
+ if result[1] == JRG.FAIL_RESULT:
+ failed += result[0]
+ self.assertEqual(failed_count_map[test_name], failed)
+
+ timing_count = 0
+ for timings in test[JRG.TIMES]:
+ if timings[1] == test_timings[test_name]:
+ timing_count = timings[0]
+ self.assertEqual(1, timing_count)
+
+ if fixable_count:
+ self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)
+
+ def _FindTestInTrie(self, path, trie):
+ nodes = path.split('/')
+ sub_trie = trie
+ for node in nodes:
+ self.assertIn(node, sub_trie)
+ sub_trie = sub_trie[node]
+ return sub_trie
+
+ def testJSONGeneration(self):
+ self._TestJSONGeneration([], [])
+ self._TestJSONGeneration(['A1', 'B1'], [])
+ self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
+ self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
+ self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
+ self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
+ self._TestJSONGeneration(
+ ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
+ ['FAILS_D6'])
+
+ # Generate JSON with the same test sets. (Both incremental results and
+ # archived results must be updated appropriately.)
+ self._TestJSONGeneration(
+ ['A', 'FLAKY_B', 'DISABLED_C'],
+ ['FAILS_D', 'FLAKY_E'])
+ self._TestJSONGeneration(
+ ['A', 'DISABLED_C', 'FLAKY_E'],
+ ['FLAKY_B', 'FAILS_D'])
+ self._TestJSONGeneration(
+ ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
+ ['A', 'FLAKY_E'])
+
+  def testHierarchicalJSONGeneration(self):
+ # FIXME: Re-work tests to be more comprehensible and comprehensive.
+ self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])
+
+ def testTestTimingsTrie(self):
+ individual_test_timings = []
+ individual_test_timings.append(
+ json_results_generator.TestResult(
+ 'foo/bar/baz.html',
+ elapsed_time=1.2))
+ individual_test_timings.append(
+ json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
+ trie = json_results_generator.TestTimingsTrie(individual_test_timings)
+
+ expected_trie = {
+ 'bar.html': 0,
+ 'foo': {
+ 'bar': {
+ 'baz.html': 1200,
+ }
+ }
+ }
+
+ self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py
new file mode 100644
index 0000000000..b68a898b7d
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml.dom.minidom
+import xml.parsers.expat
+
+
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+from pylib.results.flakiness_dashboard import json_results_generator
+from pylib.utils import repo_utils
+
+
+
+class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
+ """Writes test results to a JSON file and handles uploading that file to
+ the test results server.
+ """
+ def __init__(self, builder_name, build_name, build_number, tmp_folder,
+ test_results_map, test_results_server, test_type, master_name):
+ super(JSONResultsGenerator, self).__init__(
+ builder_name=builder_name,
+ build_name=build_name,
+ build_number=build_number,
+ results_file_base_path=tmp_folder,
+ builder_base_url=None,
+ test_results_map=test_results_map,
+ svn_repositories=(('webkit', 'third_party/WebKit'),
+ ('chrome', '.')),
+ test_results_server=test_results_server,
+ test_type=test_type,
+ master_name=master_name)
+
+ #override
+ def _GetModifierChar(self, test_name):
+ if test_name not in self._test_results_map:
+ return self.__class__.NO_DATA_RESULT
+
+ return self._test_results_map[test_name].modifier
+
+ #override
+ def _GetSVNRevision(self, in_directory):
+ """Returns the git/svn revision for the given directory.
+
+ Args:
+ in_directory: The directory relative to src.
+ """
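+    # Git checkouts resolve to the HEAD SHA1 via repo_utils.GetGitHeadSHA1,
+    # svn checkouts to the revision attribute parsed from `svn info --xml`,
+    # and anything else to ''.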
+ def _is_git_directory(in_directory):
+ """Returns true if the given directory is in a git repository.
+
+ Args:
+ in_directory: The directory path to be tested.
+ """
+ if os.path.exists(os.path.join(in_directory, '.git')):
+ return True
+ parent = os.path.dirname(in_directory)
+ if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory:
+ return False
+ return _is_git_directory(parent)
+
+ in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory)
+
+ if not os.path.exists(os.path.join(in_directory, '.svn')):
+ if _is_git_directory(in_directory):
+ return repo_utils.GetGitHeadSHA1(in_directory)
+ else:
+ return ''
+
+ output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
+ try:
+ dom = xml.dom.minidom.parseString(output)
+ return dom.getElementsByTagName('entry')[0].getAttribute('revision')
+ except xml.parsers.expat.ExpatError:
+ return ''
+
+
+class ResultsUploader(object):
+ """Handles uploading buildbot tests results to the flakiness dashboard."""
+ def __init__(self, tests_type):
+ self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+ self._master_name = os.environ.get('BUILDBOT_MASTERNAME')
+ self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+ self._tests_type = tests_type
+ self._build_name = None
+
+    if not self._build_number or not self._builder_name:
+      raise Exception('You should not be uploading test results to the '
+                      'server from your local machine.')
+
+ upstream = (tests_type != 'Chromium_Android_Instrumentation')
+ if not upstream:
+ self._build_name = 'chromium-android'
+ buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
+ if not buildbot_branch:
+ buildbot_branch = 'master'
+ else:
+ # Ensure there's no leading "origin/"
+ buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
+ self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
+
+ self._test_results_map = {}
+
+ def AddResults(self, test_results):
+ # TODO(frankf): Differentiate between fail/crash/timeouts.
+ conversion_map = [
+ (test_results.GetPass(), False,
+ json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
+ (test_results.GetFail(), True,
+ json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+ (test_results.GetCrash(), True,
+ json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+ (test_results.GetTimeout(), True,
+ json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+ (test_results.GetUnknown(), True,
+ json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
+ ]
+
+ for results_list, failed, modifier in conversion_map:
+ for single_test_result in results_list:
+ test_result = json_results_generator.TestResult(
+ test=single_test_result.GetName(),
+ failed=failed,
+ elapsed_time=single_test_result.GetDuration() / 1000)
+        # The WebKit TestResult object sets the modifier based on the test
+        # name. Since we don't use the same test naming convention as
+        # WebKit, the modifier will be wrong, so we need to overwrite it.
+ test_result.modifier = modifier
+
+ self._test_results_map[single_test_result.GetName()] = test_result
+
+ def Upload(self, test_results_server):
+ if not self._test_results_map:
+ return
+
+ tmp_folder = tempfile.mkdtemp()
+
+ try:
+ results_generator = JSONResultsGenerator(
+ builder_name=self._builder_name,
+ build_name=self._build_name,
+ build_number=self._build_number,
+ tmp_folder=tmp_folder,
+ test_results_map=self._test_results_map,
+ test_results_server=test_results_server,
+ test_type=self._tests_type,
+ master_name=self._master_name)
+
+ json_files = ["incremental_results.json", "times_ms.json"]
+ results_generator.GenerateJSONOutput()
+ results_generator.GenerateTimesMSFile()
+ results_generator.UploadJSONFiles(json_files)
+ except Exception as e: # pylint: disable=broad-except
+ logging.error("Uploading results to test server failed: %s.", e)
+ finally:
+ shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+ """Reports test results to the flakiness dashboard for Chrome for Android.
+
+ Args:
+ results: test results.
+ flakiness_dashboard_server: the server to upload the results to.
+ test_type: the type of the tests (as displayed by the flakiness dashboard).
+ """
+ uploader = ResultsUploader(test_type)
+ uploader.AddResults(results)
+ uploader.Upload(flakiness_dashboard_server)
diff --git a/deps/v8/build/android/pylib/results/json_results.py b/deps/v8/build/android/pylib/results/json_results.py
new file mode 100644
index 0000000000..6a10ba4bc9
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/json_results.py
@@ -0,0 +1,154 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import itertools
+import json
+import logging
+
+from pylib.base import base_test_result
+
+def GenerateResultsDict(test_run_results, global_tags=None):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+    global_tags: optional list of tags to include under 'global_tags'.
+  Returns:
+    A results dict that mirrors the one generated by
+      base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+  """
+ # Example json output.
+ # {
+ # "global_tags": [],
+ # "all_tests": [
+ # "test1",
+ # "test2",
+ # ],
+ # "disabled_tests": [],
+ # "per_iteration_data": [
+ # {
+ # "test1": [
+ # {
+ # "status": "SUCCESS",
+ # "elapsed_time_ms": 1,
+ # "output_snippet": "",
+ # "output_snippet_base64": "",
+ # "losless_snippet": "",
+ # },
+ # ...
+ # ],
+ # "test2": [
+ # {
+ # "status": "FAILURE",
+ # "elapsed_time_ms": 12,
+ # "output_snippet": "",
+ # "output_snippet_base64": "",
+ # "losless_snippet": "",
+ # },
+ # ...
+ # ],
+ # },
+ # {
+ # "test1": [
+ # {
+ # "status": "SUCCESS",
+ # "elapsed_time_ms": 1,
+ # "output_snippet": "",
+ # "output_snippet_base64": "",
+ # "losless_snippet": "",
+ # },
+ # ],
+ # "test2": [
+ # {
+ # "status": "FAILURE",
+ # "elapsed_time_ms": 12,
+ # "output_snippet": "",
+ # "output_snippet_base64": "",
+ # "losless_snippet": "",
+ # },
+ # ],
+ # },
+ # ...
+ # ],
+ # }
+
+ all_tests = set()
+ per_iteration_data = []
+ test_run_links = {}
+
+ for test_run_result in test_run_results:
+ iteration_data = collections.defaultdict(list)
+ if isinstance(test_run_result, list):
+ results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+ for tr in test_run_result:
+ test_run_links.update(tr.GetLinks())
+
+ else:
+ results_iterable = test_run_result.GetAll()
+ test_run_links.update(test_run_result.GetLinks())
+
+ for r in results_iterable:
+ result_dict = {
+ 'status': r.GetType(),
+ 'elapsed_time_ms': r.GetDuration(),
+ 'output_snippet': unicode(r.GetLog(), errors='replace'),
+ 'losless_snippet': True,
+ 'output_snippet_base64': '',
+ 'links': r.GetLinks(),
+ }
+ iteration_data[r.GetName()].append(result_dict)
+
+ all_tests = all_tests.union(set(iteration_data.iterkeys()))
+ per_iteration_data.append(iteration_data)
+
+ return {
+ 'global_tags': global_tags or [],
+ 'all_tests': sorted(list(all_tests)),
+ # TODO(jbudorick): Add support for disabled tests within base_test_result.
+ 'disabled_tests': [],
+ 'per_iteration_data': per_iteration_data,
+ 'links': test_run_links,
+ }
+
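+# Usage sketch (illustrative; the test name below is hypothetical):
+#   result = base_test_result.BaseTestResult(
+#       'suite.TestCase', base_test_result.ResultType.PASS, duration=5)
+#   run = base_test_result.TestRunResults()
+#   run.AddResult(result)
+#   GenerateResultsDict([run])['all_tests'] == ['suite.TestCase']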
+
+def GenerateJsonResultsFile(test_run_result, file_path, global_tags=None,
+ **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+    global_tags: optional list of tags to include under 'global_tags'.
+    **kwargs: additional keyword arguments passed through to json.dumps.
+  """
+ with open(file_path, 'w') as json_result_file:
+ json_result_file.write(json.dumps(
+ GenerateResultsDict(test_run_result, global_tags=global_tags),
+ **kwargs))
+ logging.info('Generated json results file at %s', file_path)
+
+
+def ParseResultsFromJson(json_results):
+ """Creates a list of BaseTestResult objects from JSON.
+
+ Args:
+ json_results: A JSON dict in the format created by
+ GenerateJsonResultsFile.
+ """
+
+ def string_as_status(s):
+ if s in base_test_result.ResultType.GetTypes():
+ return s
+ return base_test_result.ResultType.UNKNOWN
+
+ results_list = []
+ testsuite_runs = json_results['per_iteration_data']
+ for testsuite_run in testsuite_runs:
+ for test, test_runs in testsuite_run.iteritems():
+ results_list.extend(
+ [base_test_result.BaseTestResult(test,
+ string_as_status(tr['status']),
+ duration=tr['elapsed_time_ms'])
+ for tr in test_runs])
+ return results_list
diff --git a/deps/v8/build/android/pylib/results/json_results_test.py b/deps/v8/build/android/pylib/results/json_results_test.py
new file mode 100755
index 0000000000..68e71f5785
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
+class JsonResultsTest(unittest.TestCase):
+
+ def testGenerateResultsDict_passedResult(self):
+ result = base_test_result.BaseTestResult(
+ 'test.package.TestName', base_test_result.ResultType.PASS)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName'],
+ results_dict['all_tests'])
+ self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+ iteration_result = results_dict['per_iteration_data'][0]
+ self.assertTrue('test.package.TestName' in iteration_result)
+ self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+ test_iteration_result = iteration_result['test.package.TestName'][0]
+ self.assertTrue('status' in test_iteration_result)
+ self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+ def testGenerateResultsDict_skippedResult(self):
+ result = base_test_result.BaseTestResult(
+ 'test.package.TestName', base_test_result.ResultType.SKIP)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName'],
+ results_dict['all_tests'])
+ self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+ iteration_result = results_dict['per_iteration_data'][0]
+ self.assertTrue('test.package.TestName' in iteration_result)
+ self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+ test_iteration_result = iteration_result['test.package.TestName'][0]
+ self.assertTrue('status' in test_iteration_result)
+ self.assertEquals('SKIPPED', test_iteration_result['status'])
+
+ def testGenerateResultsDict_failedResult(self):
+ result = base_test_result.BaseTestResult(
+ 'test.package.TestName', base_test_result.ResultType.FAIL)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName'],
+ results_dict['all_tests'])
+ self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+ iteration_result = results_dict['per_iteration_data'][0]
+ self.assertTrue('test.package.TestName' in iteration_result)
+ self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+ test_iteration_result = iteration_result['test.package.TestName'][0]
+ self.assertTrue('status' in test_iteration_result)
+ self.assertEquals('FAILURE', test_iteration_result['status'])
+
+ def testGenerateResultsDict_duration(self):
+ result = base_test_result.BaseTestResult(
+ 'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName'],
+ results_dict['all_tests'])
+ self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+ iteration_result = results_dict['per_iteration_data'][0]
+ self.assertTrue('test.package.TestName' in iteration_result)
+ self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+ test_iteration_result = iteration_result['test.package.TestName'][0]
+ self.assertTrue('elapsed_time_ms' in test_iteration_result)
+ self.assertEquals(123, test_iteration_result['elapsed_time_ms'])
+
+ def testGenerateResultsDict_multipleResults(self):
+ result1 = base_test_result.BaseTestResult(
+ 'test.package.TestName1', base_test_result.ResultType.PASS)
+ result2 = base_test_result.BaseTestResult(
+ 'test.package.TestName2', base_test_result.ResultType.PASS)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result1)
+ all_results.AddResult(result2)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName1', 'test.package.TestName2'],
+ results_dict['all_tests'])
+
+ self.assertTrue('per_iteration_data' in results_dict)
+ iterations = results_dict['per_iteration_data']
+ self.assertEquals(1, len(iterations))
+
+ expected_tests = set([
+ 'test.package.TestName1',
+ 'test.package.TestName2',
+ ])
+
+ for test_name, iteration_result in iterations[0].iteritems():
+ self.assertTrue(test_name in expected_tests)
+ expected_tests.remove(test_name)
+ self.assertEquals(1, len(iteration_result))
+
+ test_iteration_result = iteration_result[0]
+ self.assertTrue('status' in test_iteration_result)
+ self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+ def testGenerateResultsDict_passOnRetry(self):
+ raw_results = []
+
+ result1 = base_test_result.BaseTestResult(
+ 'test.package.TestName1', base_test_result.ResultType.FAIL)
+ run_results1 = base_test_result.TestRunResults()
+ run_results1.AddResult(result1)
+ raw_results.append(run_results1)
+
+ result2 = base_test_result.BaseTestResult(
+ 'test.package.TestName1', base_test_result.ResultType.PASS)
+ run_results2 = base_test_result.TestRunResults()
+ run_results2.AddResult(result2)
+ raw_results.append(run_results2)
+
+ results_dict = json_results.GenerateResultsDict([raw_results])
+ self.assertEquals(['test.package.TestName1'], results_dict['all_tests'])
+
+ # Check that there's only one iteration.
+ self.assertIn('per_iteration_data', results_dict)
+ iterations = results_dict['per_iteration_data']
+ self.assertEquals(1, len(iterations))
+
+ # Check that test.package.TestName1 is the only test in the iteration.
+ self.assertEquals(1, len(iterations[0]))
+ self.assertIn('test.package.TestName1', iterations[0])
+
+ # Check that there are two results for test.package.TestName1.
+ actual_test_results = iterations[0]['test.package.TestName1']
+ self.assertEquals(2, len(actual_test_results))
+
+ # Check that the first result is a failure.
+ self.assertIn('status', actual_test_results[0])
+ self.assertEquals('FAILURE', actual_test_results[0]['status'])
+
+ # Check that the second result is a success.
+ self.assertIn('status', actual_test_results[1])
+ self.assertEquals('SUCCESS', actual_test_results[1]['status'])
+
+ def testGenerateResultsDict_globalTags(self):
+ raw_results = []
+ global_tags = ['UNRELIABLE_RESULTS']
+
+ results_dict = json_results.GenerateResultsDict(
+ [raw_results], global_tags=global_tags)
+ self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags'])
+
+ def testGenerateResultsDict_loslessSnippet(self):
+ result = base_test_result.BaseTestResult(
+ 'test.package.TestName', base_test_result.ResultType.FAIL)
+ log = 'blah-blah'
+ result.SetLog(log)
+
+ all_results = base_test_result.TestRunResults()
+ all_results.AddResult(result)
+
+ results_dict = json_results.GenerateResultsDict([all_results])
+ self.assertEquals(
+ ['test.package.TestName'],
+ results_dict['all_tests'])
+ self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+ iteration_result = results_dict['per_iteration_data'][0]
+ self.assertTrue('test.package.TestName' in iteration_result)
+ self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+ test_iteration_result = iteration_result['test.package.TestName'][0]
+ self.assertTrue('losless_snippet' in test_iteration_result)
+ self.assertTrue(test_iteration_result['losless_snippet'])
+ self.assertTrue('output_snippet' in test_iteration_result)
+ self.assertEquals(log, test_iteration_result['output_snippet'])
+ self.assertTrue('output_snippet_base64' in test_iteration_result)
+ self.assertEquals('', test_iteration_result['output_snippet_base64'])
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/results/presentation/__init__.py b/deps/v8/build/android/pylib/results/presentation/__init__.py
new file mode 100644
index 0000000000..a22a6ee39a
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/results/presentation/javascript/main_html.js b/deps/v8/build/android/pylib/results/presentation/javascript/main_html.js
new file mode 100644
index 0000000000..76f22f09d5
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/javascript/main_html.js
@@ -0,0 +1,214 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+function getArguments() {
+ // Returns the URL arguments as a dictionary.
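+  // Illustrative: a URL ending in '?suite=base_unittests&run=2' yields
+  // {suite: 'base_unittests', run: '2'} (values are kept URL-encoded).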
+  var args = {};
+ var s = location.search;
+ if (s) {
+ var vals = s.substring(1).split('&');
+ for (var i = 0; i < vals.length; i++) {
+ var pair = vals[i].split('=');
+ args[pair[0]] = pair[1];
+ }
+ }
+ return args;
+}
+
+function showSuiteTable(show_the_table) {
+ document.getElementById('suite-table').style.display = (
+ show_the_table ? 'table' : 'none');
+}
+
+function showTestTable(show_the_table) {
+ document.getElementById('test-table').style.display = (
+ show_the_table ? 'table' : 'none');
+}
+
+function showTestsOfOneSuiteOnly(suite_name) {
+  setTitle('Test Results of Suite: ' + suite_name);
+  var show_all = (suite_name == 'TOTAL');
+ var testTableBlocks = document.getElementById('test-table')
+ .getElementsByClassName('row_block');
+ Array.prototype.slice.call(testTableBlocks)
+ .forEach(function(testTableBlock) {
+ if (!show_all) {
+ var table_block_in_suite = (testTableBlock.firstElementChild
+ .firstElementChild.firstElementChild.innerHTML)
+ .startsWith(suite_name);
+ if (!table_block_in_suite) {
+ testTableBlock.style.display = 'none';
+ return;
+ }
+ }
+ testTableBlock.style.display = 'table-row-group';
+ });
+ showTestTable(true);
+ showSuiteTable(false);
+ window.scrollTo(0, 0);
+}
+
+function showTestsOfOneSuiteOnlyWithNewState(suite_name) {
+ showTestsOfOneSuiteOnly(suite_name);
+ history.pushState({suite: suite_name}, suite_name, '');
+}
+
+function showSuiteTableOnly() {
+  setTitle('Suites Summary');
+ showTestTable(false);
+ showSuiteTable(true);
+ window.scrollTo(0, 0);
+}
+
+function showSuiteTableOnlyWithReplaceState() {
+ showSuiteTableOnly();
+ history.replaceState({}, 'suite_table', '');
+}
+
+function setBrowserBackButtonLogic() {
+ window.onpopstate = function(event) {
+ if (!event.state || !event.state.suite) {
+ showSuiteTableOnly();
+ } else {
+ showTestsOfOneSuiteOnly(event.state.suite);
+ }
+ };
+}
+
+function setTitle(title) {
+ document.getElementById('summary-header').textContent = title;
+}
+
+function sortByColumn(head) {
+ var table = head.parentNode.parentNode.parentNode;
+ var rowBlocks = Array.prototype.slice.call(
+ table.getElementsByTagName('tbody'));
+
+ // Determine whether to asc or desc and set arrows.
+ var headers = head.parentNode.getElementsByTagName('th');
+ var headIndex = Array.prototype.slice.call(headers).indexOf(head);
+ var asc = -1;
+ for (var i = 0; i < headers.length; i++) {
+ if (headers[i].dataset.ascSorted != 0) {
+ if (headers[i].dataset.ascSorted == 1) {
+ headers[i].getElementsByClassName('up')[0]
+ .style.display = 'none';
+ } else {
+ headers[i].getElementsByClassName('down')[0]
+ .style.display = 'none';
+ }
+ if (headers[i] == head) {
+ asc = headers[i].dataset.ascSorted * -1;
+ } else {
+ headers[i].dataset.ascSorted = 0;
+ }
+ break;
+ }
+ }
+ headers[headIndex].dataset.ascSorted = asc;
+ if (asc == 1) {
+ headers[headIndex].getElementsByClassName('up')[0]
+ .style.display = 'inline';
+ } else {
+ headers[headIndex].getElementsByClassName('down')[0]
+ .style.display = 'inline';
+ }
+
+ // Sort the array by the specified column number (col) and order (asc).
+  // Sort the row blocks by the values in the clicked column (headIndex)
+  // and in the chosen order (asc).
+ if (a.style.display == 'none') {
+ return -1;
+ } else if (b.style.display == 'none') {
+ return 1;
+ }
+ var a_rows = Array.prototype.slice.call(a.children);
+ var b_rows = Array.prototype.slice.call(b.children);
+ if (head.className == "text") {
+ // If sorting by text, we only compare the entry on the first row.
+ var aInnerHTML = a_rows[0].children[headIndex].innerHTML;
+ var bInnerHTML = b_rows[0].children[headIndex].innerHTML;
+ return (aInnerHTML == bInnerHTML) ? 0 : (
+ (aInnerHTML > bInnerHTML) ? asc : -1 * asc);
+ } else if (head.className == "number") {
+ // If sorting by number, for example, duration,
+ // we will sum up the durations of different test runs
+ // for one specific test case and sort by the sum.
+ var avalue = 0;
+ var bvalue = 0;
+ a_rows.forEach(function (row, i) {
+ var index = (i > 0) ? headIndex - 1 : headIndex;
+ avalue += Number(row.children[index].innerHTML);
+ });
+ b_rows.forEach(function (row, i) {
+ var index = (i > 0) ? headIndex - 1 : headIndex;
+ bvalue += Number(row.children[index].innerHTML);
+ });
+ } else if (head.className == "flaky") {
+ // Flakiness = (#total - #success - #skipped) / (#total - #skipped)
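+      // Worked example (illustrative): 5 runs with 3 SUCCESS and 1 SKIPPED
+      // give (5 - 3 - 1) / (5 - 1) = 0.25.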
+ var a_success_or_skipped = 0;
+ var a_skipped = 0;
+ var b_success_or_skipped = 0;
+ var b_skipped = 0;
+ a_rows.forEach(function (row, i) {
+ var index = (i > 0) ? headIndex - 1 : headIndex;
+ var status = row.children[index].innerHTML.trim();
+ if (status == 'SUCCESS') {
+ a_success_or_skipped += 1;
+ }
+ if (status == 'SKIPPED') {
+ a_success_or_skipped += 1;
+ a_skipped += 1;
+ }
+ });
+ b_rows.forEach(function (row, i) {
+ var index = (i > 0) ? headIndex - 1 : headIndex;
+ var status = row.children[index].innerHTML.trim();
+ if (status == 'SUCCESS') {
+ b_success_or_skipped += 1;
+ }
+ if (status == 'SKIPPED') {
+ b_success_or_skipped += 1;
+ b_skipped += 1;
+ }
+ });
+ var atotal_minus_skipped = a_rows.length - a_skipped;
+ var btotal_minus_skipped = b_rows.length - b_skipped;
+
+ var avalue = ((atotal_minus_skipped == 0) ? -1 :
+ (a_rows.length - a_success_or_skipped) / atotal_minus_skipped);
+ var bvalue = ((btotal_minus_skipped == 0) ? -1 :
+ (b_rows.length - b_success_or_skipped) / btotal_minus_skipped);
+ }
+ return asc * (avalue - bvalue);
+ });
+
+ for (var i = 0; i < rowBlocks.length; i++) {
+ table.appendChild(rowBlocks[i]);
+ }
+}
+
+function sortSuiteTableByFailedTestCases() {
+ sortByColumn(document.getElementById('number_fail_tests'));
+}
+
+function setTableCellsAsClickable() {
+ const tableCells = document.getElementsByTagName('td');
+ for(let i = 0; i < tableCells.length; i++) {
+ const links = tableCells[i].getElementsByTagName('a');
+ // Only make the cell clickable if there is only one link.
+ if (links.length == 1) {
+ tableCells[i].addEventListener('click', function() {
+ links[0].click();
+ });
+ tableCells[i].addEventListener('mouseover', function() {
+ tableCells[i].style.cursor = 'pointer';
+ links[0].style.textDecoration = 'underline';
+ });
+ tableCells[i].addEventListener('mouseout', function() {
+ tableCells[i].style.cursor = 'initial';
+ links[0].style.textDecoration = 'initial';
+ });
+ }
+ }
+}
diff --git a/deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py b/deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py
new file mode 100755
index 0000000000..5dba4df326
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py
@@ -0,0 +1,168 @@
+#! /usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import sys
+
+
+def merge_shard_results(summary_json, jsons_to_merge):
+ """Reads JSON test output from all shards and combines them into one.
+
+ Returns dict with merged test output on success or None on failure. Emits
+ annotations.
+ """
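+  # Shape sketch (illustrative): the merged dict mirrors a single shard's
+  # output.json, with the set-valued keys ('all_tests', 'disabled_tests',
+  # 'global_tags', 'links') unioned across shards and 'per_iteration_data'
+  # merged element-wise across shards.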
+ try:
+ with open(summary_json) as f:
+ summary = json.load(f)
+ except (IOError, ValueError):
+ raise Exception('Summary json cannot be loaded.')
+
+ # Merge all JSON files together. Keep track of missing shards.
+ merged = {
+ 'all_tests': set(),
+ 'disabled_tests': set(),
+ 'global_tags': set(),
+ 'missing_shards': [],
+ 'per_iteration_data': [],
+ 'swarming_summary': summary,
+ 'links': set()
+ }
+ for index, result in enumerate(summary['shards']):
+ if result is None:
+ merged['missing_shards'].append(index)
+ continue
+
+ # Author note: this code path doesn't trigger convert_to_old_format() in
+ # client/swarming.py, which means the state enum is saved in its string
+ # name form, not in the number form.
+ state = result.get('state')
+ if state == u'BOT_DIED':
+ print >> sys.stderr, 'Shard #%d had a Swarming internal failure' % index
+ elif state == u'EXPIRED':
+ print >> sys.stderr, 'There wasn\'t enough capacity to run your test'
+ elif state == u'TIMED_OUT':
+      print >> sys.stderr, (
+          'Test runtime exceeded allocated time. '
+          'Either it ran for too long (hard timeout) or it didn\'t produce '
+          'I/O for an extended period of time (I/O timeout)')
+ elif state != u'COMPLETED':
+ print >> sys.stderr, 'Invalid Swarming task state: %s' % state
+
+ json_data, err_msg = load_shard_json(index, result.get('task_id'),
+ jsons_to_merge)
+ if json_data:
+ # Set-like fields.
+ for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+        merged[key].update(json_data.get(key, []))
+
+ # 'per_iteration_data' is a list of dicts. Dicts should be merged
+ # together, not the 'per_iteration_data' list itself.
+ merged['per_iteration_data'] = merge_list_of_dicts(
+ merged['per_iteration_data'], json_data.get('per_iteration_data', []))
+ else:
+ merged['missing_shards'].append(index)
+ print >> sys.stderr, 'No result was found: %s' % err_msg
+
+  # If some shards are missing, make it known. Continue parsing anyway; the
+  # step should be red regardless, since swarming.py returns a non-zero exit
+  # code in that case.
+ if merged['missing_shards']:
+ as_str = ', '.join([str(shard) for shard in merged['missing_shards']])
+ print >> sys.stderr, ('some shards did not complete: %s' % as_str)
+    # Not all tests ran, so the combined JSON summary cannot be trusted.
+ merged['global_tags'].add('UNRELIABLE_RESULTS')
+
+ # Convert to jsonish dict.
+ for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+ merged[key] = sorted(merged[key])
+ return merged
+
+
+OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024 # 100 MB
+
+
+def load_shard_json(index, task_id, jsons_to_merge):
+ """Reads JSON output of the specified shard.
+
+  Args:
+    index: The index of the shard to load data for (old API: output lives in
+      a directory named after the shard index).
+    task_id: The Swarming task id of the shard to load data for (new API:
+      output lives in a directory named after the task id).
+    jsons_to_merge: Candidate output.json paths from all shards.
+
+ Returns: A tuple containing:
+ * The contents of path, deserialized into a python object.
+ * An error string.
+ (exactly one of the tuple elements will be non-None).
+ """
+ matching_json_files = [
+ j for j in jsons_to_merge
+ if (os.path.basename(j) == 'output.json' and
+ (os.path.basename(os.path.dirname(j)) == str(index) or
+ os.path.basename(os.path.dirname(j)) == task_id))]
+
+ if not matching_json_files:
+ print >> sys.stderr, 'shard %s test output missing' % index
+ return (None, 'shard %s test output was missing' % index)
+ elif len(matching_json_files) > 1:
+ print >> sys.stderr, 'duplicate test output for shard %s' % index
+ return (None, 'shard %s test output was duplicated' % index)
+
+ path = matching_json_files[0]
+
+ try:
+ filesize = os.stat(path).st_size
+ if filesize > OUTPUT_JSON_SIZE_LIMIT:
+ print >> sys.stderr, 'output.json is %d bytes. Max size is %d' % (
+ filesize, OUTPUT_JSON_SIZE_LIMIT)
+ return (None, 'shard %s test output exceeded the size limit' % index)
+
+ with open(path) as f:
+ return (json.load(f), None)
+ except (IOError, ValueError, OSError) as e:
+ print >> sys.stderr, 'Missing or invalid gtest JSON file: %s' % path
+ print >> sys.stderr, '%s: %s' % (type(e).__name__, e)
+
+ return (None, 'shard %s test output was missing or invalid' % index)
+
+
+def merge_list_of_dicts(left, right):
+ """Merges dicts left[0] with right[0], left[1] with right[1], etc."""
+ output = []
+ for i in xrange(max(len(left), len(right))):
+ left_dict = left[i] if i < len(left) else {}
+ right_dict = right[i] if i < len(right) else {}
+ merged_dict = left_dict.copy()
+ merged_dict.update(right_dict)
+ output.append(merged_dict)
+ return output
+
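+# Illustrative: merge_list_of_dicts([{'a': [1]}], [{'a': [2]}, {'b': [3]}])
+# returns [{'a': [2]}, {'b': [3]}]; on key collisions the right-hand value
+# wins, since dict.update replaces values rather than merging them.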
+
+def standard_gtest_merge(
+ output_json, summary_json, jsons_to_merge):
+
+ output = merge_shard_results(summary_json, jsons_to_merge)
+ with open(output_json, 'wb') as f:
+ json.dump(output, f)
+
+ return 0
+
+
+def main(raw_args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--summary-json')
+ parser.add_argument('-o', '--output-json', required=True)
+ parser.add_argument('jsons_to_merge', nargs='*')
+
+ args = parser.parse_args(raw_args)
+
+ return standard_gtest_merge(
+ args.output_json, args.summary_json, args.jsons_to_merge)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/pylib/results/presentation/template/main.html b/deps/v8/build/android/pylib/results/presentation/template/main.html
new file mode 100644
index 0000000000..5c8df5e121
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/template/main.html
@@ -0,0 +1,97 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <style>
+ body {
+ background-color: #fff;
+ color: #333;
+ font-family: Verdana, sans-serif;
+ font-size: 10px;
+ margin-left: 30px;
+ margin-right: 30px;
+ margin-top: 20px;
+ margin-bottom: 50px;
+ padding: 0;
+ }
+ table, th, td {
+ border: 1px solid black;
+ border-collapse: collapse;
+ text-align: center;
+ }
+ table, td {
+ padding: 0.1em 1em 0.1em 1em;
+ }
+ th {
+ cursor: pointer;
+ padding: 0.2em 1.5em 0.2em 1.5em;
+ }
+ table {
+ width: 100%;
+ }
+ .center {
+ text-align: center;
+ }
+ .left {
+ text-align: left;
+ }
+ a {
+ text-decoration: none;
+ }
+ a:hover {
+ text-decoration: underline;
+ cursor: pointer;
+ }
+ a:link,a:visited,a:active {
+ color: #444;
+ }
+ .row_block:hover {
+ background-color: #F6F6F6;
+ }
+ .skipped, .success, .failure {
+ border-color: #000000;
+ }
+ .success {
+ color: #000;
+ background-color: #8d4;
+ }
+ .failure {
+ color: #000;
+ background-color: #e88;
+ }
+ .skipped {
+ color: #000;
+ background: #AADDEE;
+ }
+ </style>
+ <script type="text/javascript">
+ {% include "javascript/main_html.js" %}
+ </script>
+ </head>
+ <body>
+ <div>
+ <h2 id="summary-header"></h2>
+ {% for tb_value in tb_values %}
+ {% include 'template/table.html' %}
+ {% endfor %}
+ </div>
+    {% if feedback_url %}
+    <br/>
+    <a href="{{feedback_url}}" target="_blank"><b>Feedback</b></a>
+    {%- endif %}
+ <script>
+ sortSuiteTableByFailedTestCases();
+ showSuiteTableOnlyWithReplaceState();
+ // Enable sorting for each column of tables.
+ Array.prototype.slice.call(document.getElementsByTagName('th'))
+ .forEach(function(head) {
+ head.addEventListener(
+ "click",
+ function() { sortByColumn(head); });
+ }
+ );
+ setBrowserBackButtonLogic();
+ setTableCellsAsClickable();
+ </script>
+  </body>
+</html> \ No newline at end of file
diff --git a/deps/v8/build/android/pylib/results/presentation/template/table.html b/deps/v8/build/android/pylib/results/presentation/template/table.html
new file mode 100644
index 0000000000..4240043490
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/template/table.html
@@ -0,0 +1,60 @@
+<table id="{{tb_value.table_id}}" style="display:none;">
+ <thead class="heads">
+ <tr>
+ {% for cell in tb_value.table_headers -%}
+ <th class="{{cell.class}}" id="{{cell.data}}" data-asc-sorted=0>
+ {{cell.data}}
+ <span class="up" style="display:none;"> &#8593</span>
+ <span class="down" style="display:none;"> &#8595</span>
+ </th>
+ {%- endfor %}
+ </tr>
+ </thead>
+ {% for block in tb_value.table_row_blocks -%}
+ <tbody class="row_block">
+ {% for row in block -%}
+ <tr class="{{tb_value.table_id}}-body-row">
+ {% for cell in row -%}
+ {% if cell.rowspan -%}
+ <td rowspan="{{cell.rowspan}}" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+ {%- else -%}
+ <td rowspan="1" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+ {%- endif %}
+ {% if cell.cell_type == 'pre' -%}
+ <pre>{{cell.data}}</pre>
+ {%- elif cell.cell_type == 'links' -%}
+ {% for link in cell.links -%}
+ <a href="{{link.href}}" target="{{link.target}}">{{link.data}}</a>
+ {% if not loop.last -%}
+ <br />
+ {%- endif %}
+ {%- endfor %}
+ {%- elif cell.cell_type == 'action' -%}
+ <a onclick="{{cell.action}}">{{cell.data}}</a>
+ {%- else -%}
+ {{cell.data}}
+ {%- endif %}
+ </td>
+ {%- endfor %}
+ </tr>
+ {%- endfor %}
+ </tbody>
+ {%- endfor %}
+ <tfoot>
+ <tr>
+ {% for cell in tb_value.table_footer -%}
+ <td class="{{tb_value.table_id}}-summary-column-{{loop.index0}} {{cell.class}}">
+ {% if cell.cell_type == 'links' -%}
+ {% for link in cell.links -%}
+ <a href="{{link.href}}" target="{{link.target}}"><b>{{link.data}}</b></a>
+ {%- endfor %}
+ {%- elif cell.cell_type == 'action' -%}
+ <a onclick="{{cell.action}}">{{cell.data}}</a>
+ {%- else -%}
+ <b>{{cell.data}}</b>
+ {%- endif %}
+ </td>
+ {%- endfor %}
+ </tr>
+ </tfoot>
+</table>
diff --git a/deps/v8/build/android/pylib/results/presentation/test_results_presentation.py b/deps/v8/build/android/pylib/results/presentation/test_results_presentation.py
new file mode 100755
index 0000000000..82d6c88470
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/test_results_presentation.py
@@ -0,0 +1,543 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import json
+import logging
+import os
+import tempfile
+import sys
+import urllib
+
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.abspath(os.path.join(
+ CURRENT_DIR, '..', '..', '..', '..', '..'))
+
+sys.path.append(os.path.join(BASE_DIR, 'build', 'android'))
+from pylib.results.presentation import standard_gtest_merge
+from pylib.utils import google_storage_helper # pylint: disable=import-error
+
+sys.path.append(os.path.join(BASE_DIR, 'third_party'))
+import jinja2 # pylint: disable=import-error
+JINJA_ENVIRONMENT = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+ autoescape=True)
+
+
+def cell(data, html_class='center'):
+ """Formats table cell data for processing in jinja template."""
+ return {
+ 'data': data,
+ 'class': html_class,
+ }
+
+
+def pre_cell(data, html_class='center'):
+ """Formats table <pre> cell data for processing in jinja template."""
+ return {
+ 'cell_type': 'pre',
+ 'data': data,
+ 'class': html_class,
+ }
+
+
+class LinkTarget(object):
+ # Opens the linked document in a new window or tab.
+ NEW_TAB = '_blank'
+ # Opens the linked document in the same frame as it was clicked.
+ CURRENT_TAB = '_self'
+
+
+def link(data, href, target=LinkTarget.CURRENT_TAB):
+ """Formats <a> tag data for processing in jinja template.
+
+ Args:
+ data: String link appears as on HTML page.
+ href: URL where link goes.
+ target: Where link should be opened (e.g. current tab or new tab).
+ """
+ return {
+ 'data': data,
+ 'href': href,
+ 'target': target,
+ }
+
+
+def links_cell(links, html_class='center', rowspan=None):
+ """Formats table cell with links for processing in jinja template.
+
+ Args:
+ links: List of link dictionaries. Use |link| function to generate them.
+ html_class: Class for table cell.
+ rowspan: Rowspan HTML attribute.
+ """
+ return {
+ 'cell_type': 'links',
+ 'class': html_class,
+ 'links': links,
+ 'rowspan': rowspan,
+ }
+
+
+def action_cell(action, data, html_class):
+ """Formats table cell with javascript actions.
+
+ Args:
+    action: JavaScript action.
+    data: Data in cell.
+    html_class: Class for table cell.
+ """
+ return {
+ 'cell_type': 'action',
+ 'action': action,
+ 'data': data,
+ 'class': html_class,
+ }
+
+
+def flakiness_dashboard_link(test_name, suite_name):
+ url_args = urllib.urlencode([
+ ('testType', suite_name),
+ ('tests', test_name)])
+ return ('https://test-results.appspot.com/'
+ 'dashboards/flakiness_dashboard.html#%s' % url_args)
+
+
+def logs_cell(result, test_name, suite_name):
+ """Formats result logs data for processing in jinja template."""
+ link_list = []
+ result_link_dict = result.get('links', {})
+  result_link_dict['flakiness'] = flakiness_dashboard_link(
+      test_name, suite_name)
+ for name, href in sorted(result_link_dict.items()):
+ link_list.append(link(
+ data=name,
+ href=href,
+ target=LinkTarget.NEW_TAB))
+ if link_list:
+ return links_cell(link_list)
+ else:
+ return cell('(no logs)')
+
+
+def code_search(test, cs_base_url):
+ """Returns URL for test on codesearch."""
+ search = test.replace('#', '.')
+ return '%s/?q=%s&type=cs' % (cs_base_url, search)
+
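+# Illustrative: code_search('org.chromium.FooTest#testBar',
+# 'http://cs.chromium.org') returns
+# 'http://cs.chromium.org/?q=org.chromium.FooTest.testBar&type=cs'.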
+
+def status_class(status):
+ """Returns HTML class for test status."""
+ if not status:
+ return 'failure unknown'
+ status = status.lower()
+ if status not in ('success', 'skipped'):
+ return 'failure %s' % status
+ return status
+
+
+def create_test_table(results_dict, cs_base_url, suite_name):
+ """Format test data for injecting into HTML table."""
+
+ header_row = [
+ cell(data='test_name', html_class='text'),
+ cell(data='status', html_class='flaky'),
+ cell(data='elapsed_time_ms', html_class='number'),
+ cell(data='logs', html_class='text'),
+ cell(data='output_snippet', html_class='text'),
+ ]
+
+ test_row_blocks = []
+ for test_name, test_results in results_dict.iteritems():
+ test_runs = []
+ for index, result in enumerate(test_results):
+ if index == 0:
+ test_run = [links_cell(
+ links=[
+ link(href=code_search(test_name, cs_base_url),
+ target=LinkTarget.NEW_TAB,
+ data=test_name)],
+ rowspan=len(test_results),
+ html_class='left %s' % test_name
+ )] # test_name
+ else:
+ test_run = []
+
+ test_run.extend([
+ cell(data=result['status'] or 'UNKNOWN',
+ # status
+ html_class=('center %s' %
+ status_class(result['status']))),
+ cell(data=result['elapsed_time_ms']), # elapsed_time_ms
+ logs_cell(result, test_name, suite_name), # logs
+ pre_cell(data=result['output_snippet'], # output_snippet
+ html_class='left'),
+ ])
+ test_runs.append(test_run)
+ test_row_blocks.append(test_runs)
+ return header_row, test_row_blocks
+
+
+def create_suite_table(results_dict):
+ """Format test suite data for injecting into HTML table."""
+
+ SUCCESS_COUNT_INDEX = 1
+ FAIL_COUNT_INDEX = 2
+ ALL_COUNT_INDEX = 3
+ TIME_INDEX = 4
+
+ header_row = [
+ cell(data='suite_name', html_class='text'),
+ cell(data='number_success_tests', html_class='number'),
+ cell(data='number_fail_tests', html_class='number'),
+ cell(data='all_tests', html_class='number'),
+ cell(data='elapsed_time_ms', html_class='number'),
+ ]
+
+ footer_row = [
+ action_cell(
+ 'showTestsOfOneSuiteOnlyWithNewState("TOTAL")',
+ 'TOTAL',
+ 'center'
+ ), # TOTAL
+ cell(data=0), # number_success_tests
+ cell(data=0), # number_fail_tests
+ cell(data=0), # all_tests
+ cell(data=0), # elapsed_time_ms
+ ]
+
+ suite_row_dict = {}
+ for test_name, test_results in results_dict.iteritems():
+    # TODO(mikecase): This logic doesn't work if there are multiple test
+    # runs, i.e. if 'per_iteration_data' has multiple entries, since we
+    # only look at the result of the last test run.
+ result = test_results[-1]
+
+ suite_name = (test_name.split('#')[0] if '#' in test_name
+ else test_name.split('.')[0])
+ if suite_name in suite_row_dict:
+ suite_row = suite_row_dict[suite_name]
+ else:
+ suite_row = [
+ action_cell(
+ 'showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name,
+ suite_name,
+ 'left'
+ ), # suite_name
+ cell(data=0), # number_success_tests
+ cell(data=0), # number_fail_tests
+ cell(data=0), # all_tests
+ cell(data=0), # elapsed_time_ms
+ ]
+
+ suite_row_dict[suite_name] = suite_row
+
+ suite_row[ALL_COUNT_INDEX]['data'] += 1
+ footer_row[ALL_COUNT_INDEX]['data'] += 1
+
+ if result['status'] == 'SUCCESS':
+ suite_row[SUCCESS_COUNT_INDEX]['data'] += 1
+ footer_row[SUCCESS_COUNT_INDEX]['data'] += 1
+ elif result['status'] != 'SKIPPED':
+ suite_row[FAIL_COUNT_INDEX]['data'] += 1
+ footer_row[FAIL_COUNT_INDEX]['data'] += 1
+
+ suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
+ footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
+
+ for suite in suite_row_dict.values():
+ if suite[FAIL_COUNT_INDEX]['data'] > 0:
+ suite[FAIL_COUNT_INDEX]['class'] += ' failure'
+ else:
+ suite[FAIL_COUNT_INDEX]['class'] += ' success'
+
+ if footer_row[FAIL_COUNT_INDEX]['data'] > 0:
+ footer_row[FAIL_COUNT_INDEX]['class'] += ' failure'
+ else:
+ footer_row[FAIL_COUNT_INDEX]['class'] += ' success'
+
+ return (header_row,
+ [[suite_row] for suite_row in suite_row_dict.values()],
+ footer_row)
+
+
+def feedback_url(result_details_link):
+ # pylint: disable=redefined-variable-type
+ url_args = [
+ ('labels', 'Pri-2,Type-Bug,Restrict-View-Google'),
+ ('summary', 'Result Details Feedback:'),
+ ('components', 'Test>Android'),
+ ]
+ if result_details_link:
+ url_args.append(('comment', 'Please check out: %s' % result_details_link))
+ url_args = urllib.urlencode(url_args)
+ # pylint: enable=redefined-variable-type
+ return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args
+
+
+def results_to_html(results_dict, cs_base_url, bucket, test_name,
+ builder_name, build_number, local_output):
+ """Convert list of test results into html format.
+
+ Args:
+    local_output: Whether this results file is written as a local file
+      instead of being uploaded to Google Storage.
+ """
+ test_rows_header, test_rows = create_test_table(
+ results_dict, cs_base_url, test_name)
+ suite_rows_header, suite_rows, suite_row_footer = create_suite_table(
+ results_dict)
+
+ suite_table_values = {
+ 'table_id': 'suite-table',
+ 'table_headers': suite_rows_header,
+ 'table_row_blocks': suite_rows,
+ 'table_footer': suite_row_footer,
+ }
+
+ test_table_values = {
+ 'table_id': 'test-table',
+ 'table_headers': test_rows_header,
+ 'table_row_blocks': test_rows,
+ }
+
+ main_template = JINJA_ENVIRONMENT.get_template(
+ os.path.join('template', 'main.html'))
+
+ if local_output:
+ html_render = main_template.render( # pylint: disable=no-member
+ {
+ 'tb_values': [suite_table_values, test_table_values],
+ 'feedback_url': feedback_url(None),
+ })
+ return (html_render, None, None)
+ else:
+ dest = google_storage_helper.unique_name(
+ '%s_%s_%s' % (test_name, builder_name, build_number))
+ result_details_link = google_storage_helper.get_url_link(
+ dest, '%s/html' % bucket)
+ html_render = main_template.render( # pylint: disable=no-member
+ {
+ 'tb_values': [suite_table_values, test_table_values],
+ 'feedback_url': feedback_url(result_details_link),
+ })
+ return (html_render, dest, result_details_link)
+
+
+def result_details(json_path, test_name, cs_base_url, bucket=None,
+ builder_name=None, build_number=None, local_output=False):
+ """Get result details from json path and then convert results to html.
+
+ Args:
+    local_output: Whether this results file is written as a local file
+      instead of being uploaded to Google Storage.
+ """
+
+ with open(json_path) as json_file:
+ json_object = json.loads(json_file.read())
+
+  if 'per_iteration_data' not in json_object:
+ return 'Error: json file missing per_iteration_data.'
+
+ results_dict = collections.defaultdict(list)
+ for testsuite_run in json_object['per_iteration_data']:
+ for test, test_runs in testsuite_run.iteritems():
+ results_dict[test].extend(test_runs)
+ return results_to_html(results_dict, cs_base_url, bucket, test_name,
+ builder_name, build_number, local_output)
+
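+# Usage sketch (illustrative; the json path and test name are hypothetical):
+#   html, _, _ = result_details('output.json', 'content_shell_test_apk',
+#                               'http://cs.chromium.org', local_output=True)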
+
+def upload_to_google_bucket(html, bucket, dest):
+ with tempfile.NamedTemporaryFile(suffix='.html') as temp_file:
+ temp_file.write(html)
+ temp_file.flush()
+ return google_storage_helper.upload(
+ name=dest,
+ filepath=temp_file.name,
+ bucket='%s/html' % bucket,
+ content_type='text/html',
+ authenticated_link=True)
+
+
+def ui_screenshot_set(json_path):
+ with open(json_path) as json_file:
+ json_object = json.loads(json_file.read())
+  if 'per_iteration_data' not in json_object:
+    # result_details already reports this as an error; no need to duplicate.
+ return None
+ ui_screenshots = []
+ # pylint: disable=too-many-nested-blocks
+ for testsuite_run in json_object['per_iteration_data']:
+ for _, test_runs in testsuite_run.iteritems():
+ for test_run in test_runs:
+ if 'ui screenshot' in test_run['links']:
+ screenshot_link = test_run['links']['ui screenshot']
+ if screenshot_link.startswith('file:'):
+ with contextlib.closing(urllib.urlopen(screenshot_link)) as f:
+ test_screenshots = json.load(f)
+ else:
+          # Assume anything that isn't a file link is a Google Storage link.
+ screenshot_string = google_storage_helper.read_from_link(
+ screenshot_link)
+ if not screenshot_string:
+ logging.error('Bad screenshot link %s', screenshot_link)
+ continue
+          test_screenshots = json.loads(screenshot_string)
+ ui_screenshots.extend(test_screenshots)
+ # pylint: enable=too-many-nested-blocks
+
+ if ui_screenshots:
+ return json.dumps(ui_screenshots)
+ return None
+
+
+def upload_screenshot_set(json_path, test_name, bucket, builder_name,
+ build_number):
+ screenshot_set = ui_screenshot_set(json_path)
+ if not screenshot_set:
+ return None
+ dest = google_storage_helper.unique_name(
+ 'screenshots_%s_%s_%s' % (test_name, builder_name, build_number),
+ suffix='.json')
+ with tempfile.NamedTemporaryFile(suffix='.json') as temp_file:
+ temp_file.write(screenshot_set)
+ temp_file.flush()
+ return google_storage_helper.upload(
+ name=dest,
+ filepath=temp_file.name,
+ bucket='%s/json' % bucket,
+ content_type='application/json',
+ authenticated_link=True)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--json-file', help='Path of json file.')
+ parser.add_argument('--cs-base-url', help='Base url for code search.',
+ default='http://cs.chromium.org')
+ parser.add_argument('--bucket', help='Google storage bucket.', required=True)
+ parser.add_argument('--builder-name', help='Builder name.')
+ parser.add_argument('--build-number', help='Build number.')
+ parser.add_argument('--test-name', help='The name of the test.',
+ required=True)
+ parser.add_argument(
+ '-o', '--output-json',
+ help='(Swarming Merge Script API) '
+ 'Output JSON file to create.')
+ parser.add_argument(
+ '--build-properties',
+ help='(Swarming Merge Script API) '
+ 'Build property JSON file provided by recipes.')
+ parser.add_argument(
+ '--summary-json',
+ help='(Swarming Merge Script API) '
+ 'Summary of shard state running on swarming. '
+ '(Output of the swarming.py collect '
+ '--task-summary-json=XXX command.)')
+ parser.add_argument(
+ '--task-output-dir',
+ help='(Swarming Merge Script API) '
+ 'Directory containing all swarming task results.')
+ parser.add_argument(
+ 'positional', nargs='*',
+ help='output.json from shards.')
+
+ args = parser.parse_args()
+
+ if ((args.build_properties is None) ==
+ (args.build_number is None or args.builder_name is None)):
+    raise parser.error('Exactly one of build_properties or '
+                       '(build_number and builder_name) should be given.')
+
+ if (args.build_number is None) != (args.builder_name is None):
+    raise parser.error('args.build_number and args.builder_name '
+                       'have to be given together '
+                       'or not given at all.')
+
+ if len(args.positional) == 0 and args.json_file is None:
+ if args.output_json:
+ with open(args.output_json, 'w') as f:
+ json.dump({}, f)
+ return
+ elif len(args.positional) != 0 and args.json_file:
+ raise parser.error('Exactly one of args.positional and '
+ 'args.json_file should be given.')
+
+ if args.build_properties:
+ build_properties = json.loads(args.build_properties)
+    if ('buildnumber' not in build_properties or
+        'buildername' not in build_properties):
+ raise parser.error('Build number/builder name not specified.')
+ build_number = build_properties['buildnumber']
+ builder_name = build_properties['buildername']
+ elif args.build_number and args.builder_name:
+ build_number = args.build_number
+ builder_name = args.builder_name
+
+ if args.positional:
+ if len(args.positional) == 1:
+ json_file = args.positional[0]
+ else:
+ if args.output_json and args.summary_json:
+ standard_gtest_merge.standard_gtest_merge(
+ args.output_json, args.summary_json, args.positional)
+ json_file = args.output_json
+ elif not args.output_json:
+ raise Exception('output_json required by merge API is missing.')
+ else:
+ raise Exception('summary_json required by merge API is missing.')
+ elif args.json_file:
+ json_file = args.json_file
+
+ if not os.path.exists(json_file):
+ raise IOError('--json-file %s not found.' % json_file)
+
+  # The link to the result details presentation page is embedded in the page.
+ result_html_string, dest, result_details_link = result_details(
+ json_file, args.test_name, args.cs_base_url, args.bucket,
+ builder_name, build_number)
+
+ result_details_link_2 = upload_to_google_bucket(
+ result_html_string.encode('UTF-8'),
+ args.bucket, dest)
+ assert result_details_link == result_details_link_2, (
+      'Result details links do not match. The link returned by get_url_link'
+ ' should be the same as that returned by upload.')
+
+ ui_screenshot_set_link = upload_screenshot_set(json_file, args.test_name,
+ args.bucket, builder_name, build_number)
+
+ if ui_screenshot_set_link:
+ ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/'
+ ui_catalog_query = urllib.urlencode(
+ {'screenshot_source': ui_screenshot_set_link})
+ ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query)
+
+ if args.output_json:
+ with open(json_file) as original_json_file:
+ json_object = json.load(original_json_file)
+ json_object['links'] = {
+ 'result_details (logcats, flakiness links)': result_details_link
+ }
+
+ if ui_screenshot_set_link:
+ json_object['links']['ui screenshots'] = ui_screenshot_link
+
+ with open(args.output_json, 'w') as f:
+ json.dump(json_object, f)
+ else:
+ print 'Result Details: %s' % result_details_link
+
+ if ui_screenshot_set_link:
+      print 'UI Screenshots: %s' % ui_screenshot_link
+
+
+if __name__ == '__main__':
+ sys.exit(main())
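+
+# A minimal sketch of driving the conversion directly (paths and names are
+# illustrative; local_output=True skips the Google Storage upload):
+#
+#   html, _, _ = result_details('output.json', 'content_shell_test_apk',
+#                               'http://cs.chromium.org', local_output=True)
+#   with open('results.html', 'w') as f:
+#     f.write(html.encode('UTF-8'))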
diff --git a/deps/v8/build/android/pylib/results/report_results.py b/deps/v8/build/android/pylib/results/report_results.py
new file mode 100644
index 0000000000..e886b72985
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/report_results.py
@@ -0,0 +1,131 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+from pylib.utils import logging_utils
+
+
+def _LogToFile(results, test_type, suite_name):
+ """Log results to local files which can be used for aggregation later."""
+ log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+ if not os.path.exists(log_file_path):
+ os.mkdir(log_file_path)
+ full_file_name = os.path.join(
+ log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+ if not os.path.exists(full_file_name):
+ with open(full_file_name, 'w') as log_file:
+ print >> log_file, '\n%s results for %s build %s:' % (
+ test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+ os.environ.get('BUILDBOT_BUILDNUMBER'))
+
+ logging.info('Writing results to %s.', full_file_name)
+ with open(full_file_name, 'a') as log_file:
+ shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+ print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
+ results.GetShortForm())
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+ flakiness_server):
+ """Upload results to the flakiness dashboard"""
+ logging.info('Upload results for test type "%s", test package "%s" to %s',
+ test_type, test_package, flakiness_server)
+
+ try:
+ # TODO(jbudorick): remove Instrumentation once instrumentation tests
+ # switch to platform mode.
+ if test_type in ('instrumentation', 'Instrumentation'):
+ if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+ assert test_package in ['ContentShellTest',
+ 'ChromePublicTest',
+ 'ChromeSyncShellTest',
+ 'SystemWebViewShellLayoutTest',
+ 'WebViewInstrumentationTest']
+ dashboard_test_type = ('%s_instrumentation_tests' %
+ test_package.lower().rstrip('test'))
+ # Downstream server.
+ else:
+ dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+ elif test_type == 'gtest':
+ dashboard_test_type = test_package
+
+ else:
+ logging.warning('Invalid test type')
+ return
+
+ results_uploader.Upload(
+ results, flakiness_server, dashboard_test_type)
+
+ except Exception: # pylint: disable=broad-except
+ logging.exception('Failure while logging to %s', flakiness_server)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+ flakiness_server=None):
+ """Log the tests results for the test suite.
+
+ The results will be logged three different ways:
+ 1. Log to stdout.
+ 2. Log to local files for aggregating multiple test steps
+ (on buildbots only).
+ 3. Log to flakiness dashboard (on buildbots only).
+
+ Args:
+ results: An instance of TestRunResults object.
+ test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+ test_package: Test package name (e.g. 'ipc_tests' for gtests,
+ 'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+        (e.g. ['Feature', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+        dashboard at this URL.
+ """
+ # pylint doesn't like how colorama set up its color enums.
+ # pylint: disable=no-member
+ black_on_white = (logging_utils.BACK.WHITE, logging_utils.FORE.BLACK)
+ with logging_utils.OverrideColor(logging.CRITICAL, black_on_white):
+ if not results.DidRunPass():
+ logging.critical('*' * 80)
+ logging.critical('Detailed Logs')
+ logging.critical('*' * 80)
+ for line in results.GetLogs().splitlines():
+ logging.critical(line)
+ logging.critical('*' * 80)
+ logging.critical('Summary')
+ logging.critical('*' * 80)
+ for line in results.GetGtestForm().splitlines():
+ color = black_on_white
+ if 'FAILED' in line:
+ # Red on white, dim.
+ color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED,
+ logging_utils.STYLE.DIM)
+ elif 'PASSED' in line:
+ # Green on white, dim.
+ color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN,
+ logging_utils.STYLE.DIM)
+ with logging_utils.OverrideColor(logging.CRITICAL, color):
+ logging.critical(line)
+ logging.critical('*' * 80)
+
+ if os.environ.get('BUILDBOT_BUILDERNAME'):
+ # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+ if annotation and len(annotation) == 1:
+ suite_name = annotation[0]
+ else:
+ suite_name = test_package
+ _LogToFile(results, test_type, suite_name)
+
+ if flakiness_server:
+ _LogToFlakinessDashboard(results, test_type, test_package,
+ flakiness_server)
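+
+# A minimal usage sketch (values are illustrative; |results| is a pylib
+# TestRunResults instance):
+#
+#   report_results.LogFull(
+#       results, test_type='Instrumentation',
+#       test_package='ChromePublicTest', annotation=['SmallTest'],
+#       flakiness_server=None)  # None skips the dashboard upload.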
diff --git a/deps/v8/build/android/pylib/symbols/__init__.py b/deps/v8/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/__init__.py
diff --git a/deps/v8/build/android/pylib/symbols/apk_lib_dump.py b/deps/v8/build/android/pylib/symbols/apk_lib_dump.py
new file mode 100755
index 0000000000..956ee07702
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/apk_lib_dump.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump shared library information from an APK file.
+
+This script is used to dump which *uncompressed* native shared libraries an
+APK contains, as well as their position within the file. This is mostly useful
+to diagnose logcat and tombstone symbolization issues when the libraries are
+loaded directly from the APK at runtime.
+
+The default format will print one line per uncompressed shared library with the
+following format:
+
+ 0x<start-offset> 0x<end-offset> 0x<file-size> <file-path>
+
+The --format=python option can be used to dump the same information that is
+easy to use in a Python script, e.g. with a line like:
+
+ (0x<start-offset>, 0x<end-offset>, 0x<file-size>, <file-path>),
+"""
+
+import argparse
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+
+from pylib.symbols import apk_native_libs
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ parser.add_argument('apk', help='Input APK file path.')
+
+ parser.add_argument('--format', help='Select output format',
+ default='default', choices=['default', 'python'])
+
+ args = parser.parse_args()
+
+ apk_reader = apk_native_libs.ApkReader(args.apk)
+ lib_map = apk_native_libs.ApkNativeLibraries(apk_reader)
+ for lib_path, file_offset, file_size in lib_map.GetDumpList():
+ if args.format == 'python':
+ print '(0x%08x, 0x%08x, 0x%08x, \'%s\'),' % (
+ file_offset, file_offset + file_size, file_size, lib_path)
+ else:
+ print '0x%08x 0x%08x 0x%08x %s' % (
+ file_offset, file_offset + file_size, file_size, lib_path)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
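+
+# Example invocation (offsets are illustrative):
+#
+#   $ build/android/pylib/symbols/apk_lib_dump.py ChromePublic.apk
+#   0x01200000 0x01a3b000 0x0083b000 lib/armeabi-v7a/libchrome.so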
diff --git a/deps/v8/build/android/pylib/symbols/apk_native_libs.py b/deps/v8/build/android/pylib/symbols/apk_native_libs.py
new file mode 100644
index 0000000000..c4af202906
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/apk_native_libs.py
@@ -0,0 +1,419 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import struct
+import zipfile
+
+# The default zipfile python module cannot open APKs properly, but this
+# fixes it. Note that simply importing this file is sufficient to
+# ensure that zip works correctly for all other modules. See:
+# http://bugs.python.org/issue14315
+# https://hg.python.org/cpython/rev/6dd5e9556a60#l2.8
+def _PatchZipFile():
+ # pylint: disable=protected-access
+ oldDecodeExtra = zipfile.ZipInfo._decodeExtra
+ def decodeExtra(self):
+ try:
+ oldDecodeExtra(self)
+ except struct.error:
+ pass
+ zipfile.ZipInfo._decodeExtra = decodeExtra
+_PatchZipFile()
+
+
+class ApkZipInfo(object):
+ """Models a single file entry from an ApkReader.
+
+ This is very similar to the zipfile.ZipInfo class. It provides a few
+ properties describing the entry:
+ - filename (same as ZipInfo.filename)
+ - file_size (same as ZipInfo.file_size)
+  - compress_size (same as ZipInfo.compress_size)
+ - file_offset (note: not provided by ZipInfo)
+
+ And a few useful methods: IsCompressed() and IsElfFile().
+
+ Entries can be created by using ApkReader() methods.
+ """
+ def __init__(self, zip_file, zip_info):
+ """Construct instance. Do not call this directly. Use ApkReader methods."""
+ self._file = zip_file
+ self._info = zip_info
+ self._file_offset = None
+
+ @property
+ def filename(self):
+ """Entry's file path within APK."""
+ return self._info.filename
+
+ @property
+ def file_size(self):
+ """Entry's extracted file size in bytes."""
+ return self._info.file_size
+
+ @property
+ def compress_size(self):
+ """Entry' s compressed file size in bytes."""
+ return self._info.compress_size
+
+ @property
+ def file_offset(self):
+ """Entry's starting file offset in the APK."""
+ if self._file_offset is None:
+ self._file_offset = self._ZipFileOffsetFromLocalHeader(
+ self._file.fp, self._info.header_offset)
+ return self._file_offset
+
+ def __repr__(self):
+ """Convert to string for debugging."""
+ return 'ApkZipInfo["%s",size=0x%x,compressed=0x%x,offset=0x%x]' % (
+ self.filename, self.file_size, self.compress_size, self.file_offset)
+
+ def IsCompressed(self):
+ """Returns True iff the entry is compressed."""
+ return self._info.compress_type != zipfile.ZIP_STORED
+
+ def IsElfFile(self):
+ """Returns True iff the entry is an ELF file."""
+ with self._file.open(self._info, 'r') as f:
+ return f.read(4) == '\x7fELF'
+
+ @staticmethod
+ def _ZipFileOffsetFromLocalHeader(fd, local_header_offset):
+ """Return a file's start offset from its zip archive local header.
+
+ Args:
+ fd: Input file object.
+ local_header_offset: Local header offset (from its ZipInfo entry).
+ Returns:
+ file start offset.
+ """
+ FILE_NAME_LEN_OFFSET = 26
+ FILE_NAME_OFFSET = 30
+ fd.seek(local_header_offset + FILE_NAME_LEN_OFFSET)
+ file_name_len = struct.unpack('H', fd.read(2))[0]
+ extra_field_len = struct.unpack('H', fd.read(2))[0]
+ file_offset = (local_header_offset + FILE_NAME_OFFSET +
+ file_name_len + extra_field_len)
+ return file_offset
+
+
+class ApkReader(object):
+ """A convenience class used to read the content of APK files.
+
+ Its design is very similar to the one from zipfile.ZipFile, except
+  that it returns ApkZipInfo entries which provide a |file_offset|
+ property that can be used to know where a given file is located inside
+ the archive.
+
+ It is also easy to mock for unit-testing (see MockApkReader in
+ apk_utils_unittest.py) without creating any files on disk.
+
+ Usage is the following:
+ - Create an instance using a with statement (for proper unit-testing).
+ - Call ListEntries() to list all entries in the archive. This returns
+ a list of ApkZipInfo entries.
+ - Or call FindEntry() corresponding to a given path within the archive.
+
+ For example:
+ with ApkReader(input_apk_path) as reader:
+ info = reader.FindEntry('lib/armeabi-v7a/libfoo.so')
+ if info.IsCompressed() or not info.IsElfFile():
+        raise Exception('Invalid library path')
+
+ The ApkZipInfo can be used to inspect the entry's metadata, or read its
+ content with the ReadAll() method. See its documentation for all details.
+ """
+ def __init__(self, apk_path):
+ """Initialize instance."""
+ self._zip_file = zipfile.ZipFile(apk_path, 'r')
+ self._path = apk_path
+
+ def __enter__(self):
+ """Python context manager entry."""
+ return self
+
+ def __exit__(self, *kwargs):
+ """Python context manager exit."""
+ self.Close()
+
+ @property
+ def path(self):
+ """The corresponding input APK path."""
+ return self._path
+
+ def Close(self):
+ """Close the reader (and underlying ZipFile instance)."""
+ self._zip_file.close()
+
+ def ListEntries(self):
+ """Return a list of ApkZipInfo entries for this APK."""
+ result = []
+ for info in self._zip_file.infolist():
+ result.append(ApkZipInfo(self._zip_file, info))
+ return result
+
+ def FindEntry(self, file_path):
+ """Return an ApkZipInfo instance for a given archive file path.
+
+ Args:
+ file_path: zip file path.
+ Return:
+ A new ApkZipInfo entry on success.
+ Raises:
+ KeyError on failure (entry not found).
+ """
+ info = self._zip_file.getinfo(file_path)
+ return ApkZipInfo(self._zip_file, info)
+
+
+
+class ApkNativeLibraries(object):
+ """A class for the list of uncompressed shared libraries inside an APK.
+
+ Create a new instance by passing the path to an input APK, then use
+ the FindLibraryByOffset() method to find the native shared library path
+ corresponding to a given file offset.
+
+ GetAbiList() and GetLibrariesList() can also be used to inspect
+ the state of the instance.
+ """
+ def __init__(self, apk_reader):
+ """Initialize instance.
+
+ Args:
+ apk_reader: An ApkReader instance corresponding to the input APK.
+ """
+ self._native_libs = []
+ for entry in apk_reader.ListEntries():
+ # Chromium uses so-called 'placeholder' native shared libraries
+ # that have a size of 0, and are only used to deal with bugs in
+ # older Android system releases (they are never loaded and cannot
+ # appear in stack traces). Ignore these here to avoid generating
+ # confusing results.
+ if entry.file_size == 0:
+ continue
+
+ # Only uncompressed libraries can appear in stack traces.
+ if entry.IsCompressed():
+ continue
+
+ # Only consider files within lib/ and with a filename ending with .so
+ # at the moment. NOTE: Do not require a 'lib' prefix, since that would
+ # prevent finding the 'crazy.libXXX.so' libraries used by Chromium.
+ if (not entry.filename.startswith('lib/') or
+ not entry.filename.endswith('.so')):
+ continue
+
+ lib_path = entry.filename
+
+ self._native_libs.append(
+ (lib_path, entry.file_offset, entry.file_offset + entry.file_size))
+
+ def IsEmpty(self):
+ """Return true iff the list is empty."""
+ return not bool(self._native_libs)
+
+ def GetLibraries(self):
+ """Return the list of all library paths in this instance."""
+ return sorted([x[0] for x in self._native_libs])
+
+ def GetDumpList(self):
+ """Retrieve full library map.
+
+ Returns:
+ A list of (lib_path, file_offset, file_size) tuples, sorted
+ in increasing |file_offset| values.
+ """
+ result = []
+ for entry in self._native_libs:
+ lib_path, file_start, file_end = entry
+ result.append((lib_path, file_start, file_end - file_start))
+
+    return sorted(result, key=lambda x: x[1])
+
+ def FindLibraryByOffset(self, file_offset):
+ """Find the native library at a given file offset.
+
+ Args:
+ file_offset: File offset within the original APK.
+ Returns:
+      Returns a (lib_path, lib_offset) tuple on success, or (None, 0) on
+      failure. Note that lib_path is the entry path as stored in the APK
+      (including its 'lib/$ABI/' subdirectory), and lib_offset is the offset
+      of |file_offset| within that library.
+ """
+ for lib_path, start_offset, end_offset in self._native_libs:
+ if file_offset >= start_offset and file_offset < end_offset:
+ return (lib_path, file_offset - start_offset)
+
+ return (None, 0)
+
+
+class ApkLibraryPathTranslator(object):
+ """Translates APK file paths + byte offsets into library path + offset.
+
+ The purpose of this class is to translate a native shared library path
+ that points to an APK into a new device-specific path that points to a
+ native shared library, as if it was installed there. E.g.:
+
+ ('/data/data/com.example.app-1/base.apk', 0x123be00)
+
+ would be translated into:
+
+ ('/data/data/com.example.app-1/base.apk!lib/libfoo.so', 0x3be00)
+
+  assuming the original APK (installed as base.apk) contains an uncompressed
+  shared library under lib/armeabi-v7a/libfoo.so at offset 0x1200000.
+
+ Note that the virtual device path after the ! doesn't necessarily match
+ the path inside the .apk. This doesn't really matter for the rest of
+ the symbolization functions since only the file's base name can be used
+ to find the corresponding file on the host.
+
+ Usage is the following:
+
+ 1/ Create new instance.
+
+ 2/ Call AddHostApk() one or several times to add the host path
+     of an APK, its package name, and its device-installed name.
+
+ 3/ Call TranslatePath() to translate a (path, offset) tuple corresponding
+ to an on-device APK, into the corresponding virtual device library
+ path and offset.
+ """
+
+ # Depending on the version of the system, a non-system APK might be installed
+ # on a path that looks like the following:
+ #
+ # * /data/..../<package_name>-<number>.apk, where <number> is used to
+ # distinguish several versions of the APK during package updates.
+ #
+ # * /data/..../<package_name>-<suffix>/base.apk, where <suffix> is a
+ # string of random ASCII characters following the dash after the
+ # package name. This serves as a way to distinguish the installation
+ # paths during package update, and randomize its final location
+ # (to prevent apps from hard-coding the paths to other apps).
+ #
+ # Note that the 'base.apk' name comes from the system.
+ #
+ # * /data/.../<package_name>-<suffix>/<split_name>.apk, where <suffix>
+  #   is the same as above, and <split_name> is the name of an app bundle
+ # split APK.
+ #
+ # System APKs are installed on paths that look like /system/app/Foo.apk
+ # but this class ignores them intentionally.
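+  #
+  # For example (illustrative paths), _MatchApkDeviceInstallPath() maps both
+  #   /data/app/com.example.app-2.apk
+  #   /data/app/com.example.app-xYz12Q/base.apk
+  # to ('com.example.app', 'base.apk').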
+
+  # Compiled regular expression for the first format above.
+ _RE_APK_PATH_1 = re.compile(
+ r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<version>[0-9]+)\.apk')
+
+ # Compiled regular expression for the second and third formats above.
+ _RE_APK_PATH_2 = re.compile(
+ r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<suffix>[^/]+)/' +
+ r'(?P<apk_name>.+\.apk)')
+
+ def __init__(self):
+ """Initialize instance. Call AddHostApk() to add host apk file paths."""
+ self._path_map = {} # Maps (package_name, apk_name) to host-side APK path.
+ self._libs_map = {} # Maps APK host path to ApkNativeLibrariesMap instance.
+
+ def AddHostApk(self, package_name, native_libs, device_apk_name=None):
+ """Add a file path to the host APK search list.
+
+ Args:
+ package_name: Corresponding apk package name.
+ native_libs: ApkNativeLibraries instance for the corresponding APK.
+ device_apk_name: Optional expected name of the installed APK on the
+        device. This is only useful when symbolizing app bundles that run on
+        Android L+; it is ignored in other cases.
+ """
+ if native_libs.IsEmpty():
+ logging.debug('Ignoring host APK without any uncompressed native ' +
+ 'libraries: %s', device_apk_name)
+ return
+
+ # If the APK name is not provided, use the default of 'base.apk'. This
+ # will be ignored if we find <package_name>-<number>.apk file paths
+ # in the input, but will work properly for Android L+, as long as we're
+ # not using Android app bundles.
+ device_apk_name = device_apk_name or 'base.apk'
+
+ key = "%s/%s" % (package_name, device_apk_name)
+ if key in self._libs_map:
+ raise KeyError('There is already an APK associated with (%s)' % key)
+
+ self._libs_map[key] = native_libs
+
+ @staticmethod
+ def _MatchApkDeviceInstallPath(apk_path):
+ """Check whether a given path matches an installed APK device file path.
+
+ Args:
+ apk_path: Device-specific file path.
+ Returns:
+      On success, a (package_name, apk_name) tuple. On failure, (None, None).
+ """
+ m = ApkLibraryPathTranslator._RE_APK_PATH_1.match(apk_path)
+ if m:
+ return (m.group('package_name'), 'base.apk')
+
+ m = ApkLibraryPathTranslator._RE_APK_PATH_2.match(apk_path)
+ if m:
+ return (m.group('package_name'), m.group('apk_name'))
+
+ return (None, None)
+
+ def TranslatePath(self, apk_path, apk_offset):
+ """Translate a potential apk file path + offset into library path + offset.
+
+ Args:
+ apk_path: Library or apk file path on the device (e.g.
+ '/data/data/com.example.app-XSAHKSJH/base.apk').
+ apk_offset: Byte offset within the library or apk.
+
+ Returns:
+ a new (lib_path, lib_offset) tuple. If |apk_path| points to an APK,
+ then this function searches inside the corresponding host-side APKs
+ (added with AddHostApk() above) for the corresponding uncompressed
+      native shared library at |apk_offset|; if found, this returns a new
+ device-specific path corresponding to a virtual installation of said
+ library with an adjusted offset.
+
+ Otherwise, just return the original (apk_path, apk_offset) values.
+ """
+ if not apk_path.endswith('.apk'):
+ return (apk_path, apk_offset)
+
+ apk_package, apk_name = self._MatchApkDeviceInstallPath(apk_path)
+ if not apk_package:
+ return (apk_path, apk_offset)
+
+ key = '%s/%s' % (apk_package, apk_name)
+ native_libs = self._libs_map.get(key)
+ if not native_libs:
+ logging.debug('Unknown %s package', key)
+ return (apk_path, apk_offset)
+
+ lib_name, new_offset = native_libs.FindLibraryByOffset(apk_offset)
+ if not lib_name:
+      logging.debug('Invalid offset in package %s: %d', key, apk_offset)
+ return (apk_path, apk_offset)
+
+ lib_name = os.path.basename(lib_name)
+
+    # Some libraries are stored with a crazy. prefix inside the APK; this is
+    # done to prevent the PackageManager from extracting the libraries at
+    # installation time when running on pre-Android M systems, where the
+    # system linker cannot load libraries directly from APKs.
+ crazy_prefix = 'crazy.'
+ if lib_name.startswith(crazy_prefix):
+ lib_name = lib_name[len(crazy_prefix):]
+
+ # Put this in a fictional lib sub-directory for good measure.
+ new_path = '%s!lib/%s' % (apk_path, lib_name)
+
+ return (new_path, new_offset)
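+
+# A minimal usage sketch (hypothetical APK path and offset):
+#
+#   reader = ApkReader('/path/to/ChromePublic.apk')
+#   libs = ApkNativeLibraries(reader)
+#   translator = ApkLibraryPathTranslator()
+#   translator.AddHostApk('com.example.app', libs)
+#   print translator.TranslatePath(
+#       '/data/data/com.example.app-1/base.apk', 0x123be00)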
diff --git a/deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py b/deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py
new file mode 100644
index 0000000000..416918d8a1
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py
@@ -0,0 +1,396 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import unittest
+
+from pylib.symbols import apk_native_libs
+
+# Mock ELF-like data
+MOCK_ELF_DATA = '\x7fELFFFFFFFFFFFFFFFF'
+
+class MockApkZipInfo(object):
+ """A mock ApkZipInfo class, returned by MockApkReaderFactory instances."""
+ def __init__(self, filename, file_size, compress_size, file_offset,
+ file_data):
+ self.filename = filename
+ self.file_size = file_size
+ self.compress_size = compress_size
+ self.file_offset = file_offset
+ self._data = file_data
+
+ def __repr__(self):
+ """Convert to string for debugging."""
+ return 'MockApkZipInfo["%s",size=%d,compressed=%d,offset=%d]' % (
+ self.filename, self.file_size, self.compress_size, self.file_offset)
+
+ def IsCompressed(self):
+ """Returns True iff the entry is compressed."""
+ return self.file_size != self.compress_size
+
+ def IsElfFile(self):
+ """Returns True iff the entry is an ELF file."""
+ if not self._data or len(self._data) < 4:
+ return False
+
+ return self._data[0:4] == '\x7fELF'
+
+
+class MockApkReader(object):
+ """A mock ApkReader instance used during unit-testing.
+
+ Do not use directly, but use a MockApkReaderFactory context, as in:
+
+ with MockApkReaderFactory() as mock:
+ mock.AddTestEntry(file_path, file_size, compress_size, file_data)
+ ...
+
+ # Actually returns the mock instance.
+ apk_reader = apk_native_libs.ApkReader('/some/path.apk')
+ """
+ def __init__(self, apk_path='test.apk'):
+ """Initialize instance."""
+ self._entries = []
+ self._fake_offset = 0
+ self._path = apk_path
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *kwarg):
+ self.Close()
+ return
+
+ @property
+ def path(self):
+ return self._path
+
+ def AddTestEntry(self, filepath, file_size, compress_size, file_data):
+ """Add a new entry to the instance for unit-tests.
+
+ Do not call this directly, use the AddTestEntry() method on the parent
+ MockApkReaderFactory instance.
+
+ Args:
+ filepath: archive file path.
+ file_size: uncompressed file size in bytes.
+ compress_size: compressed size in bytes.
+ file_data: file data to be checked by IsElfFile()
+
+    Note that file_data can be None, or its size can actually be smaller
+    than |compress_size|, when used during unit-testing.
+ """
+ self._entries.append(MockApkZipInfo(filepath, file_size, compress_size,
+ self._fake_offset, file_data))
+ self._fake_offset += compress_size
+
+ def Close(self): # pylint: disable=no-self-use
+ """Close this reader instance."""
+ return
+
+ def ListEntries(self):
+ """Return a list of MockApkZipInfo instances for this input APK."""
+ return self._entries
+
+ def FindEntry(self, file_path):
+ """Find the MockApkZipInfo instance corresponds to a given file path."""
+ for entry in self._entries:
+ if entry.filename == file_path:
+ return entry
+ raise KeyError('Could not find mock zip archive member for: ' + file_path)
+
+
+class MockApkReaderTest(unittest.TestCase):
+
+ def testEmpty(self):
+ with MockApkReader() as reader:
+ entries = reader.ListEntries()
+ self.assertTrue(len(entries) == 0)
+ with self.assertRaises(KeyError):
+ reader.FindEntry('non-existent-entry.txt')
+
+ def testSingleEntry(self):
+ with MockApkReader() as reader:
+ reader.AddTestEntry('some-path/some-file', 20000, 12345, file_data=None)
+ entries = reader.ListEntries()
+ self.assertTrue(len(entries) == 1)
+ entry = entries[0]
+ self.assertEqual(entry.filename, 'some-path/some-file')
+ self.assertEqual(entry.file_size, 20000)
+ self.assertEqual(entry.compress_size, 12345)
+ self.assertTrue(entry.IsCompressed())
+
+ entry2 = reader.FindEntry('some-path/some-file')
+ self.assertEqual(entry, entry2)
+
+ def testMultipleEntries(self):
+ with MockApkReader() as reader:
+ _ENTRIES = {
+ 'foo.txt': (1024, 1024, 'FooFooFoo'),
+          'lib/bar/libcode.so': (16000, 3240, '\x7fELFFFFFFFFFFFF'),
+ }
+ for path, props in _ENTRIES.iteritems():
+ reader.AddTestEntry(path, props[0], props[1], props[2])
+
+ entries = reader.ListEntries()
+ self.assertEqual(len(entries), len(_ENTRIES))
+ for path, props in _ENTRIES.iteritems():
+ entry = reader.FindEntry(path)
+ self.assertEqual(entry.filename, path)
+ self.assertEqual(entry.file_size, props[0])
+ self.assertEqual(entry.compress_size, props[1])
+
+
+class ApkNativeLibrariesTest(unittest.TestCase):
+
+ def setUp(self):
+ logging.getLogger().setLevel(logging.ERROR)
+
+ def testEmptyApk(self):
+ with MockApkReader() as reader:
+ libs_map = apk_native_libs.ApkNativeLibraries(reader)
+ self.assertTrue(libs_map.IsEmpty())
+ self.assertEqual(len(libs_map.GetLibraries()), 0)
+ lib_path, lib_offset = libs_map.FindLibraryByOffset(0)
+ self.assertIsNone(lib_path)
+ self.assertEqual(lib_offset, 0)
+
+ def testSimpleApk(self):
+ with MockApkReader() as reader:
+ _MOCK_ENTRIES = [
+ # Top-level library should be ignored.
+ ('libfoo.so', 1000, 1000, MOCK_ELF_DATA, False),
+ # Library not under lib/ should be ignored.
+ ('badlib/test-abi/libfoo2.so', 1001, 1001, MOCK_ELF_DATA, False),
+ # Library under lib/<abi>/ but without .so extension should be ignored.
+ ('lib/test-abi/libfoo4.so.1', 1003, 1003, MOCK_ELF_DATA, False),
+ # Library under lib/<abi>/ with .so suffix, but compressed -> ignored.
+ ('lib/test-abi/libfoo5.so', 1004, 1003, MOCK_ELF_DATA, False),
+ # First correct library
+ ('lib/test-abi/libgood1.so', 1005, 1005, MOCK_ELF_DATA, True),
+ # Second correct library: support sub-directories
+ ('lib/test-abi/subdir/libgood2.so', 1006, 1006, MOCK_ELF_DATA, True),
+ # Third correct library, no lib prefix required
+ ('lib/test-abi/crazy.libgood3.so', 1007, 1007, MOCK_ELF_DATA, True),
+ ]
+ file_offsets = []
+ prev_offset = 0
+ for ent in _MOCK_ENTRIES:
+ reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3])
+ file_offsets.append(prev_offset)
+ prev_offset += ent[2]
+
+ libs_map = apk_native_libs.ApkNativeLibraries(reader)
+ self.assertFalse(libs_map.IsEmpty())
+ self.assertEqual(libs_map.GetLibraries(), [
+ 'lib/test-abi/crazy.libgood3.so',
+ 'lib/test-abi/libgood1.so',
+ 'lib/test-abi/subdir/libgood2.so',
+ ])
+
+ BIAS = 10
+ for mock_ent, file_offset in zip(_MOCK_ENTRIES, file_offsets):
+ if mock_ent[4]:
+ lib_path, lib_offset = libs_map.FindLibraryByOffset(
+ file_offset + BIAS)
+ self.assertEqual(lib_path, mock_ent[0])
+ self.assertEqual(lib_offset, BIAS)
+
+
+ def testMultiAbiApk(self):
+ with MockApkReader() as reader:
+ _MOCK_ENTRIES = [
+ ('lib/abi1/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
+ ('lib/abi2/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
+ ]
+ for ent in _MOCK_ENTRIES:
+ reader.AddTestEntry(ent[0], ent[1], ent[2], ent[3])
+
+ libs_map = apk_native_libs.ApkNativeLibraries(reader)
+ self.assertFalse(libs_map.IsEmpty())
+ self.assertEqual(libs_map.GetLibraries(), [
+ 'lib/abi1/libfoo.so', 'lib/abi2/libfoo.so'])
+
+ lib1_name, lib1_offset = libs_map.FindLibraryByOffset(10)
+ self.assertEqual(lib1_name, 'lib/abi1/libfoo.so')
+ self.assertEqual(lib1_offset, 10)
+
+ lib2_name, lib2_offset = libs_map.FindLibraryByOffset(1000)
+ self.assertEqual(lib2_name, 'lib/abi2/libfoo.so')
+ self.assertEqual(lib2_offset, 0)
+
+
+class MockApkNativeLibraries(apk_native_libs.ApkNativeLibraries):
+ """A mock ApkNativeLibraries instance that can be used as input to
+ ApkLibraryPathTranslator without creating an ApkReader instance.
+
+ Create a new instance, then call AddTestEntry or AddTestEntries
+ as many times as necessary, before using it as a regular
+ ApkNativeLibraries instance.
+ """
+ # pylint: disable=super-init-not-called
+ def __init__(self):
+ self._native_libs = []
+
+ # pylint: enable=super-init-not-called
+
+ def AddTestEntry(self, lib_path, file_offset, file_size):
+ """Add a new test entry.
+
+    Args:
+      lib_path: Library path (e.g. 'lib/armeabi-v7a/libfoo.so').
+      file_offset: Entry file offset within the APK (e.g. 0x10000).
+      file_size: Entry file size in bytes (e.g. 0x2000).
+ """
+ self._native_libs.append((lib_path, file_offset, file_offset + file_size))
+
+ def AddTestEntries(self, entries):
+ """Add a list of new test entries.
+
+ Args:
+ entries: A list of (library-path, file-offset, file-size) values.
+ """
+ for entry in entries:
+ self.AddTestEntry(entry[0], entry[1], entry[2])
+
+
+class MockApkNativeLibrariesTest(unittest.TestCase):
+
+ def testEmptyInstance(self):
+ mock = MockApkNativeLibraries()
+ self.assertTrue(mock.IsEmpty())
+ self.assertEqual(mock.GetLibraries(), [])
+ self.assertEqual(mock.GetDumpList(), [])
+
+ def testAddTestEntry(self):
+ mock = MockApkNativeLibraries()
+ mock.AddTestEntry('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000)
+ mock.AddTestEntry('lib/x86/libzoo.so', 0x10000, 0x10000)
+ mock.AddTestEntry('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000)
+ self.assertFalse(mock.IsEmpty())
+ self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so',
+ 'lib/armeabi-v7a/libfoo.so',
+ 'lib/x86/libzoo.so'])
+ self.assertEqual(mock.GetDumpList(), [
+ ('lib/x86/libzoo.so', 0x10000, 0x10000),
+ ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+ ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+ ])
+
+ def testAddTestEntries(self):
+ mock = MockApkNativeLibraries()
+ mock.AddTestEntries([
+ ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+ ('lib/x86/libzoo.so', 0x10000, 0x10000),
+ ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+ ])
+ self.assertFalse(mock.IsEmpty())
+ self.assertEqual(mock.GetLibraries(), ['lib/armeabi-v7a/libbar.so',
+ 'lib/armeabi-v7a/libfoo.so',
+ 'lib/x86/libzoo.so'])
+ self.assertEqual(mock.GetDumpList(), [
+ ('lib/x86/libzoo.so', 0x10000, 0x10000),
+ ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
+ ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
+ ])
+
+
+class ApkLibraryPathTranslatorTest(unittest.TestCase):
+
+ def _CheckUntranslated(self, translator, path, offset):
+ """Check that a given (path, offset) is not modified by translation."""
+ self.assertEqual(translator.TranslatePath(path, offset), (path, offset))
+
+
+ def _CheckTranslated(self, translator, path, offset, new_path, new_offset):
+ """Check that (path, offset) is translated into (new_path, new_offset)."""
+ self.assertEqual(translator.TranslatePath(path, offset),
+ (new_path, new_offset))
+
+ def testEmptyInstance(self):
+ translator = apk_native_libs.ApkLibraryPathTranslator()
+ self._CheckUntranslated(
+ translator, '/data/data/com.example.app-1/base.apk', 0x123456)
+
+ def testSimpleApk(self):
+ mock_libs = MockApkNativeLibraries()
+ mock_libs.AddTestEntries([
+ ('lib/test-abi/libfoo.so', 200, 2000),
+ ('lib/test-abi/libbar.so', 3200, 3000),
+ ('lib/test-abi/crazy.libzoo.so', 6200, 2000),
+ ])
+ translator = apk_native_libs.ApkLibraryPathTranslator()
+ translator.AddHostApk('com.example.app', mock_libs)
+
+ # Offset is within the first uncompressed library
+ self._CheckTranslated(
+ translator,
+ '/data/data/com.example.app-9.apk', 757,
+ '/data/data/com.example.app-9.apk!lib/libfoo.so', 557)
+
+ # Offset is within the second compressed library.
+ self._CheckUntranslated(
+ translator,
+ '/data/data/com.example.app-9/base.apk', 2800)
+
+ # Offset is within the third uncompressed library.
+ self._CheckTranslated(
+ translator,
+ '/data/data/com.example.app-1/base.apk', 3628,
+ '/data/data/com.example.app-1/base.apk!lib/libbar.so', 428)
+
+ # Offset is within the fourth uncompressed library with crazy. prefix
+ self._CheckTranslated(
+ translator,
+ '/data/data/com.example.app-XX/base.apk', 6500,
+ '/data/data/com.example.app-XX/base.apk!lib/libzoo.so', 300)
+
+ # Out-of-bounds apk offset.
+ self._CheckUntranslated(
+ translator,
+ '/data/data/com.example.app-1/base.apk', 10000)
+
+ # Invalid package name.
+ self._CheckUntranslated(
+ translator, '/data/data/com.example2.app-1/base.apk', 757)
+
+ # Invalid apk name.
+ self._CheckUntranslated(
+ translator, '/data/data/com.example.app-2/not-base.apk', 100)
+
+ # Invalid file extensions.
+ self._CheckUntranslated(
+ translator, '/data/data/com.example.app-2/base', 100)
+
+ self._CheckUntranslated(
+ translator, '/data/data/com.example.app-2/base.apk.dex', 100)
+
+ def testBundleApks(self):
+ mock_libs1 = MockApkNativeLibraries()
+ mock_libs1.AddTestEntries([
+ ('lib/test-abi/libfoo.so', 200, 2000),
+ ('lib/test-abi/libbbar.so', 3200, 3000),
+ ])
+ mock_libs2 = MockApkNativeLibraries()
+ mock_libs2.AddTestEntries([
+ ('lib/test-abi/libzoo.so', 200, 2000),
+ ('lib/test-abi/libtool.so', 3000, 4000),
+ ])
+ translator = apk_native_libs.ApkLibraryPathTranslator()
+ translator.AddHostApk('com.example.app', mock_libs1, 'base-master.apk')
+ translator.AddHostApk('com.example.app', mock_libs2, 'feature-master.apk')
+
+ self._CheckTranslated(
+ translator,
+ '/data/app/com.example.app-XUIYIUW/base-master.apk', 757,
+ '/data/app/com.example.app-XUIYIUW/base-master.apk!lib/libfoo.so', 557)
+
+ self._CheckTranslated(
+ translator,
+ '/data/app/com.example.app-XUIYIUW/feature-master.apk', 3200,
+ '/data/app/com.example.app-XUIYIUW/feature-master.apk!lib/libtool.so',
+ 200)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/symbols/deobfuscator.py b/deps/v8/build/android/pylib/symbols/deobfuscator.py
new file mode 100644
index 0000000000..ac4ff7e4b4
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/deobfuscator.py
@@ -0,0 +1,165 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import threading
+import time
+import uuid
+
+from devil.utils import reraiser_thread
+from pylib import constants
+
+
+_MINIMUM_TIMEOUT = 3.0
+_PER_LINE_TIMEOUT = .002 # Should be able to process 500 lines per second.
+_PROCESS_START_TIMEOUT = 10.0
+
+
+class Deobfuscator(object):
+ def __init__(self, mapping_path):
+ script_path = os.path.join(
+ constants.GetOutDirectory(), 'bin', 'java_deobfuscate')
+ cmd = [script_path, mapping_path]
+ # Allow only one thread to call TransformLines() at a time.
+ self._lock = threading.Lock()
+ # Ensure that only one thread attempts to kill self._proc in Close().
+ self._close_lock = threading.Lock()
+ self._closed_called = False
+ # Assign to None so that attribute exists if Popen() throws.
+ self._proc = None
+ # Start process eagerly to hide start-up latency.
+ self._proc_start_time = time.time()
+ self._proc = subprocess.Popen(
+ cmd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ close_fds=True)
+
+ def IsClosed(self):
+ return self._closed_called or self._proc.returncode is not None
+
+ def IsBusy(self):
+ return self._lock.locked()
+
+ def IsReady(self):
+ return not self.IsClosed() and not self.IsBusy()
+
+ def TransformLines(self, lines):
+ """Deobfuscates obfuscated names found in the given lines.
+
+ If anything goes wrong (process crashes, timeout, etc), returns |lines|.
+
+ Args:
+ lines: A list of strings without trailing newlines.
+
+ Returns:
+ A list of strings without trailing newlines.
+ """
+ if not lines:
+ return []
+
+ # Deobfuscated stacks contain more frames than obfuscated ones when method
+ # inlining occurs. To account for the extra output lines, keep reading until
+ # this eof_line token is reached.
+ eof_line = uuid.uuid4().hex
+ out_lines = []
+
+ def deobfuscate_reader():
+ while True:
+ line = self._proc.stdout.readline()
+ # Return an empty string at EOF (when stdin is closed).
+ if not line:
+ break
+ line = line[:-1]
+ if line == eof_line:
+ break
+ out_lines.append(line)
+
+ if self.IsBusy():
+ logging.warning('deobfuscator: Having to wait for Java deobfuscation.')
+
+ # Allow only one thread to operate at a time.
+ with self._lock:
+ if self.IsClosed():
+ if not self._closed_called:
+ logging.warning('deobfuscator: Process exited with code=%d.',
+ self._proc.returncode)
+ self.Close()
+ return lines
+
+ # TODO(agrieve): Can probably speed this up by only sending lines through
+ # that might contain an obfuscated name.
+ reader_thread = reraiser_thread.ReraiserThread(deobfuscate_reader)
+ reader_thread.start()
+
+ try:
+ self._proc.stdin.write('\n'.join(lines))
+ self._proc.stdin.write('\n{}\n'.format(eof_line))
+ self._proc.stdin.flush()
+ time_since_proc_start = time.time() - self._proc_start_time
+ timeout = (max(0, _PROCESS_START_TIMEOUT - time_since_proc_start) +
+                 max(_MINIMUM_TIMEOUT, len(lines) * _PER_LINE_TIMEOUT))
+ reader_thread.join(timeout)
+ if self.IsClosed():
+ logging.warning(
+ 'deobfuscator: Close() called by another thread during join().')
+ return lines
+ if reader_thread.is_alive():
+ logging.error('deobfuscator: Timed out.')
+ self.Close()
+ return lines
+ return out_lines
+ except IOError:
+ logging.exception('deobfuscator: Exception during java_deobfuscate')
+ self.Close()
+ return lines
+
+ def Close(self):
+ with self._close_lock:
+ needs_closing = not self.IsClosed()
+ self._closed_called = True
+
+ if needs_closing:
+ self._proc.stdin.close()
+ self._proc.kill()
+ self._proc.wait()
+
+ def __del__(self):
+ # self._proc is None when Popen() fails.
+ if not self._closed_called and self._proc:
+ logging.error('deobfuscator: Forgot to Close()')
+ self.Close()
+
+
+class DeobfuscatorPool(object):
+ # As of Sep 2017, each instance requires about 500MB of RAM, as measured by:
+ # /usr/bin/time -v out/Release/bin/java_deobfuscate \
+ # out/Release/apks/ChromePublic.apk.mapping
+ def __init__(self, mapping_path, pool_size=4):
+ self._mapping_path = mapping_path
+ self._pool = [Deobfuscator(mapping_path) for _ in xrange(pool_size)]
+ # Allow only one thread to select from the pool at a time.
+ self._lock = threading.Lock()
+
+ def TransformLines(self, lines):
+ with self._lock:
+ assert self._pool, 'TransformLines() called on a closed DeobfuscatorPool.'
+ # Restart any closed Deobfuscators.
+ for i, d in enumerate(self._pool):
+ if d.IsClosed():
+ logging.warning('deobfuscator: Restarting closed instance.')
+ self._pool[i] = Deobfuscator(self._mapping_path)
+
+ selected = next((x for x in self._pool if x.IsReady()), self._pool[0])
+ # Rotate the order so that next caller will not choose the same one.
+ self._pool.remove(selected)
+ self._pool.append(selected)
+
+ return selected.TransformLines(lines)
+
+ def Close(self):
+ with self._lock:
+ for d in self._pool:
+ d.Close()
+ self._pool = None
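+
+# A minimal usage sketch (hypothetical mapping path; |lines| is a list of
+# logcat lines without trailing newlines):
+#
+#   pool = DeobfuscatorPool('ChromePublic.apk.mapping')
+#   try:
+#     readable = pool.TransformLines(lines)
+#   finally:
+#     pool.Close()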
diff --git a/deps/v8/build/android/pylib/symbols/elf_symbolizer.py b/deps/v8/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000000..1f2f918255
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,487 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
+ELF_MAGIC = '\x7f\x45\x4c\x46'
+
+
+def ContainsElfMagic(file_path):
+ if os.path.getsize(file_path) < 4:
+ return False
+ try:
+ with open(file_path, 'r') as f:
+ b = f.read(4)
+ return b == ELF_MAGIC
+ except IOError:
+ return False
+
+
+class ELFSymbolizer(object):
+ """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+ This class is a frontend for addr2line (part of GNU binutils), designed to
+ symbolize batches of large numbers of symbols for a given ELF file. It
+ supports sharding symbolization against many addr2line instances and
+ pipelining of multiple requests per each instance (in order to hide addr2line
+ internals and OS pipe latencies).
+
+  This class exposes a very simple asynchronous interface, based on the
+  following three methods:
+ - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+  - The |callback| method: used to communicate back the symbol information.
+  - Join(): called to conclude the batch and gather the last outstanding
+    results.
+ In essence, before the Join method returns, this class will have issued as
+ many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+ that due to multiprocess sharding, callbacks can be delivered out of order.
+
+ Some background about addr2line:
+ - it is invoked passing the elf path in the cmdline, piping the addresses in
+ its stdin and getting results on its stdout.
+ - it has pretty large response times for the first requests, but it
+ works very well in streaming mode once it has been warmed up.
+ - it doesn't scale by itself (on more cores). However, spawning multiple
+ instances at the same time on the same file is pretty efficient as they
+ keep hitting the pagecache and become mostly CPU bound.
+  - it might hang or crash, mostly due to OOM. This class deals with both of
+    these problems.
+
+ Despite the "scary" imports and the multi* words above, (almost) no multi-
+ threading/processing is involved from the python viewpoint. Concurrency
+ here is achieved by spawning several addr2line subprocesses and handling their
+ output pipes asynchronously. Therefore, all the code here (with the exception
+ of the Queue instance in Addr2Line) should be free from mind-blowing
+ thread-safety concerns.
+
+ The multiprocess sharding works as follows:
+  The symbolizer tries to use as few addr2line instances as possible (with
+  respect to |max_concurrent_jobs|) and enqueues all the requests
+ in a single addr2line instance. For few symbols (i.e. dozens) sharding isn't
+ worth the startup cost.
+ The multiprocess logic kicks in as soon as the queues for the existing
+ instances grow. Specifically, once all the existing instances reach the
+ |max_queue_size| bound, a new addr2line instance is kicked in.
+ In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+ have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+ blocking the SymbolizeAsync method.
+
+  This module has been deliberately designed to be dependency free (w.r.t.
+ other modules in this project), to allow easy reuse in external projects.
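+
+  Usage sketch (the paths and the address are illustrative):
+
+    def callback(sym_info, addr):
+      print '0x%x -> %s' % (addr, sym_info.name)
+
+    symbolizer = ELFSymbolizer('libfoo.so', 'bin/arm-addr2line', callback)
+    symbolizer.SymbolizeAsync(0x1234, callback_arg=0x1234)
+    symbolizer.Join()  # Blocks until all callbacks have been invoked.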
+ """
+
+ def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+ max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+ source_root_path=None, strip_base_path=None):
+ """Args:
+ elf_file_path: path of the elf file to be symbolized.
+ addr2line_path: path of the toolchain's addr2line binary.
+ callback: a callback which will be invoked for each resolved symbol with
+ the two args (sym_info, callback_arg). The former is an instance of
+ |ELFSymbolInfo| and contains the symbol information. The latter is an
+ embedder-provided argument which is passed to SymbolizeAsync().
+    inlines: when True, the ELFSymbolInfo will also contain the details about
+ the outer inlining functions. When False, only the innermost function
+ will be provided.
+ max_concurrent_jobs: Max number of addr2line instances spawned.
+        Parallelize responsibly: addr2line is a memory and I/O monster.
+ max_queue_size: Max number of outstanding requests per addr2line instance.
+    addr2line_timeout: Max time (in seconds) to wait for an addr2line
+        response. After the timeout, the instance will be considered hung
+        and respawned.
+    source_root_path: In some toolchains only the name of the source file is
+        output, without any path information; disambiguation searches
+ through the source directory specified by |source_root_path| argument
+ for files whose name matches, adding the full path information to the
+ output. For example, if the toolchain outputs "unicode.cc" and there
+ is a file called "unicode.cc" located under |source_root_path|/foo,
+ the tool will replace "unicode.cc" with
+ "|source_root_path|/foo/unicode.cc". If there are multiple files with
+ the same name, disambiguation will fail because the tool cannot
+ determine which of the files was the source of the symbol.
+ strip_base_path: Rebases the symbols source paths onto |source_root_path|
+        (i.e. replace |strip_base_path| with |source_root_path|).
+ """
+ assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path
+ self.elf_file_path = elf_file_path
+ self.addr2line_path = addr2line_path
+ self.callback = callback
+ self.inlines = inlines
+ self.max_concurrent_jobs = (max_concurrent_jobs or
+ min(multiprocessing.cpu_count(), 4))
+ self.max_queue_size = max_queue_size
+ self.addr2line_timeout = addr2line_timeout
+ self.requests_counter = 0 # For generating monotonic request IDs.
+ self._a2l_instances = [] # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+ # If necessary, create disambiguation lookup table
+ self.disambiguate = source_root_path is not None
+ self.disambiguation_table = {}
+ self.strip_base_path = strip_base_path
+ if self.disambiguate:
+ self.source_root_path = os.path.abspath(source_root_path)
+ self._CreateDisambiguationTable()
+
+ # Create one addr2line instance. More instances will be created on demand
+ # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+ self._CreateNewA2LInstance()
+
+ def SymbolizeAsync(self, addr, callback_arg=None):
+ """Requests symbolization of a given address.
+
+ This method is not guaranteed to return immediately. It generally does, but
+ in some scenarios (e.g. all addr2line instances have full queues) it can
+ block to create back-pressure.
+
+ Args:
+ addr: address to symbolize.
+ callback_arg: optional argument which will be passed to the |callback|."""
+ assert isinstance(addr, int)
+
+ # Process all the symbols that have been resolved in the meanwhile.
+ # Essentially, this drains all the addr2line(s) out queues.
+ for a2l_to_purge in self._a2l_instances:
+ a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+ a2l_to_purge.RecycleIfNecessary()
+
+ # Find the best instance according to this logic:
+ # 1. Find an existing instance with the shortest queue.
+    # 2. If all instances' queues are full, but there is room in the pool,
+ # (i.e. < |max_concurrent_jobs|) create a new instance.
+ # 3. If there were already |max_concurrent_jobs| instances and all of them
+    #    had full queues, apply back-pressure.
+
+ # 1.
+ def _SortByQueueSizeAndReqID(a2l):
+ return (a2l.queue_size, a2l.first_request_id)
+ a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+ # 2.
+ if (a2l.queue_size >= self.max_queue_size and
+ len(self._a2l_instances) < self.max_concurrent_jobs):
+ a2l = self._CreateNewA2LInstance()
+
+ # 3.
+ if a2l.queue_size >= self.max_queue_size:
+ a2l.WaitForNextSymbolInQueue()
+
+ a2l.EnqueueRequest(addr, callback_arg)
+
+ def WaitForIdle(self):
+ """Waits for all the outstanding requests to complete."""
+ for a2l in self._a2l_instances:
+ a2l.WaitForIdle()
+
+ def Join(self):
+ """Waits for all the outstanding requests to complete and terminates."""
+ for a2l in self._a2l_instances:
+ a2l.WaitForIdle()
+ a2l.Terminate()
+
+ def _CreateNewA2LInstance(self):
+ assert len(self._a2l_instances) < self.max_concurrent_jobs
+ a2l = ELFSymbolizer.Addr2Line(self)
+ self._a2l_instances.append(a2l)
+ return a2l
+
+ def _CreateDisambiguationTable(self):
+ """ Non-unique file names will result in None entries"""
+ start_time = time.time()
+ logging.info('Collecting information about available source files...')
+ self.disambiguation_table = {}
+
+ for root, _, filenames in os.walk(self.source_root_path):
+ for f in filenames:
+ self.disambiguation_table[f] = os.path.join(root, f) if (f not in
+ self.disambiguation_table) else None
+ logging.info('Finished collecting information about '
+ 'possible files (took %.1f s).',
+ (time.time() - start_time))
+
+
+ class Addr2Line(object):
+ """A python wrapper around an addr2line instance.
+
+ The communication with the addr2line process looks as follows:
+ [STDIN] [STDOUT] (from addr2line's viewpoint)
+ > f001111
+ > f002222
+ < Symbol::Name(foo, bar) for f001111
+ < /path/to/source/file.c:line_number
+ > f003333
+ < Symbol::Name2() for f002222
+ < /path/to/source/file.c:line_number
+ < Symbol::Name3() for f003333
+ < /path/to/source/file.c:line_number
+ """
+
+ SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+ def __init__(self, symbolizer):
+ self._symbolizer = symbolizer
+ self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+ # The request queue (i.e. addresses pushed to addr2line's stdin and not
+ # yet retrieved on stdout)
+ self._request_queue = collections.deque()
+
+ # This is essentially len(self._request_queue). It has been optimized to a
+ # separate field because turned out to be a perf hot-spot.
+ self.queue_size = 0
+
+ # Keep track of the number of symbols a process has processed to
+ # avoid a single process growing too big and using all the memory.
+ self._processed_symbols_count = 0
+
+ # Objects required to handle the addr2line subprocess.
+      self._proc = None # subprocess.Popen(...) instance.
+      self._thread = None # threading.Thread instance.
+ self._out_queue = None # Queue.Queue instance (for buffering a2l stdout).
+ self._RestartAddr2LineProcess()
+
+ def EnqueueRequest(self, addr, callback_arg):
+ """Pushes an address to addr2line's stdin (and keeps track of it)."""
+ self._symbolizer.requests_counter += 1 # For global "age" of requests.
+ req_idx = self._symbolizer.requests_counter
+ self._request_queue.append((addr, callback_arg, req_idx))
+ self.queue_size += 1
+ self._WriteToA2lStdin(addr)
+
+ def WaitForIdle(self):
+ """Waits until all the pending requests have been symbolized."""
+ while self.queue_size > 0:
+ self.WaitForNextSymbolInQueue()
+
+ def WaitForNextSymbolInQueue(self):
+ """Waits for the next pending request to be symbolized."""
+ if not self.queue_size:
+ return
+
+ # This outer loop guards against a2l hanging (detecting stdout timeout).
+ while True:
+ start_time = datetime.datetime.now()
+ timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+ # The inner loop guards against a2l crashing (checking if it exited).
+ while datetime.datetime.now() - start_time < timeout:
+          # poll() returns non-None iff the process exited; a2l should never
+          # exit on its own, so treat any exit (even status 0) as a crash.
+          if self._proc.poll() is not None:
+ logging.warning('addr2line crashed, respawning (lib: %s).',
+ self._lib_file_name)
+ self._RestartAddr2LineProcess()
+ # TODO(primiano): the best thing to do in this case would be
+ # shrinking the pool size as, very likely, addr2line is crashed
+ # due to low memory (and the respawned one will die again soon).
+
+ try:
+ lines = self._out_queue.get(block=True, timeout=0.25)
+ except Queue.Empty:
+            # On timeout (1/4 s) repeat the inner loop to check whether the
+            # addr2line process crashed or we waited too long for its output.
+ continue
+
+ # In nominal conditions, we get straight to this point.
+ self._ProcessSymbolOutput(lines)
+ return
+
+ # If this point is reached, we waited more than |addr2line_timeout|.
+ logging.warning('Hung addr2line process, respawning (lib: %s).',
+ self._lib_file_name)
+ self._RestartAddr2LineProcess()
+
+ def ProcessAllResolvedSymbolsInQueue(self):
+ """Consumes all the addr2line output lines produced (without blocking)."""
+ if not self.queue_size:
+ return
+ while True:
+ try:
+ lines = self._out_queue.get_nowait()
+ except Queue.Empty:
+ break
+ self._ProcessSymbolOutput(lines)
+
+ def RecycleIfNecessary(self):
+ """Restarts the process if it has been used for too long.
+
+    A long-running addr2line process will consume excessive amounts
+ of memory without any gain in performance."""
+ if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+ self._RestartAddr2LineProcess()
+
+
+ def Terminate(self):
+ """Kills the underlying addr2line process.
+
+ The poller |_thread| will terminate as well due to the broken pipe."""
+ try:
+ self._proc.kill()
+ self._proc.communicate() # Essentially wait() without risking deadlock.
+ except Exception: # pylint: disable=broad-except
+ # An exception while terminating? How interesting.
+ pass
+ self._proc = None
+
+ def _WriteToA2lStdin(self, addr):
+ self._proc.stdin.write('%s\n' % hex(addr))
+ if self._symbolizer.inlines:
+ # In the case of inlines we output an extra blank line, which causes
+ # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+ self._proc.stdin.write('\n')
+ self._proc.stdin.flush()
+
+ def _ProcessSymbolOutput(self, lines):
+ """Parses an addr2line symbol output and triggers the client callback."""
+ (_, callback_arg, _) = self._request_queue.popleft()
+ self.queue_size -= 1
+
+ innermost_sym_info = None
+ sym_info = None
+ for (line1, line2) in lines:
+ prev_sym_info = sym_info
+ name = line1 if not line1.startswith('?') else None
+ source_path = None
+ source_line = None
+ m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
+ if m:
+ if not m.group(1).startswith('?'):
+ source_path = m.group(1)
+ if not m.group(2).startswith('?'):
+ source_line = int(m.group(2))
+ else:
+ logging.warning('Got invalid symbol path from addr2line: %s', line2)
+
+        # Apply disambiguation, if enabled and needed.
+ was_ambiguous = False
+ disambiguated = False
+ if self._symbolizer.disambiguate:
+ if source_path and not posixpath.isabs(source_path):
+ path = self._symbolizer.disambiguation_table.get(source_path)
+ was_ambiguous = True
+ disambiguated = path is not None
+ source_path = path if disambiguated else source_path
+
+ # Use absolute paths (so that paths are consistent, as disambiguation
+ # uses absolute paths)
+ if source_path and not was_ambiguous:
+ source_path = os.path.abspath(source_path)
+
+ if source_path and self._symbolizer.strip_base_path:
+ # Strip the base path
+ source_path = re.sub('^' + self._symbolizer.strip_base_path,
+ self._symbolizer.source_root_path or '', source_path)
+
+ sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
+ disambiguated)
+ if prev_sym_info:
+ prev_sym_info.inlined_by = sym_info
+ if not innermost_sym_info:
+ innermost_sym_info = sym_info
+
+ self._processed_symbols_count += 1
+ self._symbolizer.callback(innermost_sym_info, callback_arg)
+
+ def _RestartAddr2LineProcess(self):
+ if self._proc:
+ self.Terminate()
+
+      # The only reason this Queue (and the corresponding Thread below)
+      # exists is the lack of a subprocess.stdout.poll_avail_lines().
+      # Essentially this is a pipe able to extract a couple of lines atomically.
+ self._out_queue = Queue.Queue()
+
+      # Start the underlying addr2line process in line buffered mode.
+ cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+ '--exe=' + self._symbolizer.elf_file_path]
+ if self._symbolizer.inlines:
+ cmd += ['--inlines']
+ self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)
+
+      # Start the poller thread, which atomically moves the lines read from
+      # addr2line's stdout to |_out_queue|.
+ self._thread = threading.Thread(
+ target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+ args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+ self._thread.daemon = True # Don't prevent early process exit.
+ self._thread.start()
+
+ self._processed_symbols_count = 0
+
+      # Replay the pending requests on the new process (only for the case
+      # of a hung addr2line timing out while requests were in flight).
+ for (addr, _, _) in self._request_queue:
+ self._WriteToA2lStdin(addr)
+
+ @staticmethod
+ def StdoutReaderThread(process_pipe, queue, inlines):
+ """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+ This is the only piece of code not running on the main thread. It merely
+ writes to a Queue, which is thread-safe. In the case of inlines, it
+ detects the ??,??:0 marker and sends the lines atomically, such that the
+ main thread always receives all the lines corresponding to one symbol in
+ one shot."""
+ try:
+ lines_for_one_symbol = []
+ while True:
+ line1 = process_pipe.readline().rstrip('\r\n')
+ line2 = process_pipe.readline().rstrip('\r\n')
+ if not line1 or not line2:
+ break
+ inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+ (line1 != '??' and line2 != '??:0'))
+ if not inlines or inline_has_more_lines:
+ lines_for_one_symbol += [(line1, line2)]
+ if inline_has_more_lines:
+ continue
+ queue.put(lines_for_one_symbol)
+ lines_for_one_symbol = []
+ process_pipe.close()
+
+      # Every addr2line process dies at some point; let it die silently.
+ except (IOError, OSError):
+ pass
+
+ @property
+ def first_request_id(self):
+ """Returns the request_id of the oldest pending request in the queue."""
+ return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+ """The result of the symbolization passed as first arg. of each callback."""
+
+ def __init__(self, name, source_path, source_line, was_ambiguous=False,
+ disambiguated=False):
+ """All the fields here can be None (if addr2line replies with '??')."""
+ self.name = name
+ self.source_path = source_path
+ self.source_line = source_line
+ # In the case of |inlines|=True, the |inlined_by| points to the outer
+ # function inlining the current one (and so on, to form a chain).
+ self.inlined_by = None
+ self.disambiguated = disambiguated
+ self.was_ambiguous = was_ambiguous
+
+ def __str__(self):
+ return '%s [%s:%d]' % (
+ self.name or '??', self.source_path or '??', self.source_line or 0)
diff --git a/deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py b/deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000000..765b5989cb
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import unittest
+
+from pylib.symbols import elf_symbolizer
+from pylib.symbols import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+ 'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+ def setUp(self):
+ self._callback = functools.partial(
+ ELFSymbolizerTest._SymbolizeCallback, self)
+ self._resolved_addresses = set()
+    # Mute warnings; we expect them due to the crash/hang tests.
+ logging.getLogger().setLevel(logging.ERROR)
+
+ def testParallelism1(self):
+ self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+ def testParallelism4(self):
+ self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+ def testParallelism8(self):
+ self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+ def testCrash(self):
+ os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+ self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+ os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+ def testHang(self):
+ os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+ self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+ os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+ def testInlines(self):
+ """Stimulate the inline processing logic."""
+ symbolizer = elf_symbolizer.ELFSymbolizer(
+ elf_file_path='/path/doesnt/matter/mock_lib1.so',
+ addr2line_path=_MOCK_A2L_PATH,
+ callback=self._callback,
+ inlines=True,
+ max_concurrent_jobs=4)
+
+ for addr in xrange(1000):
+ exp_inline = False
+ exp_unknown = False
+
+ # First 100 addresses with inlines.
+ if addr < 100:
+ addr += _INLINE_MOCK_ADDR
+ exp_inline = True
+
+ # Followed by 100 without inlines.
+ elif addr < 200:
+ pass
+
+      # Followed by 100 interleaved inlines and non-inlines.
+ elif addr < 300:
+ if addr & 1:
+ addr += _INLINE_MOCK_ADDR
+ exp_inline = True
+
+      # Followed by 100 interleaved inlines and unknowns.
+ elif addr < 400:
+ if addr & 1:
+ addr += _INLINE_MOCK_ADDR
+ exp_inline = True
+ else:
+ addr += _UNKNOWN_MOCK_ADDR
+ exp_unknown = True
+
+ exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+ exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+ exp_source_line = addr if not exp_unknown else None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+
+ symbolizer.Join()
+
+ def testIncompleteSyminfo(self):
+ """Stimulate the symbol-not-resolved logic."""
+ symbolizer = elf_symbolizer.ELFSymbolizer(
+ elf_file_path='/path/doesnt/matter/mock_lib1.so',
+ addr2line_path=_MOCK_A2L_PATH,
+ callback=self._callback,
+ max_concurrent_jobs=1)
+
+ # Test symbols with valid name but incomplete path.
+ addr = _INCOMPLETE_MOCK_ADDR
+ exp_name = 'mock_sym_for_addr_%d' % addr
+ exp_source_path = None
+ exp_source_line = None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+
+ # Test symbols with no name or sym info.
+ addr = _UNKNOWN_MOCK_ADDR
+ exp_name = None
+ exp_source_path = None
+ exp_source_line = None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+
+ symbolizer.Join()
+
+ def testWaitForIdle(self):
+ symbolizer = elf_symbolizer.ELFSymbolizer(
+ elf_file_path='/path/doesnt/matter/mock_lib1.so',
+ addr2line_path=_MOCK_A2L_PATH,
+ callback=self._callback,
+ max_concurrent_jobs=1)
+
+ # Test symbols with valid name but incomplete path.
+ addr = _INCOMPLETE_MOCK_ADDR
+ exp_name = 'mock_sym_for_addr_%d' % addr
+ exp_source_path = None
+ exp_source_line = None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+ symbolizer.WaitForIdle()
+
+ # Test symbols with no name or sym info.
+ addr = _UNKNOWN_MOCK_ADDR
+ exp_name = None
+ exp_source_path = None
+ exp_source_line = None
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+ symbolizer.Join()
+
+ def _RunTest(self, max_concurrent_jobs, num_symbols):
+ symbolizer = elf_symbolizer.ELFSymbolizer(
+ elf_file_path='/path/doesnt/matter/mock_lib1.so',
+ addr2line_path=_MOCK_A2L_PATH,
+ callback=self._callback,
+ max_concurrent_jobs=max_concurrent_jobs,
+ addr2line_timeout=0.5)
+
+ for addr in xrange(num_symbols):
+ exp_name = 'mock_sym_for_addr_%d' % addr
+ exp_source_path = 'mock_src/mock_lib1.so.c'
+ exp_source_line = addr
+ cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+ symbolizer.SymbolizeAsync(addr, cb_arg)
+
+ symbolizer.Join()
+
+ # Check that all the expected callbacks have been received.
+ for addr in xrange(num_symbols):
+ self.assertIn(addr, self._resolved_addresses)
+ self._resolved_addresses.remove(addr)
+
+ # Check for unexpected callbacks.
+ self.assertEqual(len(self._resolved_addresses), 0)
+
+ def _SymbolizeCallback(self, sym_info, cb_arg):
+ self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo))
+ self.assertTrue(isinstance(cb_arg, tuple))
+ self.assertEqual(len(cb_arg), 5)
+
+ # Unpack expectations from the callback extra argument.
+ (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
+ if exp_name is None:
+ self.assertIsNone(sym_info.name)
+ else:
+ self.assertTrue(sym_info.name.startswith(exp_name))
+ self.assertEqual(sym_info.source_path, exp_source_path)
+ self.assertEqual(sym_info.source_line, exp_source_line)
+
+ if exp_inlines:
+ self.assertEqual(sym_info.name, exp_name + '_inner')
+ self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
+ self.assertEqual(sym_info.inlined_by.inlined_by.name,
+ exp_name + '_outer')
+
+ # Check against duplicate callbacks.
+ self.assertNotIn(addr, self._resolved_addresses)
+ self._resolved_addresses.add(addr)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py b/deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000000..cd58f56d57
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('-e', '--exe', dest='exe') # Path of the debug-library.so.
+ # Silently swallow the other unnecessary arguments.
+ parser.add_option('-C', '--demangle', action='store_true')
+ parser.add_option('-f', '--functions', action='store_true')
+ parser.add_option('-i', '--inlines', action='store_true')
+ options, _ = parser.parse_args(argv[1:])
+ lib_file_name = posixpath.basename(options.exe)
+ processed_sym_count = 0
+ crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
+ hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
+
+  while True:
+ line = sys.stdin.readline().rstrip('\r')
+ if not line:
+ break
+
+    # A blank input line must generate '??,??:0' (used as an inline marker).
+ if line == '\n':
+ print '??'
+ print '??:0'
+ sys.stdout.flush()
+ continue
+
+ addr = int(line, 16)
+ processed_sym_count += 1
+ if crash_every and processed_sym_count % crash_every == 0:
+ sys.exit(1)
+ if hang_every and processed_sym_count % hang_every == 0:
+ time.sleep(1)
+
+ # Addresses < 1M will return good mock symbol information.
+ if addr < 1024 * 1024:
+ print 'mock_sym_for_addr_%d' % addr
+ print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+ # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
+ elif addr < 2 * 1024 * 1024:
+ print 'mock_sym_for_addr_%d' % addr
+ print '??:0'
+
+ # Addresses 2M <= x < 3M will return unknown symbol information.
+ elif addr < 3 * 1024 * 1024:
+ print '??'
+ print '??'
+
+ # Addresses 3M <= x < 4M will return inlines.
+ elif addr < 4 * 1024 * 1024:
+ print 'mock_sym_for_addr_%d_inner' % addr
+ print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+ print 'mock_sym_for_addr_%d_middle' % addr
+ print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+ print 'mock_sym_for_addr_%d_outer' % addr
+ print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+ sys.stdout.flush()
+
+
+if __name__ == '__main__':
+ main(sys.argv) \ No newline at end of file
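Given the address ranges above, a session with the mock might look as follows (same >/< notation as the Addr2Line docstring; this assumes --exe=/tmp/libfoo.so, and the addresses are arbitrary):

    > 0x2a                  (below 1M: good symbol info)
    < mock_sym_for_addr_42
    < mock_src/libfoo.so.c:42
    > 0x200000              (2M <= x < 3M: unknown symbol)
    < ??
    < ??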
diff --git a/deps/v8/build/android/pylib/symbols/stack_symbolizer.py b/deps/v8/build/android/pylib/symbols/stack_symbolizer.py
new file mode 100644
index 0000000000..123b726130
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/stack_symbolizer.py
@@ -0,0 +1,81 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import tempfile
+import time
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+_STACK_TOOL = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..',
+ 'third_party', 'android_platform', 'development',
+ 'scripts', 'stack')
+ABI_REG = re.compile('ABI: \'(.+?)\'')
+
+
+def _DeviceAbiToArch(device_abi):
+ # The order of this list is significant to find the more specific match
+ # (e.g., arm64) before the less specific (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
+ for arch in arches:
+ if arch in device_abi:
+ return arch
+ raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+
+class Symbolizer(object):
+ """A helper class to symbolize stack."""
+
+ def __init__(self, apk_under_test=None):
+ self._apk_under_test = apk_under_test
+ self._time_spent_symbolizing = 0
+
+
+ def __del__(self):
+ self.CleanUp()
+
+
+ def CleanUp(self):
+ """Clean up the temporary directory of apk libs."""
+ if self._time_spent_symbolizing > 0:
+ logging.info(
+ 'Total time spent symbolizing: %.2fs', self._time_spent_symbolizing)
+
+
+ def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
+ device_abi, include_stack=True):
+ """Run the stack tool for given input.
+
+ Args:
+      data_to_symbolize: a list of strings to symbolize.
+      device_abi: the default ABI of the device which generated the tombstone.
+      include_stack: whether to include stack data in the output.
+
+ Yields:
+ A string for each line of resolved stack output.
+ """
+ arch = _DeviceAbiToArch(device_abi)
+ if not arch:
+ logging.warning('No device_abi can be found.')
+ return
+
+ cmd = [_STACK_TOOL, '--arch', arch, '--output-directory',
+ constants.GetOutDirectory(), '--more-info']
+ env = dict(os.environ)
+ env['PYTHONDONTWRITEBYTECODE'] = '1'
+ with tempfile.NamedTemporaryFile() as f:
+ f.write('\n'.join(data_to_symbolize))
+ f.flush()
+ start = time.time()
+ try:
+ _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env)
+ finally:
+ self._time_spent_symbolizing += time.time() - start
+ for line in output.splitlines():
+ if not include_stack and 'Stack Data:' in line:
+ break
+ yield line
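A sketch of how this helper might be driven (the tombstone path is a placeholder; a Chromium output directory is assumed so that the stack tool and constants.GetOutDirectory() resolve):

    # Hypothetical driver for Symbolizer; the input file path is made up.
    symbolizer = Symbolizer()
    tombstone_lines = open('/tmp/tombstone_00').read().splitlines()
    for line in symbolizer.ExtractAndResolveNativeStackTraces(
        tombstone_lines, device_abi='arm64-v8a'):
      print line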
diff --git a/deps/v8/build/android/pylib/symbols/symbol_utils.py b/deps/v8/build/android/pylib/symbols/symbol_utils.py
new file mode 100644
index 0000000000..e4e3faac80
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/symbol_utils.py
@@ -0,0 +1,812 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import bisect
+import collections
+import logging
+import os
+import re
+
+from pylib.constants import host_paths
+from pylib.symbols import elf_symbolizer
+
+
+def _AndroidAbiToCpuArch(android_abi):
+ """Return the Chromium CPU architecture name for a given Android ABI."""
+ _ARCH_MAP = {
+ 'armeabi': 'arm',
+ 'armeabi-v7a': 'arm',
+ 'arm64-v8a': 'arm64',
+ 'x86_64': 'x64',
+ }
+ return _ARCH_MAP.get(android_abi, android_abi)
+
+
+def _HexAddressRegexpFor(android_abi):
+ """Return a regexp matching hexadecimal addresses for a given Android ABI."""
+ if android_abi in ['x86_64', 'arm64-v8a', 'mips64']:
+ width = 16
+ else:
+ width = 8
+ return '[0-9a-f]{%d}' % width
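+
For example, 64-bit ABIs yield a 16-digit pattern and 32-bit ones an 8-digit pattern; a quick illustrative check:

    import re
    assert re.match(_HexAddressRegexpFor('arm64-v8a') + '$', 'b278c000deadbeef')
    assert re.match(_HexAddressRegexpFor('armeabi-v7a') + '$', 'b278c000')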
+
+
+class HostLibraryFinder(object):
+ """Translate device library path to matching host unstripped library path.
+
+ Usage is the following:
+ 1) Create instance.
+ 2) Call AddSearchDir() once or more times to add host directory path to
+ look for unstripped native libraries.
+ 3) Call Find(device_libpath) repeatedly to translate a device-specific
+ library path into the corresponding host path to the unstripped
+ version.
+ """
+ def __init__(self):
+ """Initialize instance."""
+ self._search_dirs = []
+ self._lib_map = {} # Map of library name to host file paths.
+
+ def AddSearchDir(self, lib_dir):
+ """Add a directory to the search path for host native shared libraries.
+
+ Args:
+ lib_dir: host path containing native libraries.
+ """
+ if not os.path.exists(lib_dir):
+ logging.warning('Ignoring missing host library directory: %s', lib_dir)
+ return
+ if not os.path.isdir(lib_dir):
+ logging.warning('Ignoring invalid host library directory: %s', lib_dir)
+ return
+ self._search_dirs.append(lib_dir)
+ self._lib_map = {} # Reset the map.
+
+ def Find(self, device_libpath):
+ """Find the host file path matching a specific device library path.
+
+ Args:
+ device_libpath: device-specific file path to library or executable.
+ Returns:
+ host file path to the unstripped version of the library, or None.
+ """
+    lib_name = os.path.basename(device_libpath)
+    host_lib_path = self._lib_map.get(lib_name)
+ if not host_lib_path:
+ for search_dir in self._search_dirs:
+ lib_path = os.path.join(search_dir, lib_name)
+ if os.path.exists(lib_path):
+ host_lib_path = lib_path
+ break
+
+ if not host_lib_path:
+ logging.debug('Could not find host library for: %s', lib_name)
+ self._lib_map[lib_name] = host_lib_path
+
+ return host_lib_path
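+
Typical use, under the assumption of an output directory holding unstripped libraries (both paths below are invented):

    finder = HostLibraryFinder()
    finder.AddSearchDir('out/Release/lib.unstripped')  # hypothetical path
    host_path = finder.Find('/data/app/com.example-1/lib/arm/libfoo.cr.so')
    # host_path is 'out/Release/lib.unstripped/libfoo.cr.so' if that file
    # exists, or None otherwise; either result is cached for later calls.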
+
+
+class SymbolResolver(object):
+ """A base class for objets that can symbolize library (path, offset)
+ pairs into symbol information strings. Usage is the following:
+
+ 1) Create new instance (by calling the constructor of a derived
+ class, since this is only the base one).
+
+ 2) Call SetAndroidAbi() before any call to FindSymbolInfo() in order
+ to set the Android CPU ABI used for symbolization.
+
+ 3) Before the first call to FindSymbolInfo(), one can call
+ AddLibraryOffset(), or AddLibraryOffsets() to record a set of offsets
+ that you will want to symbolize later through FindSymbolInfo(). Doing
+ so allows some SymbolResolver derived classes to work faster (e.g. the
+ one that invokes the 'addr2line' program, since the latter works faster
+ if the offsets provided as inputs are sorted in increasing order).
+
+  4) Call FindSymbolInfo(path, offset) to return the corresponding
+ symbol information string, or None if this doesn't correspond
+ to anything the instance can handle.
+
+ Note that whether the path is specific to the device or to the
+ host depends on the derived class implementation.
+ """
+ def __init__(self):
+ self._android_abi = None
+ self._lib_offsets_map = collections.defaultdict(set)
+
+ def SetAndroidAbi(self, android_abi):
+ """Set the Android ABI value for this instance.
+
+ Calling this function before FindSymbolInfo() is required by some
+ derived class implementations.
+
+ Args:
+ android_abi: Native Android CPU ABI name (e.g. 'armeabi-v7a').
+ Raises:
+ Exception if the ABI was already set with a different value.
+ """
+ if self._android_abi and self._android_abi != android_abi:
+ raise Exception('Cannot reset Android ABI to new value %s, already set '
+ 'to %s' % (android_abi, self._android_abi))
+
+ self._android_abi = android_abi
+
+ def AddLibraryOffset(self, lib_path, offset):
+ """Associate a single offset to a given device library.
+
+ This must be called before FindSymbolInfo(), otherwise its input arguments
+ will be ignored.
+
+ Args:
+ lib_path: A library path.
+ offset: An integer offset within the corresponding library that will be
+ symbolized by future calls to FindSymbolInfo.
+ """
+ self._lib_offsets_map[lib_path].add(offset)
+
+ def AddLibraryOffsets(self, lib_path, lib_offsets):
+ """Associate a set of wanted offsets to a given device library.
+
+ This must be called before FindSymbolInfo(), otherwise its input arguments
+ will be ignored.
+
+ Args:
+ lib_path: A library path.
+ lib_offsets: An iterable of integer offsets within the corresponding
+ library that will be symbolized by future calls to FindSymbolInfo.
+ """
+ self._lib_offsets_map[lib_path].update(lib_offsets)
+
+ # pylint: disable=unused-argument,no-self-use
+ def FindSymbolInfo(self, lib_path, lib_offset):
+ """Symbolize a device library path and offset.
+
+ Args:
+ lib_path: Library path (device or host specific, depending on the
+ derived class implementation).
+ lib_offset: Integer offset within the library.
+ Returns:
+ Corresponding symbol information string, or None.
+ """
+ # The base implementation cannot symbolize anything.
+ return None
+ # pylint: enable=unused-argument,no-self-use
+
+
+class ElfSymbolResolver(SymbolResolver):
+ """A SymbolResolver that can symbolize host path + offset values using
+ an elf_symbolizer.ELFSymbolizer instance.
+ """
+ def __init__(self, addr2line_path_for_tests=None):
+ super(ElfSymbolResolver, self).__init__()
+ self._addr2line_path = addr2line_path_for_tests
+
+ # Used to cache one ELFSymbolizer instance per library path.
+ self._elf_symbolizer_cache = {}
+
+ # Used to cache FindSymbolInfo() results. Maps host library paths
+ # to (offset -> symbol info string) dictionaries.
+ self._symbol_info_cache = collections.defaultdict(dict)
+ self._allow_symbolizer = True
+
+ def _CreateSymbolizerFor(self, host_path):
+ """Create the ELFSymbolizer instance associated with a given lib path."""
+ addr2line_path = self._addr2line_path
+ if not addr2line_path:
+ if not self._android_abi:
+ raise Exception(
+ 'Android CPU ABI must be set before calling FindSymbolInfo!')
+
+ cpu_arch = _AndroidAbiToCpuArch(self._android_abi)
+ self._addr2line_path = host_paths.ToolPath('addr2line', cpu_arch)
+
+ return elf_symbolizer.ELFSymbolizer(
+ elf_file_path=host_path, addr2line_path=self._addr2line_path,
+ callback=ElfSymbolResolver._Callback, inlines=True)
+
+ def DisallowSymbolizerForTesting(self):
+ """Disallow FindSymbolInfo() from using a symbolizer.
+
+ This is used during unit-testing to ensure that the offsets that were
+ recorded via AddLibraryOffset()/AddLibraryOffsets() are properly
+ symbolized, but not anything else.
+ """
+ self._allow_symbolizer = False
+
+ def FindSymbolInfo(self, host_path, offset):
+ """Override SymbolResolver.FindSymbolInfo.
+
+ Args:
+ host_path: Host-specific path to the native shared library.
+ offset: Integer offset within the native library.
+ Returns:
+ A symbol info string, or None.
+ """
+ offset_map = self._symbol_info_cache[host_path]
+ symbol_info = offset_map.get(offset)
+ if symbol_info:
+ return symbol_info
+
+ # Create symbolizer on demand.
+ symbolizer = self._elf_symbolizer_cache.get(host_path)
+ if not symbolizer:
+ symbolizer = self._CreateSymbolizerFor(host_path)
+ self._elf_symbolizer_cache[host_path] = symbolizer
+
+ # If there are pre-recorded offsets for this path, symbolize them now.
+ offsets = self._lib_offsets_map.get(host_path)
+ if offsets:
+ offset_map = {}
+ for pre_offset in offsets:
+ symbolizer.SymbolizeAsync(
+ pre_offset, callback_arg=(offset_map, pre_offset))
+ symbolizer.WaitForIdle()
+ self._symbol_info_cache[host_path] = offset_map
+
+ symbol_info = offset_map.get(offset)
+ if symbol_info:
+ return symbol_info
+
+ if not self._allow_symbolizer:
+ return None
+
+ # Symbolize single offset. Slower if addresses are not provided in
+ # increasing order to addr2line.
+ symbolizer.SymbolizeAsync(offset,
+ callback_arg=(offset_map, offset))
+ symbolizer.WaitForIdle()
+ return offset_map.get(offset)
+
+ @staticmethod
+ def _Callback(sym_info, callback_arg):
+ offset_map, offset = callback_arg
+ offset_map[offset] = str(sym_info)
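+
Putting the pieces together, a resolver session could look like this sketch (the library path and offsets are invented; without addr2line_path_for_tests the addr2line binary is derived from the ABI set below):

    resolver = ElfSymbolResolver()
    resolver.SetAndroidAbi('armeabi-v7a')
    # Pre-record offsets so they are symbolized in one batch on first lookup.
    resolver.AddLibraryOffsets('/tmp/unstripped/libfoo.cr.so', [0x1234, 0x5678])
    info = resolver.FindSymbolInfo('/tmp/unstripped/libfoo.cr.so', 0x1234)
    # info is a string such as 'Foo::Bar() [foo.cc:42]', or None.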
+
+
+class DeviceSymbolResolver(SymbolResolver):
+ """A SymbolResolver instance that accepts device-specific path.
+
+ Usage is the following:
+ 1) Create new instance, passing a parent SymbolResolver instance that
+ accepts host-specific paths, and a HostLibraryFinder instance.
+
+ 2) Optional: call AddApkOffsets() to add offsets from within an APK
+ that contains uncompressed native shared libraries.
+
+ 3) Use it as any SymbolResolver instance.
+ """
+ def __init__(self, host_resolver, host_lib_finder):
+ """Initialize instance.
+
+ Args:
+ host_resolver: A parent SymbolResolver instance that will be used
+ to resolve symbols from host library paths.
+ host_lib_finder: A HostLibraryFinder instance used to locate
+ unstripped libraries on the host.
+ """
+ super(DeviceSymbolResolver, self).__init__()
+ self._host_lib_finder = host_lib_finder
+ self._bad_device_lib_paths = set()
+ self._host_resolver = host_resolver
+
+ def SetAndroidAbi(self, android_abi):
+ super(DeviceSymbolResolver, self).SetAndroidAbi(android_abi)
+ self._host_resolver.SetAndroidAbi(android_abi)
+
+ def AddLibraryOffsets(self, device_lib_path, lib_offsets):
+ """Associate a set of wanted offsets to a given device library.
+
+ This must be called before FindSymbolInfo(), otherwise its input arguments
+ will be ignored.
+
+ Args:
+ device_lib_path: A device-specific library path.
+ lib_offsets: An iterable of integer offsets within the corresponding
+ library that will be symbolized by future calls to FindSymbolInfo.
+ """
+ if device_lib_path in self._bad_device_lib_paths:
+ return
+
+ host_lib_path = self._host_lib_finder.Find(device_lib_path)
+ if not host_lib_path:
+      # NOTE: self._bad_device_lib_paths is used to print this warning only
+      # once per bad library.
+ logging.warning('Could not find host library matching device path: %s',
+ device_lib_path)
+ self._bad_device_lib_paths.add(device_lib_path)
+ return
+
+ self._host_resolver.AddLibraryOffsets(host_lib_path, lib_offsets)
+
+ def AddApkOffsets(self, device_apk_path, apk_offsets, apk_translator):
+ """Associate a set of wanted offsets to a given device APK path.
+
+ This converts the APK-relative offsets into offsets relative to the
+ uncompressed libraries it contains, then calls AddLibraryOffsets()
+ for each one of the libraries.
+
+ Must be called before FindSymbolInfo() as well, otherwise input arguments
+ will be ignored.
+
+ Args:
+ device_apk_path: Device-specific APK path.
+ apk_offsets: Iterable of offsets within the APK file.
+ apk_translator: An ApkLibraryPathTranslator instance used to extract
+ library paths from the APK.
+ """
+ libraries_map = collections.defaultdict(set)
+ for offset in apk_offsets:
+ lib_path, lib_offset = apk_translator.TranslatePath(device_apk_path,
+ offset)
+ libraries_map[lib_path].add(lib_offset)
+
+ for lib_path, lib_offsets in libraries_map.iteritems():
+ self.AddLibraryOffsets(lib_path, lib_offsets)
+
+ def FindSymbolInfo(self, device_path, offset):
+ """Overrides SymbolResolver.FindSymbolInfo.
+
+ Args:
+ device_path: Device-specific library path (e.g.
+ '/data/app/com.example.app-1/lib/x86/libfoo.so')
+ offset: Offset in device library path.
+ Returns:
+ Corresponding symbol information string, or None.
+ """
+ host_path = self._host_lib_finder.Find(device_path)
+ if not host_path:
+ return None
+
+ return self._host_resolver.FindSymbolInfo(host_path, offset)
+
+
+class MemoryMap(object):
+ """Models the memory map of a given process. Usage is:
+
+ 1) Create new instance, passing the Android ABI.
+
+ 2) Call TranslateLine() whenever you want to detect and translate any
+ memory map input line.
+
+ 3) Otherwise, it is possible to parse the whole memory map input with
+ ParseLines(), then call FindSectionForAddress() repeatedly in order
+ to translate a memory address into the corresponding mapping and
+ file information tuple (e.g. to symbolize stack entries).
+ """
+
+ # A named tuple describing interesting memory map line items.
+ # Fields:
+ # addr_start: Mapping start address in memory.
+ # file_offset: Corresponding file offset.
+ # file_size: Corresponding mapping size in bytes.
+ # file_path: Input file path.
+ # match: Corresponding regular expression match object.
+ LineTuple = collections.namedtuple('MemoryMapLineTuple',
+ 'addr_start,file_offset,file_size,'
+ 'file_path, match')
+
+  # A named tuple describing a memory map section.
+  # Fields:
+  #   address: Memory address.
+  #   size: Size in bytes in memory.
+ # offset: Starting file offset.
+ # path: Input file path.
+ SectionTuple = collections.namedtuple('MemoryMapSection',
+ 'address,size,offset,path')
+
+ def __init__(self, android_abi):
+ """Initializes instance.
+
+ Args:
+ android_abi: Android CPU ABI name (e.g. 'armeabi-v7a')
+ """
+ hex_addr = _HexAddressRegexpFor(android_abi)
+
+ # pylint: disable=line-too-long
+ # A regular expression used to match memory map entries which look like:
+ # b278c000-b2790fff r-- 4fda000 5000 /data/app/com.google.android.apps.chrome-2/base.apk
+ # pylint: enable=line-too-long
+ self._re_map_section = re.compile(
+ r'\s*(?P<addr_start>' + hex_addr + r')-(?P<addr_end>' + hex_addr + ')' +
+ r'\s+' +
+ r'(?P<perm>...)\s+' +
+ r'(?P<file_offset>[0-9a-f]+)\s+' +
+ r'(?P<file_size>[0-9a-f]+)\s*' +
+ r'(?P<file_path>[^ \t]+)?')
+
+ self._addr_map = [] # Sorted list of (address, size, path, offset) tuples.
+ self._sorted_addresses = [] # Sorted list of address fields in _addr_map.
+ self._in_section = False
+
+ def TranslateLine(self, line, apk_path_translator):
+ """Try to translate a memory map input line, if detected.
+
+ This only takes care of converting mapped APK file path and offsets
+ into a corresponding uncompressed native library file path + new offsets,
+ e.g. '..... <offset> <size> /data/.../base.apk' gets
+ translated into '.... <new-offset> <size> /data/.../base.apk!lib/libfoo.so'
+
+ This function should always work, even if ParseLines() was not called
+ previously.
+
+ Args:
+ line: Input memory map / tombstone line.
+      apk_path_translator: An ApkLibraryPathTranslator instance, used to map
+ APK offsets into uncompressed native libraries + new offsets.
+ Returns:
+ Translated memory map line, if relevant, or unchanged input line
+ otherwise.
+ """
+ t = self._ParseLine(line.rstrip())
+ if not t:
+ return line
+
+ new_path, new_offset = apk_path_translator.TranslatePath(
+ t.file_path, t.file_offset)
+
+ if new_path == t.file_path:
+ return line
+
+ pos = t.match.start('file_path')
+ return '%s%s (offset 0x%x)%s' % (line[0:pos], new_path, new_offset,
+ line[t.match.end('file_path'):])
+
+ def ParseLines(self, input_lines, in_section=False):
+ """Parse a list of input lines and extract the APK memory map out of it.
+
+ Args:
+ input_lines: list, or iterable, of input lines.
+ in_section: Optional. If true, considers that the input lines are
+        already part of the memory map. Otherwise, waits until the start of
+ the section appears in the input before trying to record data.
+ Returns:
+ True iff APK-related memory map entries were found. False otherwise.
+ """
+    addr_list = [] # List of LineTuple instances (see _ParseLine).
+ self._in_section = in_section
+ for line in input_lines:
+ t = self._ParseLine(line.rstrip())
+ if not t:
+ continue
+
+ addr_list.append(t)
+
+    self._addr_map = sorted(addr_list, key=lambda x: x.addr_start)
+ self._sorted_addresses = [e.addr_start for e in self._addr_map]
+ return bool(self._addr_map)
+
+ def _ParseLine(self, line):
+ """Used internally to recognized memory map input lines.
+
+ Args:
+      line: Input logcat or tombstone line.
+ Returns:
+ A LineTuple instance on success, or None on failure.
+ """
+ if not self._in_section:
+ self._in_section = line.startswith('memory map:')
+ return None
+
+ m = self._re_map_section.match(line)
+ if not m:
+ self._in_section = False # End of memory map section
+ return None
+
+ # Only accept .apk and .so files that are not from the system partitions.
+ file_path = m.group('file_path')
+ if not file_path:
+ return None
+
+ if file_path.startswith('/system') or file_path.startswith('/vendor'):
+ return None
+
+ if not (file_path.endswith('.apk') or file_path.endswith('.so')):
+ return None
+
+ addr_start = int(m.group('addr_start'), 16)
+ file_offset = int(m.group('file_offset'), 16)
+ file_size = int(m.group('file_size'), 16)
+
+ return self.LineTuple(addr_start, file_offset, file_size, file_path, m)
+
+ def Dump(self):
+ """Print memory map for debugging."""
+ print 'MEMORY MAP ['
+ for t in self._addr_map:
+ print '[%08x-%08x %08x %08x %s]' % (
+ t.addr_start, t.addr_start + t.file_size, t.file_size, t.file_offset,
+ t.file_path)
+ print '] MEMORY MAP'
+
+ def FindSectionForAddress(self, addr):
+ """Find the map section corresponding to a specific memory address.
+
+    Call this method only after ParseLines() has been called to extract
+ relevant information from the memory map.
+
+ Args:
+ addr: Memory address
+ Returns:
+ A SectionTuple instance on success, or None on failure.
+ """
+ pos = bisect.bisect_right(self._sorted_addresses, addr)
+ if pos > 0:
+ # All values in [0,pos) are <= addr, just ensure that the last
+ # one contains the address as well.
+ entry = self._addr_map[pos - 1]
+ if entry.addr_start + entry.file_size > addr:
+ return self.SectionTuple(entry.addr_start, entry.file_size,
+ entry.file_offset, entry.file_path)
+ return None
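+
For instance, feeding in the sample map line quoted in the constructor comment (the 'memory map:' header is required for section detection; the probe address is arbitrary):

    mmap = MemoryMap('armeabi-v7a')
    lines = [
        'memory map:',
        'b278c000-b2790fff r-- 4fda000 5000 '
        '/data/app/com.google.android.apps.chrome-2/base.apk',
    ]
    if mmap.ParseLines(lines):
      section = mmap.FindSectionForAddress(0xb278d123)
      # section.path is the base.apk path and section.offset is 0x4fda000.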
+
+
+class BacktraceTranslator(object):
+ """Translates backtrace-related lines in a tombstone or crash report.
+
+ Usage is the following:
+ 1) Create new instance with appropriate arguments.
+ 2) If the tombstone / logcat input is available, one can call
+ FindLibraryOffsets() in order to detect which library offsets
+ will need to be symbolized during a future parse. Doing so helps
+ speed up the ELF symbolizer.
+ 3) For each tombstone/logcat input line, call TranslateLine() to
+ try to detect and symbolize backtrace lines.
+ """
+
+ # A named tuple for relevant input backtrace lines.
+ # Fields:
+  #    rel_pc: Instruction pointer, relative to |offset| within |location|.
+ # location: Library or APK file path.
+ # offset: Load base of executable code in library or apk file path.
+ # match: The corresponding regular expression match object.
+ # Note:
+ # The actual instruction pointer always matches the position at
+ # |offset + rel_pc| in |location|.
+ LineTuple = collections.namedtuple('BacktraceLineTuple',
+ 'rel_pc,location,offset,match')
+
+ def __init__(self, android_abi, apk_translator):
+ """Initialize instance.
+
+ Args:
+ android_abi: Android CPU ABI name (e.g. 'armeabi-v7a').
+ apk_translator: ApkLibraryPathTranslator instance used to convert
+ mapped APK file offsets into uncompressed library file paths with
+ new offsets.
+ """
+ hex_addr = _HexAddressRegexpFor(android_abi)
+
+ # A regular expression used to match backtrace lines.
+ self._re_backtrace = re.compile(
+ r'.*#(?P<frame>[0-9]{2})\s+' +
+ r'(..)\s+' +
+ r'(?P<rel_pc>' + hex_addr + r')\s+' +
+ r'(?P<location>[^ \t]+)' +
+ r'(\s+\(offset 0x(?P<offset>[0-9a-f]+)\))?')
+
+ # In certain cases, offset will be provided as <location>+0x<offset>
+ # instead of <location> (offset 0x<offset>). This is a regexp to detect
+ # this.
+ self._re_location_offset = re.compile(
+ r'.*\+0x(?P<offset>[0-9a-f]+)$')
+
+ self._apk_translator = apk_translator
+ self._in_section = False
+
+ def _ParseLine(self, line):
+ """Used internally to detect and decompose backtrace input lines.
+
+ Args:
+ line: input tombstone line.
+ Returns:
+ A LineTuple instance on success, None on failure.
+ """
+ if not self._in_section:
+ self._in_section = line.startswith('backtrace:')
+ return None
+
+ line = line.rstrip()
+ m = self._re_backtrace.match(line)
+ if not m:
+ self._in_section = False
+ return None
+
+ location = m.group('location')
+ offset = m.group('offset')
+ if not offset:
+ m2 = self._re_location_offset.match(location)
+ if m2:
+ offset = m2.group('offset')
+ location = location[0:m2.start('offset') - 3]
+
+ if not offset:
+ return None
+
+ offset = int(offset, 16)
+ rel_pc = int(m.group('rel_pc'), 16)
+
+ # Two cases to consider here:
+ #
+    # * If this is a library file directly mapped in memory, then |rel_pc|
+    #   is the direct offset within the library, and doesn't need any kind
+    #   of adjustment.
+ #
+ # * If this is a library mapped directly from an .apk file, then
+ # |rel_pc| is the offset in the APK, and |offset| happens to be the
+ # load base of the corresponding library.
+ #
+ if location.endswith('.so'):
+ # For a native library directly mapped from the file system,
+ return self.LineTuple(rel_pc, location, offset, m)
+
+ if location.endswith('.apk'):
+      # For a native library inside a memory-mapped APK file,
+ new_location, new_offset = self._apk_translator.TranslatePath(
+ location, offset)
+
+ return self.LineTuple(rel_pc, new_location, new_offset, m)
+
+ # Ignore anything else (e.g. .oat or .odex files).
+ return None
+
+ def FindLibraryOffsets(self, input_lines, in_section=False):
+ """Parse a tombstone's backtrace section and find all library offsets in it.
+
+ Args:
+      input_lines: List or iterable of input tombstone lines.
+      in_section: Optional. If True, considers that the backtrace section has
+ already started.
+ Returns:
+ A dictionary mapping device library paths to sets of offsets within
+      them.
+ """
+ self._in_section = in_section
+ result = collections.defaultdict(set)
+ for line in input_lines:
+ t = self._ParseLine(line)
+ if not t:
+ continue
+
+ result[t.location].add(t.offset + t.rel_pc)
+ return result
+
+ def TranslateLine(self, line, symbol_resolver):
+ """Symbolize backtrace line if recognized.
+
+ Args:
+ line: input backtrace line.
+ symbol_resolver: symbol resolver instance to use. This method will
+ call its FindSymbolInfo(device_lib_path, lib_offset) method to
+        convert offsets into symbol information strings.
+ Returns:
+      Translated line (unchanged if not recognized as a backtrace line).
+ """
+ t = self._ParseLine(line)
+ if not t:
+ return line
+
+ symbol_info = symbol_resolver.FindSymbolInfo(t.location,
+ t.offset + t.rel_pc)
+ if not symbol_info:
+ symbol_info = 'offset 0x%x' % t.offset
+
+ pos = t.match.start('location')
+ pos2 = t.match.end('offset') + 1
+ if pos2 <= 0:
+ pos2 = t.match.end('location')
+ return '%s%s (%s)%s' % (line[:pos], t.location, symbol_info, line[pos2:])
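+
To make the rel_pc/offset arithmetic concrete, consider the invented frame below: rel_pc is 0x1234 and the load base offset is 0x5000, so the library offset to symbolize is 0x5000 + 0x1234 = 0x6234 (apk_translator is an ApkLibraryPathTranslator as elsewhere in this patch; it is not consulted for a plain .so):

    translator = BacktraceTranslator('armeabi-v7a', apk_translator)
    frame = ('    #01 pc 00001234 '
             '/data/app/com.example-1/lib/arm/libfoo.so (offset 0x5000)')
    offsets = translator.FindLibraryOffsets(['backtrace:', frame])
    # offsets maps the libfoo.so path to set([0x6234]), i.e. 0x5000 + 0x1234.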
+
+
+class StackTranslator(object):
+ """Translates stack-related lines in a tombstone or crash report."""
+
+ # A named tuple describing relevant stack input lines.
+ # Fields:
+ # address: Address as it appears in the stack.
+ # lib_path: Library path where |address| is mapped.
+  #    lib_offset: Library load base offset for |lib_path|.
+ # match: Corresponding regular expression match object.
+ LineTuple = collections.namedtuple('StackLineTuple',
+ 'address, lib_path, lib_offset, match')
+
+ def __init__(self, android_abi, memory_map, apk_translator):
+ """Initialize instance."""
+ hex_addr = _HexAddressRegexpFor(android_abi)
+
+ # pylint: disable=line-too-long
+ # A regular expression used to recognize stack entries like:
+ #
+ # #05 bf89a180 bf89a1e4 [stack]
+ # bf89a1c8 a0c01c51 /data/app/com.google.android.apps.chrome-2/base.apk
+ # bf89a080 00000000
+ # ........ ........
+ # pylint: enable=line-too-long
+ self._re_stack_line = re.compile(
+ r'\s+(?P<frame_number>#[0-9]+)?\s*' +
+ r'(?P<stack_addr>' + hex_addr + r')\s+' +
+ r'(?P<stack_value>' + hex_addr + r')' +
+ r'(\s+(?P<location>[^ \t]+))?')
+
+ self._re_stack_abbrev = re.compile(r'\s+[.]+\s+[.]+')
+
+ self._memory_map = memory_map
+ self._apk_translator = apk_translator
+ self._in_section = False
+
+ def _ParseLine(self, line):
+ """Check a given input line for a relevant _re_stack_line match.
+
+ Args:
+ line: input tombstone line.
+ Returns:
+ A LineTuple instance on success, None on failure.
+ """
+ line = line.rstrip()
+ if not self._in_section:
+ self._in_section = line.startswith('stack:')
+ return None
+
+ m = self._re_stack_line.match(line)
+ if not m:
+ if not self._re_stack_abbrev.match(line):
+ self._in_section = False
+ return None
+
+ location = m.group('location')
+ if not location:
+ return None
+
+ if not location.endswith('.apk') and not location.endswith('.so'):
+ return None
+
+ addr = int(m.group('stack_value'), 16)
+ t = self._memory_map.FindSectionForAddress(addr)
+ if t is None:
+ return None
+
+ lib_path = t.path
+ lib_offset = t.offset + (addr - t.address)
+
+ if lib_path.endswith('.apk'):
+ lib_path, lib_offset = self._apk_translator.TranslatePath(
+ lib_path, lib_offset)
+
+ return self.LineTuple(addr, lib_path, lib_offset, m)
+
+ def FindLibraryOffsets(self, input_lines, in_section=False):
+ """Parse a tombstone's stack section and find all library offsets in it.
+
+ Args:
+      input_lines: List or iterable of input tombstone lines.
+ in_section: Optional. If True, considers that the stack section has
+ already started.
+ Returns:
+ A dictionary mapping device library paths to sets of offsets within
+      them.
+ """
+ result = collections.defaultdict(set)
+ self._in_section = in_section
+ for line in input_lines:
+ t = self._ParseLine(line)
+ if t:
+ result[t.lib_path].add(t.lib_offset)
+ return result
+
+ def TranslateLine(self, line, symbol_resolver=None):
+ """Try to translate a line of the stack dump."""
+ t = self._ParseLine(line)
+ if not t:
+ return line
+
+ symbol_info = symbol_resolver.FindSymbolInfo(t.lib_path, t.lib_offset)
+ if not symbol_info:
+ return line
+
+ pos = t.match.start('location')
+ pos2 = t.match.end('location')
+ return '%s%s (%s)%s' % (line[:pos], t.lib_path, symbol_info, line[pos2:])
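+
A sketch of the overall tombstone flow these classes enable (tombstone_lines, apk_translator and resolver are assumed to exist; resolver would typically be a DeviceSymbolResolver wrapping an ElfSymbolResolver):

    mmap = MemoryMap('armeabi-v7a')
    mmap.ParseLines(tombstone_lines)
    stack = StackTranslator('armeabi-v7a', mmap, apk_translator)
    # Pre-register every stack offset so symbolization happens in batches.
    for lib, offs in stack.FindLibraryOffsets(tombstone_lines).iteritems():
      resolver.AddLibraryOffsets(lib, offs)
    for line in tombstone_lines:
      print stack.TranslateLine(line, resolver)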
diff --git a/deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py b/deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py
new file mode 100644
index 0000000000..82a7e313ef
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py
@@ -0,0 +1,943 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import contextlib
+import logging
+import os
+import re
+import shutil
+import tempfile
+import unittest
+
+from pylib.symbols import apk_native_libs_unittest
+from pylib.symbols import mock_addr2line
+from pylib.symbols import symbol_utils
+
+_MOCK_ELF_DATA = apk_native_libs_unittest.MOCK_ELF_DATA
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+ 'mock_addr2line')
+
+
+# pylint: disable=line-too-long
+
+# list of (start_offset, end_offset, size, libpath) tuples corresponding
+# to the content of base.apk. This was taken from an x86 ChromeModern.apk
+# component build.
+_TEST_APK_LIBS = [
+ (0x01331000, 0x013696bc, 0x000386bc, 'libaccessibility.cr.so'),
+ (0x0136a000, 0x013779c4, 0x0000d9c4, 'libanimation.cr.so'),
+ (0x01378000, 0x0137f7e8, 0x000077e8, 'libapdu.cr.so'),
+ (0x01380000, 0x0155ccc8, 0x001dccc8, 'libbase.cr.so'),
+ (0x0155d000, 0x015ab98c, 0x0004e98c, 'libbase_i18n.cr.so'),
+ (0x015ac000, 0x015dff4c, 0x00033f4c, 'libbindings.cr.so'),
+ (0x015e0000, 0x015f5a54, 0x00015a54, 'libbindings_base.cr.so'),
+ (0x0160e000, 0x01731960, 0x00123960, 'libblink_common.cr.so'),
+ (0x01732000, 0x0174ce54, 0x0001ae54, 'libblink_controller.cr.so'),
+ (0x0174d000, 0x0318c528, 0x01a3f528, 'libblink_core.cr.so'),
+ (0x0318d000, 0x03191700, 0x00004700, 'libblink_mojom_broadcastchannel_bindings_shared.cr.so'),
+ (0x03192000, 0x03cd7918, 0x00b45918, 'libblink_modules.cr.so'),
+ (0x03cd8000, 0x03d137d0, 0x0003b7d0, 'libblink_mojo_bindings_shared.cr.so'),
+ (0x03d14000, 0x03d2670c, 0x0001270c, 'libblink_offscreen_canvas_mojo_bindings_shared.cr.so'),
+ (0x03d27000, 0x046c7054, 0x009a0054, 'libblink_platform.cr.so'),
+ (0x046c8000, 0x0473fbfc, 0x00077bfc, 'libbluetooth.cr.so'),
+ (0x04740000, 0x04878f40, 0x00138f40, 'libboringssl.cr.so'),
+ (0x04879000, 0x0498466c, 0x0010b66c, 'libc++_shared.so'),
+ (0x04985000, 0x0498d93c, 0x0000893c, 'libcaptive_portal.cr.so'),
+ (0x0498e000, 0x049947cc, 0x000067cc, 'libcapture_base.cr.so'),
+ (0x04995000, 0x04b39f18, 0x001a4f18, 'libcapture_lib.cr.so'),
+ (0x04b3a000, 0x04b488ec, 0x0000e8ec, 'libcbor.cr.so'),
+ (0x04b49000, 0x04e9ea5c, 0x00355a5c, 'libcc.cr.so'),
+ (0x04e9f000, 0x04ed6404, 0x00037404, 'libcc_animation.cr.so'),
+ (0x04ed7000, 0x04ef5ab4, 0x0001eab4, 'libcc_base.cr.so'),
+ (0x04ef6000, 0x04fd9364, 0x000e3364, 'libcc_blink.cr.so'),
+ (0x04fda000, 0x04fe2758, 0x00008758, 'libcc_debug.cr.so'),
+ (0x04fe3000, 0x0500ae0c, 0x00027e0c, 'libcc_ipc.cr.so'),
+ (0x0500b000, 0x05078f38, 0x0006df38, 'libcc_paint.cr.so'),
+ (0x05079000, 0x0507e734, 0x00005734, 'libcdm_manager.cr.so'),
+ (0x0507f000, 0x06f4d744, 0x01ece744, 'libchrome.cr.so'),
+ (0x06f54000, 0x06feb830, 0x00097830, 'libchromium_sqlite3.cr.so'),
+ (0x06fec000, 0x0706f554, 0x00083554, 'libclient.cr.so'),
+ (0x07070000, 0x0708da60, 0x0001da60, 'libcloud_policy_proto_generated_compile.cr.so'),
+ (0x0708e000, 0x07121f28, 0x00093f28, 'libcodec.cr.so'),
+ (0x07122000, 0x07134ab8, 0x00012ab8, 'libcolor_space.cr.so'),
+ (0x07135000, 0x07138614, 0x00003614, 'libcommon.cr.so'),
+ (0x07139000, 0x0717c938, 0x00043938, 'libcompositor.cr.so'),
+ (0x0717d000, 0x0923d78c, 0x020c078c, 'libcontent.cr.so'),
+ (0x0923e000, 0x092ae87c, 0x0007087c, 'libcontent_common_mojo_bindings_shared.cr.so'),
+ (0x092af000, 0x092be718, 0x0000f718, 'libcontent_public_common_mojo_bindings_shared.cr.so'),
+ (0x092bf000, 0x092d9a20, 0x0001aa20, 'libcrash_key.cr.so'),
+ (0x092da000, 0x092eda58, 0x00013a58, 'libcrcrypto.cr.so'),
+ (0x092ee000, 0x092f16e0, 0x000036e0, 'libdevice_base.cr.so'),
+ (0x092f2000, 0x092fe8d8, 0x0000c8d8, 'libdevice_event_log.cr.so'),
+ (0x092ff000, 0x093026a4, 0x000036a4, 'libdevice_features.cr.so'),
+ (0x09303000, 0x093f1220, 0x000ee220, 'libdevice_gamepad.cr.so'),
+ (0x093f2000, 0x09437f54, 0x00045f54, 'libdevice_vr_mojo_bindings.cr.so'),
+ (0x09438000, 0x0954c168, 0x00114168, 'libdevice_vr_mojo_bindings_blink.cr.so'),
+ (0x0954d000, 0x0955d720, 0x00010720, 'libdevice_vr_mojo_bindings_shared.cr.so'),
+ (0x0955e000, 0x0956b9c0, 0x0000d9c0, 'libdevices.cr.so'),
+ (0x0956c000, 0x0957cae8, 0x00010ae8, 'libdiscardable_memory_client.cr.so'),
+ (0x0957d000, 0x09588854, 0x0000b854, 'libdiscardable_memory_common.cr.so'),
+ (0x09589000, 0x0959cbb4, 0x00013bb4, 'libdiscardable_memory_service.cr.so'),
+ (0x0959d000, 0x095b6b90, 0x00019b90, 'libdisplay.cr.so'),
+ (0x095b7000, 0x095be930, 0x00007930, 'libdisplay_types.cr.so'),
+ (0x095bf000, 0x095c46c4, 0x000056c4, 'libdisplay_util.cr.so'),
+ (0x095c5000, 0x095f54a4, 0x000304a4, 'libdomain_reliability.cr.so'),
+ (0x095f6000, 0x0966fe08, 0x00079e08, 'libembedder.cr.so'),
+ (0x09670000, 0x096735f8, 0x000035f8, 'libembedder_switches.cr.so'),
+ (0x09674000, 0x096a3460, 0x0002f460, 'libevents.cr.so'),
+ (0x096a4000, 0x096b6d40, 0x00012d40, 'libevents_base.cr.so'),
+ (0x096b7000, 0x0981a778, 0x00163778, 'libffmpeg.cr.so'),
+ (0x0981b000, 0x09945c94, 0x0012ac94, 'libfido.cr.so'),
+ (0x09946000, 0x09a330dc, 0x000ed0dc, 'libfingerprint.cr.so'),
+ (0x09a34000, 0x09b53170, 0x0011f170, 'libfreetype_harfbuzz.cr.so'),
+ (0x09b54000, 0x09bc5c5c, 0x00071c5c, 'libgcm.cr.so'),
+ (0x09bc6000, 0x09cc8584, 0x00102584, 'libgeolocation.cr.so'),
+ (0x09cc9000, 0x09cdc8d4, 0x000138d4, 'libgeometry.cr.so'),
+ (0x09cdd000, 0x09cec8b4, 0x0000f8b4, 'libgeometry_skia.cr.so'),
+ (0x09ced000, 0x09d10e14, 0x00023e14, 'libgesture_detection.cr.so'),
+ (0x09d11000, 0x09d7595c, 0x0006495c, 'libgfx.cr.so'),
+ (0x09d76000, 0x09d7d7cc, 0x000077cc, 'libgfx_ipc.cr.so'),
+ (0x09d7e000, 0x09d82708, 0x00004708, 'libgfx_ipc_buffer_types.cr.so'),
+ (0x09d83000, 0x09d89748, 0x00006748, 'libgfx_ipc_color.cr.so'),
+ (0x09d8a000, 0x09d8f6f4, 0x000056f4, 'libgfx_ipc_geometry.cr.so'),
+ (0x09d90000, 0x09d94754, 0x00004754, 'libgfx_ipc_skia.cr.so'),
+ (0x09d95000, 0x09d9869c, 0x0000369c, 'libgfx_switches.cr.so'),
+ (0x09d99000, 0x09dba0ac, 0x000210ac, 'libgin.cr.so'),
+ (0x09dbb000, 0x09e0a8cc, 0x0004f8cc, 'libgl_in_process_context.cr.so'),
+ (0x09e0b000, 0x09e17a18, 0x0000ca18, 'libgl_init.cr.so'),
+ (0x09e18000, 0x09ee34e4, 0x000cb4e4, 'libgl_wrapper.cr.so'),
+ (0x09ee4000, 0x0a1a2e00, 0x002bee00, 'libgles2.cr.so'),
+ (0x0a1a3000, 0x0a24556c, 0x000a256c, 'libgles2_implementation.cr.so'),
+ (0x0a246000, 0x0a267038, 0x00021038, 'libgles2_utils.cr.so'),
+ (0x0a268000, 0x0a3288e4, 0x000c08e4, 'libgpu.cr.so'),
+ (0x0a329000, 0x0a3627ec, 0x000397ec, 'libgpu_ipc_service.cr.so'),
+ (0x0a363000, 0x0a388a18, 0x00025a18, 'libgpu_util.cr.so'),
+ (0x0a389000, 0x0a506d8c, 0x0017dd8c, 'libhost.cr.so'),
+ (0x0a507000, 0x0a6f0ec0, 0x001e9ec0, 'libicui18n.cr.so'),
+ (0x0a6f1000, 0x0a83b4c8, 0x0014a4c8, 'libicuuc.cr.so'),
+ (0x0a83c000, 0x0a8416e4, 0x000056e4, 'libinterfaces_shared.cr.so'),
+ (0x0a842000, 0x0a87e2a0, 0x0003c2a0, 'libipc.cr.so'),
+ (0x0a87f000, 0x0a88c98c, 0x0000d98c, 'libipc_mojom.cr.so'),
+ (0x0a88d000, 0x0a8926e4, 0x000056e4, 'libipc_mojom_shared.cr.so'),
+ (0x0a893000, 0x0a8a1e18, 0x0000ee18, 'libkeyed_service_content.cr.so'),
+ (0x0a8a2000, 0x0a8b4a30, 0x00012a30, 'libkeyed_service_core.cr.so'),
+ (0x0a8b5000, 0x0a930a80, 0x0007ba80, 'libleveldatabase.cr.so'),
+ (0x0a931000, 0x0a9b3908, 0x00082908, 'libmanager.cr.so'),
+ (0x0a9b4000, 0x0aea9bb4, 0x004f5bb4, 'libmedia.cr.so'),
+ (0x0aeaa000, 0x0b08cb88, 0x001e2b88, 'libmedia_blink.cr.so'),
+ (0x0b08d000, 0x0b0a4728, 0x00017728, 'libmedia_devices_mojo_bindings_shared.cr.so'),
+ (0x0b0a5000, 0x0b1943ec, 0x000ef3ec, 'libmedia_gpu.cr.so'),
+ (0x0b195000, 0x0b2d07d4, 0x0013b7d4, 'libmedia_mojo_services.cr.so'),
+ (0x0b2d1000, 0x0b2d4760, 0x00003760, 'libmessage_center.cr.so'),
+ (0x0b2d5000, 0x0b2e0938, 0x0000b938, 'libmessage_support.cr.so'),
+ (0x0b2e1000, 0x0b2f3ad0, 0x00012ad0, 'libmetrics_cpp.cr.so'),
+ (0x0b2f4000, 0x0b313bb8, 0x0001fbb8, 'libmidi.cr.so'),
+ (0x0b314000, 0x0b31b848, 0x00007848, 'libmojo_base_lib.cr.so'),
+ (0x0b31c000, 0x0b3329f8, 0x000169f8, 'libmojo_base_mojom.cr.so'),
+ (0x0b333000, 0x0b34b98c, 0x0001898c, 'libmojo_base_mojom_blink.cr.so'),
+ (0x0b34c000, 0x0b354700, 0x00008700, 'libmojo_base_mojom_shared.cr.so'),
+ (0x0b355000, 0x0b3608b0, 0x0000b8b0, 'libmojo_base_shared_typemap_traits.cr.so'),
+ (0x0b361000, 0x0b3ad454, 0x0004c454, 'libmojo_edk.cr.so'),
+ (0x0b3ae000, 0x0b3c4a20, 0x00016a20, 'libmojo_edk_ports.cr.so'),
+ (0x0b3c5000, 0x0b3d38a0, 0x0000e8a0, 'libmojo_mojom_bindings.cr.so'),
+ (0x0b3d4000, 0x0b3da6e8, 0x000066e8, 'libmojo_mojom_bindings_shared.cr.so'),
+ (0x0b3db000, 0x0b3e27f0, 0x000077f0, 'libmojo_public_system.cr.so'),
+ (0x0b3e3000, 0x0b3fa9fc, 0x000179fc, 'libmojo_public_system_cpp.cr.so'),
+ (0x0b3fb000, 0x0b407728, 0x0000c728, 'libmojom_core_shared.cr.so'),
+ (0x0b408000, 0x0b421744, 0x00019744, 'libmojom_platform_shared.cr.so'),
+ (0x0b422000, 0x0b43451c, 0x0001251c, 'libnative_theme.cr.so'),
+ (0x0b435000, 0x0baaa1bc, 0x006751bc, 'libnet.cr.so'),
+ (0x0baab000, 0x0bac3c08, 0x00018c08, 'libnet_with_v8.cr.so'),
+ (0x0bac4000, 0x0bb74670, 0x000b0670, 'libnetwork_cpp.cr.so'),
+ (0x0bb75000, 0x0bbaee8c, 0x00039e8c, 'libnetwork_cpp_base.cr.so'),
+ (0x0bbaf000, 0x0bd21844, 0x00172844, 'libnetwork_service.cr.so'),
+ (0x0bd22000, 0x0bd256e4, 0x000036e4, 'libnetwork_session_configurator.cr.so'),
+ (0x0bd26000, 0x0bd33734, 0x0000d734, 'libonc.cr.so'),
+ (0x0bd34000, 0x0bd9ce18, 0x00068e18, 'libperfetto.cr.so'),
+ (0x0bd9d000, 0x0bda4854, 0x00007854, 'libplatform.cr.so'),
+ (0x0bda5000, 0x0bec5ce4, 0x00120ce4, 'libpolicy_component.cr.so'),
+ (0x0bec6000, 0x0bf5ab58, 0x00094b58, 'libpolicy_proto.cr.so'),
+ (0x0bf5b000, 0x0bf86fbc, 0x0002bfbc, 'libprefs.cr.so'),
+ (0x0bf87000, 0x0bfa5d74, 0x0001ed74, 'libprinting.cr.so'),
+ (0x0bfa6000, 0x0bfe0e80, 0x0003ae80, 'libprotobuf_lite.cr.so'),
+ (0x0bfe1000, 0x0bff0a18, 0x0000fa18, 'libproxy_config.cr.so'),
+ (0x0bff1000, 0x0c0f6654, 0x00105654, 'libpublic.cr.so'),
+ (0x0c0f7000, 0x0c0fa6a4, 0x000036a4, 'librange.cr.so'),
+ (0x0c0fb000, 0x0c118058, 0x0001d058, 'libraster.cr.so'),
+ (0x0c119000, 0x0c133d00, 0x0001ad00, 'libresource_coordinator_cpp.cr.so'),
+ (0x0c134000, 0x0c1396a0, 0x000056a0, 'libresource_coordinator_cpp_base.cr.so'),
+ (0x0c13a000, 0x0c1973b8, 0x0005d3b8, 'libresource_coordinator_public_mojom.cr.so'),
+ (0x0c198000, 0x0c2033e8, 0x0006b3e8, 'libresource_coordinator_public_mojom_blink.cr.so'),
+ (0x0c204000, 0x0c219744, 0x00015744, 'libresource_coordinator_public_mojom_shared.cr.so'),
+ (0x0c21a000, 0x0c21e700, 0x00004700, 'libsandbox.cr.so'),
+ (0x0c21f000, 0x0c22f96c, 0x0001096c, 'libsandbox_services.cr.so'),
+ (0x0c230000, 0x0c249d58, 0x00019d58, 'libseccomp_bpf.cr.so'),
+ (0x0c24a000, 0x0c24e714, 0x00004714, 'libseccomp_starter_android.cr.so'),
+ (0x0c24f000, 0x0c4ae9f0, 0x0025f9f0, 'libservice.cr.so'),
+ (0x0c4af000, 0x0c4c3ae4, 0x00014ae4, 'libservice_manager_cpp.cr.so'),
+ (0x0c4c4000, 0x0c4cb708, 0x00007708, 'libservice_manager_cpp_types.cr.so'),
+ (0x0c4cc000, 0x0c4fbe30, 0x0002fe30, 'libservice_manager_mojom.cr.so'),
+ (0x0c4fc000, 0x0c532e78, 0x00036e78, 'libservice_manager_mojom_blink.cr.so'),
+ (0x0c533000, 0x0c53669c, 0x0000369c, 'libservice_manager_mojom_constants.cr.so'),
+ (0x0c537000, 0x0c53e85c, 0x0000785c, 'libservice_manager_mojom_constants_blink.cr.so'),
+ (0x0c53f000, 0x0c542668, 0x00003668, 'libservice_manager_mojom_constants_shared.cr.so'),
+ (0x0c543000, 0x0c54d700, 0x0000a700, 'libservice_manager_mojom_shared.cr.so'),
+ (0x0c54e000, 0x0c8fc6ec, 0x003ae6ec, 'libsessions.cr.so'),
+ (0x0c8fd000, 0x0c90a924, 0x0000d924, 'libshared_memory_support.cr.so'),
+ (0x0c90b000, 0x0c9148ec, 0x000098ec, 'libshell_dialogs.cr.so'),
+ (0x0c915000, 0x0cf8de70, 0x00678e70, 'libskia.cr.so'),
+ (0x0cf8e000, 0x0cf978bc, 0x000098bc, 'libsnapshot.cr.so'),
+ (0x0cf98000, 0x0cfb7d9c, 0x0001fd9c, 'libsql.cr.so'),
+ (0x0cfb8000, 0x0cfbe744, 0x00006744, 'libstartup_tracing.cr.so'),
+ (0x0cfbf000, 0x0d19b4e4, 0x001dc4e4, 'libstorage_browser.cr.so'),
+ (0x0d19c000, 0x0d2a773c, 0x0010b73c, 'libstorage_common.cr.so'),
+ (0x0d2a8000, 0x0d2ac6fc, 0x000046fc, 'libsurface.cr.so'),
+ (0x0d2ad000, 0x0d2baa98, 0x0000da98, 'libtracing.cr.so'),
+ (0x0d2bb000, 0x0d2f36b0, 0x000386b0, 'libtracing_cpp.cr.so'),
+ (0x0d2f4000, 0x0d326e70, 0x00032e70, 'libtracing_mojom.cr.so'),
+ (0x0d327000, 0x0d33270c, 0x0000b70c, 'libtracing_mojom_shared.cr.so'),
+ (0x0d333000, 0x0d46d804, 0x0013a804, 'libui_android.cr.so'),
+ (0x0d46e000, 0x0d4cb3f8, 0x0005d3f8, 'libui_base.cr.so'),
+ (0x0d4cc000, 0x0d4dbc40, 0x0000fc40, 'libui_base_ime.cr.so'),
+ (0x0d4dc000, 0x0d4e58d4, 0x000098d4, 'libui_data_pack.cr.so'),
+ (0x0d4e6000, 0x0d51d1e0, 0x000371e0, 'libui_devtools.cr.so'),
+ (0x0d51e000, 0x0d52b984, 0x0000d984, 'libui_message_center_cpp.cr.so'),
+ (0x0d52c000, 0x0d539a48, 0x0000da48, 'libui_touch_selection.cr.so'),
+ (0x0d53a000, 0x0d55bc60, 0x00021c60, 'liburl.cr.so'),
+ (0x0d55c000, 0x0d55f6b4, 0x000036b4, 'liburl_ipc.cr.so'),
+ (0x0d560000, 0x0d5af110, 0x0004f110, 'liburl_matcher.cr.so'),
+ (0x0d5b0000, 0x0d5e2fac, 0x00032fac, 'libuser_manager.cr.so'),
+ (0x0d5e3000, 0x0d5e66e4, 0x000036e4, 'libuser_prefs.cr.so'),
+ (0x0d5e7000, 0x0e3e1cc8, 0x00dfacc8, 'libv8.cr.so'),
+ (0x0e3e2000, 0x0e400ae0, 0x0001eae0, 'libv8_libbase.cr.so'),
+ (0x0e401000, 0x0e4d91d4, 0x000d81d4, 'libviz_common.cr.so'),
+ (0x0e4da000, 0x0e4df7e4, 0x000057e4, 'libviz_resource_format.cr.so'),
+ (0x0e4e0000, 0x0e5b7120, 0x000d7120, 'libweb_dialogs.cr.so'),
+ (0x0e5b8000, 0x0e5c7a18, 0x0000fa18, 'libwebdata_common.cr.so'),
+ (0x0e5c8000, 0x0e61bfe4, 0x00053fe4, 'libwtf.cr.so'),
+]
+
+
+# A small memory map fragment extracted from a tombstone for a process that
+# had loaded the APK corresponding to _TEST_APK_LIBS above.
+_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw- 0 cb000 /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw- cb000 400000 /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff --- 4cb000 1fb35000 /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw- 0 1000 /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff --- 1000 1ffff000 /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw- 0 9d9000 /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r-- 0 1eb2000 /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x 1eb2000 1cfc000 /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw- 3bae000 1000 /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw- 0 dc000 /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw- 0 1000 /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw- 1000 11000 /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff --- 12000 3b13000 /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw- 3b25000 3ff000 /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r-- 8a9000 18f000 /data/app/com.example.app-2/base.apk
+92539000-9255bfff r-- 0 23000 /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r-- 213000 38000 /data/app/com.example.app-2/base.apk
+92594000-925c0fff r-- 87d000 2d000 /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r-- a37000 213000 /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r-- 24a000 634000 /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r-- a931000 30000 /data/app/com.example.app-2/base.apk
+92e38000-92e86fff r-x a961000 4f000 /data/app/com.example.app-2/base.apk
+92e87000-92e8afff rw- a9b0000 4000 /data/app/com.example.app-2/base.apk
+92e8b000-92e8bfff rw- 0 1000
+92e8c000-92e9dfff r-- d5b0000 12000 /data/app/com.example.app-2/base.apk
+92e9e000-92ebcfff r-x d5c2000 1f000 /data/app/com.example.app-2/base.apk
+92ebd000-92ebefff rw- d5e1000 2000 /data/app/com.example.app-2/base.apk
+92ebf000-92ebffff rw- 0 1000
+'''
+
+# A list of (address, size, path, offset) tuples that must appear in
+# _TEST_MEMORY_MAP. Not all sections need to be listed.
+_TEST_MEMORY_MAP_SECTIONS = [
+ (0x923aa000, 0x18f000, '/data/app/com.example.app-2/base.apk', 0x8a9000),
+ (0x9255c000, 0x038000, '/data/app/com.example.app-2/base.apk', 0x213000),
+ (0x92594000, 0x02d000, '/data/app/com.example.app-2/base.apk', 0x87d000),
+ (0x925c1000, 0x213000, '/data/app/com.example.app-2/base.apk', 0xa37000),
+]
+
+_EXPECTED_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw- 0 cb000 /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw- cb000 400000 /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff --- 4cb000 1fb35000 /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw- 0 1000 /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff --- 1000 1ffff000 /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw- 0 9d9000 /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r-- 0 1eb2000 /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x 1eb2000 1cfc000 /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw- 3bae000 1000 /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw- 0 dc000 /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw- 0 1000 /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw- 1000 11000 /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff --- 12000 3b13000 /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw- 3b25000 3ff000 /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r-- 8a9000 18f000 /data/app/com.example.app-2/base.apk
+92539000-9255bfff r-- 0 23000 /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r-- 213000 38000 /data/app/com.example.app-2/base.apk
+92594000-925c0fff r-- 87d000 2d000 /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r-- a37000 213000 /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r-- 24a000 634000 /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r-- a931000 30000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x0)
+92e38000-92e86fff r-x a961000 4f000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x30000)
+92e87000-92e8afff rw- a9b0000 4000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x7f000)
+92e8b000-92e8bfff rw- 0 1000
+92e8c000-92e9dfff r-- d5b0000 12000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x0)
+92e9e000-92ebcfff r-x d5c2000 1f000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x12000)
+92ebd000-92ebefff rw- d5e1000 2000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x31000)
+92ebf000-92ebffff rw- 0 1000
+'''
+
+# Example stack section, taken from the same tombstone that _TEST_MEMORY_MAP
+# was extracted from.
+_TEST_STACK = r'''stack:
+ bf89a070 b7439468 /system/lib/libc.so
+ bf89a074 bf89a1e4 [stack]
+ bf89a078 932d4000 /data/app/com.example.app-2/base.apk
+ bf89a07c b73bfbc9 /system/lib/libc.so (pthread_mutex_lock+65)
+ bf89a080 00000000
+ bf89a084 4000671c /dev/ashmem/dalvik-main space 1 (deleted)
+ bf89a088 932d1d86 /data/app/com.example.app-2/base.apk
+ bf89a08c b743671c /system/lib/libc.so
+ bf89a090 b77f8c00 /system/bin/linker
+ bf89a094 b743cc90
+ bf89a098 932d1d4a /data/app/com.example.app-2/base.apk
+ bf89a09c b73bf271 /system/lib/libc.so (__pthread_internal_find(long)+65)
+ bf89a0a0 b743cc90
+ bf89a0a4 bf89a0b0 [stack]
+ bf89a0a8 bf89a0b8 [stack]
+ bf89a0ac 00000008
+ ........ ........
+ #00 bf89a0b0 00000006
+ bf89a0b4 00000002
+ bf89a0b8 b743671c /system/lib/libc.so
+ bf89a0bc b73bf5d9 /system/lib/libc.so (pthread_kill+71)
+ #01 bf89a0c0 00006937
+ bf89a0c4 00006937
+ bf89a0c8 00000006
+ bf89a0cc b77fd3a9 /system/bin/app_process32 (sigprocmask+141)
+ bf89a0d0 00000002
+ bf89a0d4 bf89a0ec [stack]
+ bf89a0d8 00000000
+ bf89a0dc b743671c /system/lib/libc.so
+ bf89a0e0 bf89a12c [stack]
+ bf89a0e4 bf89a1e4 [stack]
+ bf89a0e8 932d1d4a /data/app/com.example.app-2/base.apk
+ bf89a0ec b7365206 /system/lib/libc.so (raise+37)
+ #02 bf89a0f0 b77f8c00 /system/bin/linker
+ bf89a0f4 00000006
+ bf89a0f8 b7439468 /system/lib/libc.so
+ bf89a0fc b743671c /system/lib/libc.so
+ bf89a100 bf89a12c [stack]
+ bf89a104 b743671c /system/lib/libc.so
+ bf89a108 bf89a12c [stack]
+ bf89a10c b735e9e5 /system/lib/libc.so (abort+81)
+ #03 bf89a110 00000006
+ bf89a114 bf89a12c [stack]
+ bf89a118 00000000
+ bf89a11c b55a3d3b /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
+ bf89a120 b7439468 /system/lib/libc.so
+ bf89a124 b55ba38d /system/lib/libprotobuf-cpp-lite.so
+ bf89a128 b55ba408 /system/lib/libprotobuf-cpp-lite.so
+ bf89a12c ffffffdf
+ bf89a130 0000003d
+ bf89a134 adfedf00 [anon:libc_malloc]
+ bf89a138 bf89a158 [stack]
+ #04 bf89a13c a0cee7f0 /data/app/com.example.app-2/base.apk
+ bf89a140 b55c1cb0 /system/lib/libprotobuf-cpp-lite.so
+ bf89a144 bf89a1e4 [stack]
+'''
+
+# Expected value of _TEST_STACK after translation of addresses in the APK
+# into offsets into libraries.
+_EXPECTED_STACK = r'''stack:
+ bf89a070 b7439468 /system/lib/libc.so
+ bf89a074 bf89a1e4 [stack]
+ bf89a078 932d4000 /data/app/com.example.app-2/base.apk
+ bf89a07c b73bfbc9 /system/lib/libc.so (pthread_mutex_lock+65)
+ bf89a080 00000000
+ bf89a084 4000671c /dev/ashmem/dalvik-main space 1 (deleted)
+ bf89a088 932d1d86 /data/app/com.example.app-2/base.apk
+ bf89a08c b743671c /system/lib/libc.so
+ bf89a090 b77f8c00 /system/bin/linker
+ bf89a094 b743cc90
+ bf89a098 932d1d4a /data/app/com.example.app-2/base.apk
+ bf89a09c b73bf271 /system/lib/libc.so (__pthread_internal_find(long)+65)
+ bf89a0a0 b743cc90
+ bf89a0a4 bf89a0b0 [stack]
+ bf89a0a8 bf89a0b8 [stack]
+ bf89a0ac 00000008
+ ........ ........
+ #00 bf89a0b0 00000006
+ bf89a0b4 00000002
+ bf89a0b8 b743671c /system/lib/libc.so
+ bf89a0bc b73bf5d9 /system/lib/libc.so (pthread_kill+71)
+ #01 bf89a0c0 00006937
+ bf89a0c4 00006937
+ bf89a0c8 00000006
+ bf89a0cc b77fd3a9 /system/bin/app_process32 (sigprocmask+141)
+ bf89a0d0 00000002
+ bf89a0d4 bf89a0ec [stack]
+ bf89a0d8 00000000
+ bf89a0dc b743671c /system/lib/libc.so
+ bf89a0e0 bf89a12c [stack]
+ bf89a0e4 bf89a1e4 [stack]
+ bf89a0e8 932d1d4a /data/app/com.example.app-2/base.apk
+ bf89a0ec b7365206 /system/lib/libc.so (raise+37)
+ #02 bf89a0f0 b77f8c00 /system/bin/linker
+ bf89a0f4 00000006
+ bf89a0f8 b7439468 /system/lib/libc.so
+ bf89a0fc b743671c /system/lib/libc.so
+ bf89a100 bf89a12c [stack]
+ bf89a104 b743671c /system/lib/libc.so
+ bf89a108 bf89a12c [stack]
+ bf89a10c b735e9e5 /system/lib/libc.so (abort+81)
+ #03 bf89a110 00000006
+ bf89a114 bf89a12c [stack]
+ bf89a118 00000000
+ bf89a11c b55a3d3b /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
+ bf89a120 b7439468 /system/lib/libc.so
+ bf89a124 b55ba38d /system/lib/libprotobuf-cpp-lite.so
+ bf89a128 b55ba408 /system/lib/libprotobuf-cpp-lite.so
+ bf89a12c ffffffdf
+ bf89a130 0000003d
+ bf89a134 adfedf00 [anon:libc_malloc]
+ bf89a138 bf89a158 [stack]
+ #04 bf89a13c a0cee7f0 /data/app/com.example.app-2/base.apk
+ bf89a140 b55c1cb0 /system/lib/libprotobuf-cpp-lite.so
+ bf89a144 bf89a1e4 [stack]
+'''
+
+_TEST_BACKTRACE = r'''backtrace:
+ #00 pc 00084126 /system/lib/libc.so (tgkill+22)
+ #01 pc 000815d8 /system/lib/libc.so (pthread_kill+70)
+ #02 pc 00027205 /system/lib/libc.so (raise+36)
+ #03 pc 000209e4 /system/lib/libc.so (abort+80)
+ #04 pc 0000cf73 /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
+ #05 pc 0000cf8e /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
+ #06 pc 0000d27f /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
+ #07 pc 007cd236 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #08 pc 000111a9 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+ #09 pc 00013228 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+ #10 pc 000131de /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
+ #11 pc 007cd2d8 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #12 pc 007cd956 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #13 pc 007c2d4a /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #14 pc 009fc9f1 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #15 pc 009fc8ea /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #16 pc 00561c63 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #17 pc 0106fbdb /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #18 pc 004d7371 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #19 pc 004d8159 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #20 pc 004d7b96 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #21 pc 004da4b6 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #22 pc 005ab66c /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+ #23 pc 005afca2 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+ #24 pc 0000cae8 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
+ #25 pc 00ce864f /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+ #26 pc 00ce8dfa /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+ #27 pc 00ce74c6 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+ #28 pc 00004616 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x961e000)
+ #29 pc 00ce8215 /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
+ #30 pc 0013d8c7 /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
+ #31 pc 00137c52 /system/lib/libart.so (art_quick_invoke_static_stub+418)
+ #32 pc 00143651 /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
+ #33 pc 005e06ae /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
+ #34 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #35 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+ #36 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+ #37 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+ #38 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #39 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+ #40 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+ #41 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+ #42 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #43 pc 0032ebf9 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
+ #44 pc 000fc955 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
+ #45 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+ #46 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #47 pc 0033090c /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
+ #48 pc 000fc67f /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
+ #49 pc 00300700 /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
+ #50 pc 00667c73 /system/lib/libart.so (artQuickToInterpreterBridge+808)
+ #51 pc 0013d98d /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
+ #52 pc 7264bc5b /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
+'''
+
+_EXPECTED_BACKTRACE = r'''backtrace:
+ #00 pc 00084126 /system/lib/libc.so (tgkill+22)
+ #01 pc 000815d8 /system/lib/libc.so (pthread_kill+70)
+ #02 pc 00027205 /system/lib/libc.so (raise+36)
+ #03 pc 000209e4 /system/lib/libc.so (abort+80)
+ #04 pc 0000cf73 /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
+ #05 pc 0000cf8e /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
+ #06 pc 0000d27f /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
+ #07 pc 007cd236 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #08 pc 000111a9 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+ #09 pc 00013228 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+ #10 pc 000131de /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
+ #11 pc 007cd2d8 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #12 pc 007cd956 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #13 pc 007c2d4a /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #14 pc 009fc9f1 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #15 pc 009fc8ea /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #16 pc 00561c63 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #17 pc 0106fbdb /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #18 pc 004d7371 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #19 pc 004d8159 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #20 pc 004d7b96 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #21 pc 004da4b6 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #22 pc 005ab66c /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+ #23 pc 005afca2 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+ #24 pc 0000cae8 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
+ #25 pc 00ce864f /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+ #26 pc 00ce8dfa /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+ #27 pc 00ce74c6 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+ #28 pc 00004616 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so (offset 0x28000)
+ #29 pc 00ce8215 /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
+ #30 pc 0013d8c7 /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
+ #31 pc 00137c52 /system/lib/libart.so (art_quick_invoke_static_stub+418)
+ #32 pc 00143651 /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
+ #33 pc 005e06ae /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
+ #34 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #35 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+ #36 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+ #37 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+ #38 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #39 pc 0032cfc0 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
+ #40 pc 000fc703 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
+ #41 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+ #42 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #43 pc 0032ebf9 /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
+ #44 pc 000fc955 /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
+ #45 pc 00300af7 /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
+ #46 pc 00328b5d /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
+ #47 pc 0033090c /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
+ #48 pc 000fc67f /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
+ #49 pc 00300700 /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
+ #50 pc 00667c73 /system/lib/libart.so (artQuickToInterpreterBridge+808)
+ #51 pc 0013d98d /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
+ #52 pc 7264bc5b /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
+'''
+
+_EXPECTED_BACKTRACE_OFFSETS_MAP = {
+ '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so':
+ set([
+ 0x1c000 + 0x111a9,
+ 0x1c000 + 0x13228,
+ 0x1c000 + 0x131de,
+ ]),
+
+ '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so':
+ set([
+ 0x90e000 + 0x7cd236,
+ 0x90e000 + 0x7cd2d8,
+ 0x90e000 + 0x7cd956,
+ 0x90e000 + 0x7c2d4a,
+ 0x90e000 + 0x9fc9f1,
+ 0x90e000 + 0x9fc8ea,
+ 0x90e000 + 0x561c63,
+ 0x90e000 + 0x106fbdb,
+ 0x90e000 + 0x4d7371,
+ 0x90e000 + 0x4d8159,
+ 0x90e000 + 0x4d7b96,
+ 0x90e000 + 0x4da4b6,
+ 0x90e000 + 0xcae8,
+ ]),
+ '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so':
+ set([
+ 0xc2d000 + 0x5ab66c,
+ 0xc2d000 + 0x5afca2,
+ 0xc2d000 + 0xce864f,
+ 0xc2d000 + 0xce8dfa,
+ 0xc2d000 + 0xce74c6,
+ 0xc2d000 + 0xce8215,
+ ]),
+ '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so':
+ set([
+ 0x28000 + 0x4616,
+ ])
+}
+
+# pylint: enable=line-too-long
+
+_ONE_MB = 1024 * 1024
+_TEST_SYMBOL_DATA = {
+ # Regular symbols
+ 0: 'mock_sym_for_addr_0 [mock_src/libmock1.so.c:0]',
+ 0x1000: 'mock_sym_for_addr_4096 [mock_src/libmock1.so.c:4096]',
+
+ # Symbols without source file path.
+ _ONE_MB: 'mock_sym_for_addr_1048576 [??:0]',
+ _ONE_MB + 0x8234: 'mock_sym_for_addr_1081908 [??:0]',
+
+ # Unknown symbol.
+ 2 * _ONE_MB: '?? [??:0]',
+
+ # Inlined symbol.
+ 3 * _ONE_MB:
+ 'mock_sym_for_addr_3145728_inner [mock_src/libmock1.so.c:3145728]',
+}
+
+@contextlib.contextmanager
+def _TempDir():
+ dirname = tempfile.mkdtemp()
+ try:
+ yield dirname
+ finally:
+ shutil.rmtree(dirname)
+
+
+def _TouchFile(path):
+ # Create parent directories.
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError:
+ pass
+ with open(path, 'a'):
+ os.utime(path, None)
+
+class MockApkTranslator(object):
+ """A mock ApkLibraryPathTranslator object used for testing."""
+
+ # Regex that matches the content of APK native library map files generated
+ # with apk_lib_dump.py.
+ _RE_MAP_FILE = re.compile(
+ r'0x(?P<file_start>[0-9a-f]+)\s+' +
+ r'0x(?P<file_end>[0-9a-f]+)\s+' +
+ r'0x(?P<file_size>[0-9a-f]+)\s+' +
+ r'(?P<lib_path>\S+)')
+
+ def __init__(self, test_apk_libs=None):
+ """Initialize instance.
+
+ Args:
+ test_apk_libs: Optional list of (file_start, file_end, size, lib_path)
+ tuples, like _TEST_APK_LIBS for example. This will be used to
+ implement TranslatePath().
+ """
+ self._apk_libs = []
+ if test_apk_libs:
+ self._AddLibEntries(test_apk_libs)
+
+ def _AddLibEntries(self, entries):
+ self._apk_libs = sorted(self._apk_libs + entries, key=lambda x: x[0])
+
+ def ReadMapFile(self, file_path):
+ """Read an .apk.native-libs file that was produced with apk_lib_dump.py.
+
+ Args:
+ file_path: input path to .apk.native-libs file. Its format is
+ essentially: 0x<start> 0x<end> 0x<size> <library-path>
+ """
+ new_libs = []
+ with open(file_path) as f:
+ for line in f.readlines():
+ m = MockApkTranslator._RE_MAP_FILE.match(line)
+ if m:
+ file_start = int(m.group('file_start'), 16)
+ file_end = int(m.group('file_end'), 16)
+ file_size = int(m.group('file_size'), 16)
+ lib_path = m.group('lib_path')
+ # Sanity check
+ if file_start + file_size != file_end:
+ logging.warning('%s: Inconsistent (start, end, size) values '
+ '(0x%x, 0x%x, 0x%x)',
+ file_path, file_start, file_end, file_size)
+ else:
+ new_libs.append((file_start, file_end, file_size, lib_path))
+
+ self._AddLibEntries(new_libs)
+
+ def TranslatePath(self, lib_path, lib_offset):
+ """Translate an APK file path + offset into a library path + offset."""
+ min_pos = 0
+ max_pos = len(self._apk_libs)
+ while min_pos < max_pos:
+ mid_pos = (min_pos + max_pos) / 2
+ mid_entry = self._apk_libs[mid_pos]
+ mid_offset = mid_entry[0]
+ mid_size = mid_entry[2]
+ if lib_offset < mid_offset:
+ max_pos = mid_pos
+ elif lib_offset >= mid_offset + mid_size:
+ min_pos = mid_pos + 1
+ else:
+ # Found it
+ new_path = '%s!lib/%s' % (lib_path, mid_entry[3])
+ new_offset = lib_offset - mid_offset
+ return (new_path, new_offset)
+
+ return lib_path, lib_offset
+
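+# Editorial note: the following is an illustrative sketch, not part of the
+# original change. Using the libmanager.cr.so entry from _TEST_APK_LIBS,
+# (0x0a931000, 0x0a9b3908, 0x00082908, 'libmanager.cr.so'), TranslatePath()
+# binary-searches the sorted entries and rebases the offset onto the library:
+#
+#   translator = MockApkTranslator(_TEST_APK_LIBS)
+#   path, offset = translator.TranslatePath(
+#       '/data/app/com.example.app-2/base.apk', 0x0a931100)
+#   # path   == '/data/app/com.example.app-2/base.apk!lib/libmanager.cr.so'
+#   # offset == 0x100
+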
+
+class HostLibraryFinderTest(unittest.TestCase):
+
+ def testEmpty(self):
+ finder = symbol_utils.HostLibraryFinder()
+ self.assertIsNone(finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
+ self.assertIsNone(
+ finder.Find('/data/data/com.example.app-1/base.apk!lib/libfoo.so'))
+
+
+ def testSimpleDirectory(self):
+ finder = symbol_utils.HostLibraryFinder()
+ with _TempDir() as tmp_dir:
+ host_libfoo_path = os.path.join(tmp_dir, 'libfoo.so')
+ host_libbar_path = os.path.join(tmp_dir, 'libbar.so')
+ _TouchFile(host_libfoo_path)
+ _TouchFile(host_libbar_path)
+
+ finder.AddSearchDir(tmp_dir)
+
+ # Regular library path (extracted at installation by the PackageManager).
+ # Note that the extraction path has changed between Android releases,
+ # e.g. it can be /data/app/, /data/data/ or /data/app-lib/ depending
+ # on the system.
+ self.assertEqual(
+ host_libfoo_path,
+ finder.Find('/data/app-lib/com.example.app-1/lib/libfoo.so'))
+
+ # Verify that the path doesn't really matter
+ self.assertEqual(
+ host_libfoo_path,
+ finder.Find('/whatever/what.apk!lib/libfoo.so'))
+
+ self.assertEqual(
+ host_libbar_path,
+ finder.Find('/data/data/com.example.app-1/lib/libbar.so'))
+
+ self.assertIsNone(
+ finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
+ def testMultipleDirectories(self):
+ with _TempDir() as tmp_dir:
+ # Create the following files:
+ # <tmp_dir>/aaa/
+ # libfoo.so
+ # <tmp_dir>/bbb/
+ # libbar.so
+ # libfoo.so (this one should never be found because 'aaa'
+ # comes first in the search path list).
+ #
+ aaa_dir = os.path.join(tmp_dir, 'aaa')
+ bbb_dir = os.path.join(tmp_dir, 'bbb')
+ os.makedirs(aaa_dir)
+ os.makedirs(bbb_dir)
+
+ host_libfoo_path = os.path.join(aaa_dir, 'libfoo.so')
+ host_libbar_path = os.path.join(bbb_dir, 'libbar.so')
+ host_libfoo2_path = os.path.join(bbb_dir, 'libfoo.so')
+
+ _TouchFile(host_libfoo_path)
+ _TouchFile(host_libbar_path)
+ _TouchFile(host_libfoo2_path)
+
+ finder = symbol_utils.HostLibraryFinder()
+ finder.AddSearchDir(aaa_dir)
+ finder.AddSearchDir(bbb_dir)
+
+ self.assertEqual(
+ host_libfoo_path,
+ finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
+
+ self.assertEqual(
+ host_libfoo_path,
+ finder.Find('/data/whatever/base.apk!lib/libfoo.so'))
+
+ self.assertEqual(
+ host_libbar_path,
+ finder.Find('/data/data/com.example.app-1/lib/libbar.so'))
+
+ self.assertIsNone(
+ finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
+class ElfSymbolResolverTest(unittest.TestCase):
+
+ def testCreation(self):
+ resolver = symbol_utils.ElfSymbolResolver(
+ addr2line_path_for_tests=_MOCK_A2L_PATH)
+ self.assertTrue(resolver)
+
+ def testWithSimpleOffsets(self):
+ resolver = symbol_utils.ElfSymbolResolver(
+ addr2line_path_for_tests=_MOCK_A2L_PATH)
+ resolver.SetAndroidAbi('ignored-abi')
+
+ for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems():
+ self.assertEqual(resolver.FindSymbolInfo('/some/path/libmock1.so', addr),
+ expected_sym)
+
+ def testWithPreResolvedSymbols(self):
+ resolver = symbol_utils.ElfSymbolResolver(
+ addr2line_path_for_tests=_MOCK_A2L_PATH)
+ resolver.SetAndroidAbi('ignored-abi')
+ resolver.AddLibraryOffsets('/some/path/libmock1.so',
+ _TEST_SYMBOL_DATA.keys())
+
+ resolver.DisallowSymbolizerForTesting()
+
+ for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems():
+ sym_info = resolver.FindSymbolInfo('/some/path/libmock1.so', addr)
+ self.assertIsNotNone(sym_info, 'No symbol info for addr %x' % addr)
+ self.assertEqual(
+ sym_info, expected_sym,
+ 'Invalid symbol info for addr %x [%s] expected [%s]' % (
+ addr, sym_info, expected_sym))
+
+
+class MemoryMapTest(unittest.TestCase):
+
+ def testCreation(self):
+ mem_map = symbol_utils.MemoryMap('test-abi32')
+ self.assertIsNone(mem_map.FindSectionForAddress(0))
+
+ def testParseLines(self):
+ mem_map = symbol_utils.MemoryMap('test-abi32')
+ mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
+ for exp_addr, exp_size, exp_path, exp_offset in _TEST_MEMORY_MAP_SECTIONS:
+ text = '(addr:%x, size:%x, path:%s, offset=%x)' % (
+ exp_addr, exp_size, exp_path, exp_offset)
+
+ t = mem_map.FindSectionForAddress(exp_addr)
+ self.assertTrue(t, 'Could not find %s' % text)
+ self.assertEqual(t.address, exp_addr)
+ self.assertEqual(t.size, exp_size)
+ self.assertEqual(t.offset, exp_offset)
+ self.assertEqual(t.path, exp_path)
+
+ def testTranslateLine(self):
+ android_abi = 'test-abi'
+ apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+ mem_map = symbol_utils.MemoryMap(android_abi)
+ for line, expected_line in zip(_TEST_MEMORY_MAP.splitlines(),
+ _EXPECTED_TEST_MEMORY_MAP.splitlines()):
+ self.assertEqual(mem_map.TranslateLine(line, apk_translator),
+ expected_line)
+
+class StackTranslatorTest(unittest.TestCase):
+
+ def testSimpleStack(self):
+ android_abi = 'test-abi32'
+ mem_map = symbol_utils.MemoryMap(android_abi)
+ mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
+ apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+ stack_translator = symbol_utils.StackTranslator(android_abi, mem_map,
+ apk_translator)
+ input_stack = _TEST_STACK.splitlines()
+ expected_stack = _EXPECTED_STACK.splitlines()
+ self.assertEqual(len(input_stack), len(expected_stack))
+ for stack_line, expected_line in zip(input_stack, expected_stack):
+ new_line = stack_translator.TranslateLine(stack_line)
+ self.assertEqual(new_line, expected_line)
+
+
+class MockSymbolResolver(symbol_utils.SymbolResolver):
+
+ # A regex matching a symbol definition as it appears in a test symbol file.
+ # Format is: <hex-offset> <whitespace> <symbol-string>
+ _RE_SYMBOL_DEFINITION = re.compile(
+ r'(?P<offset>[0-9a-f]+)\s+(?P<symbol>.*)')
+
+ def __init__(self):
+ super(MockSymbolResolver, self).__init__()
+ self._map = collections.defaultdict(dict)
+
+ def AddTestLibrarySymbols(self, lib_name, offsets_map):
+ """Add a new test entry for a given library name.
+
+ Args:
+ lib_name: Library name (e.g. 'libfoo.so')
+ offsets_map: A mapping from offsets to symbol info strings.
+ """
+ self._map[lib_name] = offsets_map
+
+ def ReadTestFile(self, file_path, lib_name):
+ """Read a single test symbol file, matching a given library.
+
+ Args:
+ file_path: Input file path.
+ lib_name: Library name these symbols correspond to (e.g. 'libfoo.so')
+ """
+ with open(file_path) as f:
+ for line in f.readlines():
+ line = line.rstrip()
+ m = MockSymbolResolver._RE_SYMBOL_DEFINITION.match(line)
+ if m:
+ offset = int(m.group('offset'), 16)  # Offsets are hex, per the format above.
+ symbol = m.group('symbol')
+ self._map[lib_name][offset] = symbol
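+
+ # Editorial sketch, not part of the original change: a test symbol file
+ # holds "<hex-offset> <symbol-string>" lines, so a file containing
+ #
+ #   1c000 MyFunction [src/foo.cc:42]
+ #
+ # read via ReadTestFile(path, 'libfoo.so') maps offset 0x1c000 to
+ # 'MyFunction [src/foo.cc:42]', which FindSymbolInfo() below then returns
+ # for ('/any/dir/libfoo.so', 0x1c000).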
+
+ def ReadTestFilesInDir(self, dir_path, file_suffix):
+ """Read all symbol test files in a given directory.
+
+ Args:
+ dir_path: Directory path.
+ file_suffix: File suffix used to detect test symbol files.
+ """
+ for filename in os.listdir(dir_path):
+ if filename.endswith(file_suffix):
+ lib_name = filename[:-len(file_suffix)]
+ self.ReadTestFile(os.path.join(dir_path, filename), lib_name)
+
+ def FindSymbolInfo(self, device_path, device_offset):
+ """Implement SymbolResolver.FindSymbolInfo."""
+ lib_name = os.path.basename(device_path)
+ offsets = self._map.get(lib_name)
+ if not offsets:
+ return None
+
+ return offsets.get(device_offset)
+
+
+class BacktraceTranslatorTest(unittest.TestCase):
+
+ def testEmpty(self):
+ android_abi = 'test-abi'
+ apk_translator = MockApkTranslator()
+ backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+ apk_translator)
+ self.assertTrue(backtrace_translator)
+
+ def testFindLibraryOffsets(self):
+ android_abi = 'test-abi'
+ apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+ backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+ apk_translator)
+ input_backtrace = _EXPECTED_BACKTRACE.splitlines()
+ expected_lib_offsets_map = _EXPECTED_BACKTRACE_OFFSETS_MAP
+ offset_map = backtrace_translator.FindLibraryOffsets(input_backtrace)
+ for lib_path, offsets in offset_map.iteritems():
+ self.assertTrue(lib_path in expected_lib_offsets_map,
+ '%s is not in expected library-offsets map!' % lib_path)
+ sorted_offsets = sorted(offsets)
+ sorted_expected_offsets = sorted(expected_lib_offsets_map[lib_path])
+ self.assertEqual(sorted_offsets, sorted_expected_offsets,
+ '%s has invalid offsets %s expected %s' % (
+ lib_path, sorted_offsets, sorted_expected_offsets))
+
+ def testTranslateLine(self):
+ android_abi = 'test-abi'
+ apk_translator = MockApkTranslator(_TEST_APK_LIBS)
+ backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
+ apk_translator)
+ input_backtrace = _TEST_BACKTRACE.splitlines()
+ expected_backtrace = _EXPECTED_BACKTRACE.splitlines()
+ self.assertEqual(len(input_backtrace), len(expected_backtrace))
+ for trace_line, expected_line in zip(input_backtrace, expected_backtrace):
+ line = backtrace_translator.TranslateLine(trace_line,
+ MockSymbolResolver())
+ self.assertEqual(line, expected_line)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/__init__.py b/deps/v8/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/__init__.py
diff --git a/deps/v8/build/android/pylib/utils/app_bundle_utils.py b/deps/v8/build/android/pylib/utils/app_bundle_utils.py
new file mode 100644
index 0000000000..2098f4f35d
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/app_bundle_utils.py
@@ -0,0 +1,140 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import re
+import sys
+import tempfile
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+
+from util import build_utils
+from util import md5_check
+from util import resource_utils
+import bundletool
+
+# List of valid modes for GenerateBundleApks()
+BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed')
+_SYSTEM_MODES = ('system_compressed', 'system')
+
+_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
+
+
+def _CreateDeviceSpec(bundle_path, sdk_version, locales):
+ # Could also use "bundletool dump resources", but reading directly is faster.
+ if not sdk_version:
+ with zipfile.ZipFile(bundle_path) as f:
+ manifest_data = f.read('base/manifest/AndroidManifest.xml')
+ sdk_version = int(
+ re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1))
+
+ # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk
+ # files from being created within the .apks file.
+ return {
+ 'screenDensity': 1000, # Ignored since we don't split on density.
+ 'sdkVersion': sdk_version,
+ 'supportedAbis': _ALL_ABIS, # Our .aab files are already split on abi.
+ 'supportedLocales': locales,
+ }
+
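+# Editorial note, not part of the original change: for sdk_version=21 and
+# locales=['hi'], _CreateDeviceSpec() above yields the following device spec,
+# which GenerateBundleApks() later serializes and passes to bundletool via
+# --device-spec:
+#
+#   {
+#     'screenDensity': 1000,
+#     'sdkVersion': 21,
+#     'supportedAbis': ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'],
+#     'supportedLocales': ['hi'],
+#   }
+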
+
+def GenerateBundleApks(bundle_path,
+ bundle_apks_path,
+ aapt2_path,
+ keystore_path,
+ keystore_password,
+ keystore_alias,
+ mode=None,
+ minimal=False,
+ minimal_sdk_version=None,
+ check_for_noop=True,
+ system_image_locales=None):
+ """Generate an .apks archive from a an app bundle if needed.
+
+ Args:
+ bundle_path: Input bundle file path.
+ bundle_apks_path: Output bundle .apks archive path. Name must end with
+ '.apks' or this operation will fail.
+ aapt2_path: Path to aapt2 build tool.
+ keystore_path: Path to keystore.
+ keystore_password: Keystore password, as a string.
+ keystore_alias: Keystore signing key alias.
+ mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
+ minimal: Create the minimal set of apks possible (english-only).
+ minimal_sdk_version: Use this sdkVersion when |minimal| or
+ |system_image_locales| args are present.
+ check_for_noop: Use md5_check to short-circuit when inputs have not changed.
+ system_image_locales: Locales to package in the APK when mode is "system"
+ or "system_compressed".
+ """
+ device_spec = None
+ if minimal:
+ # Measure with one language split installed. Use Hindi because it is
+ # popular. resource_size.py looks for splits/base-hi.apk.
+ # Note: English is always included since it's in base-master.apk.
+ device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
+ elif mode in _SYSTEM_MODES:
+ if not system_image_locales:
+ raise Exception('system modes require system_image_locales')
+ # Bundletool doesn't seem to understand device specs with locales in the
+ # form of "<lang>-r<region>", so just provide the language code instead.
+ locales = [
+ resource_utils.ToAndroidLocaleName(l).split('-')[0]
+ for l in system_image_locales
+ ]
+ device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)
+
+ def rebuild():
+ logging.info('Building %s', bundle_apks_path)
+ with tempfile.NamedTemporaryFile(suffix='.json') as spec_file, \
+ build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
+ cmd_args = [
+ 'build-apks',
+ '--aapt2=%s' % aapt2_path,
+ '--output=%s' % f.name,
+ '--bundle=%s' % bundle_path,
+ '--ks=%s' % keystore_path,
+ '--ks-pass=pass:%s' % keystore_password,
+ '--ks-key-alias=%s' % keystore_alias,
+ '--overwrite',
+ ]
+ if device_spec:
+ json.dump(device_spec, spec_file)
+ spec_file.flush()
+ cmd_args += ['--device-spec=' + spec_file.name]
+ if mode is not None:
+ if mode not in BUILD_APKS_MODES:
+ raise Exception('Invalid mode parameter %s (should be in %s)' %
+ (mode, BUILD_APKS_MODES))
+ cmd_args += ['--mode=' + mode]
+ bundletool.RunBundleTool(cmd_args)
+
+ if check_for_noop:
+ # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
+ # input_paths, to speed up MD5 computations by about 400ms (the .jar file
+ # contains thousands of class files which are checked independently,
+ # resulting in an .md5.stamp of more than 60000 lines!).
+ input_paths = [bundle_path, aapt2_path, keystore_path]
+ input_strings = [
+ keystore_password,
+ keystore_alias,
+ bundletool.BUNDLETOOL_JAR_PATH,
+ # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
+ # it's simpler to assume that this may not be the case in the future.
+ bundletool.BUNDLETOOL_VERSION,
+ device_spec,
+ ]
+ if mode is not None:
+ input_strings.append(mode)
+
+ md5_check.CallAndRecordIfStale(
+ rebuild,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=[bundle_apks_path])
+ else:
+ rebuild()
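+
+
+# Editorial usage sketch, not part of the original change; every path and
+# credential below is hypothetical:
+#
+#   GenerateBundleApks(
+#       bundle_path='/tmp/ChromePublic.aab',
+#       bundle_apks_path='/tmp/ChromePublic.apks',
+#       aapt2_path='/sdk/build-tools/aapt2',
+#       keystore_path='/keys/debug.keystore',
+#       keystore_password='android',
+#       keystore_alias='androiddebugkey',
+#       mode='universal')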
diff --git a/deps/v8/build/android/pylib/utils/argparse_utils.py b/deps/v8/build/android/pylib/utils/argparse_utils.py
new file mode 100644
index 0000000000..e456d9ddab
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/argparse_utils.py
@@ -0,0 +1,50 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+
+
+class CustomHelpAction(argparse.Action):
+ '''Allows defining custom help actions.
+
+ Help actions can run even when the parser would otherwise fail on missing
+ arguments. The first help or custom help command mentioned on the command
+ line will have its help text displayed.
+
+ Usage:
+ parser = argparse.ArgumentParser(...)
+ CustomHelpAction.EnableFor(parser)
+ parser.add_argument('--foo-help',
+ action='custom_help',
+ custom_help_text='this is the help message',
+ help='What this helps with')
+ '''
+ # Derived from argparse._HelpAction from
+ # https://github.com/python/cpython/blob/master/Lib/argparse.py
+
+ # pylint: disable=redefined-builtin
+ # (complains about 'help' being redefined)
+ def __init__(self,
+ option_strings,
+ dest=argparse.SUPPRESS,
+ default=argparse.SUPPRESS,
+ custom_help_text=None,
+ help=None):
+ super(CustomHelpAction, self).__init__(option_strings=option_strings,
+ dest=dest,
+ default=default,
+ nargs=0,
+ help=help)
+
+ if not custom_help_text:
+ raise ValueError('custom_help_text is required')
+ self._help_text = custom_help_text
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ print self._help_text
+ parser.exit()
+
+ @staticmethod
+ def EnableFor(parser):
+ parser.register('action', 'custom_help', CustomHelpAction)
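+
+
+# Editorial note, not part of the original change: with the docstring's usage
+# above, invoking the script as
+#
+#   $ my_script.py --foo-help
+#
+# prints 'this is the help message' and exits during parsing, even if other
+# required arguments are missing.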
diff --git a/deps/v8/build/android/pylib/utils/decorators.py b/deps/v8/build/android/pylib/utils/decorators.py
new file mode 100644
index 0000000000..8eec1d1e58
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/decorators.py
@@ -0,0 +1,37 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+
+
+def Memoize(f):
+ """Decorator to cache return values of function."""
+ memoize_dict = {}
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ key = repr((args, kwargs))
+ if key not in memoize_dict:
+ memoize_dict[key] = f(*args, **kwargs)
+ return memoize_dict[key]
+ return wrapper
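+
+
+# Editorial usage sketch, not part of the original change. Results are keyed
+# on repr((args, kwargs)), so repeated calls with equal arguments hit the
+# cache:
+#
+#   @Memoize
+#   def slow_square(n):
+#     return n * n
+#
+#   slow_square(3)  # Computed once, cached under repr(((3,), {})).
+#   slow_square(3)  # Served from the cache.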
+
+
+def NoRaiseException(default_return_value=None, exception_message=''):
+ """Returns decorator that catches and logs uncaught Exceptions.
+
+ Args:
+ default_return_value: Value to return in the case of uncaught Exception.
+ exception_message: Message for uncaught exceptions.
+ """
+ def decorator(f):
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except Exception: # pylint: disable=broad-except
+ logging.exception(exception_message)
+ return default_return_value
+ return wrapper
+ return decorator
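+
+
+# Editorial usage sketch, not part of the original change:
+#
+#   @NoRaiseException(default_return_value=[], exception_message='query failed')
+#   def risky_query():
+#     raise IOError('boom')
+#
+#   risky_query()  # Logs the exception and returns [] instead of raising.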
diff --git a/deps/v8/build/android/pylib/utils/decorators_test.py b/deps/v8/build/android/pylib/utils/decorators_test.py
new file mode 100755
index 0000000000..60f4811b4f
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/decorators_test.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for decorators.py."""
+
+import unittest
+
+from pylib.utils import decorators
+
+
+class NoRaiseExceptionDecoratorTest(unittest.TestCase):
+
+ def testFunctionDoesNotRaiseException(self):
+ """Tests that the |NoRaiseException| decorator catches exception."""
+
+ @decorators.NoRaiseException()
+ def raiseException():
+ raise Exception()
+
+ try:
+ raiseException()
+ except Exception: # pylint: disable=broad-except
+ self.fail('Exception was not caught by |NoRaiseException| decorator')
+
+ def testFunctionReturnsCorrectValues(self):
+ """Tests that the |NoRaiseException| decorator returns correct values."""
+
+ @decorators.NoRaiseException(default_return_value=111)
+ def raiseException():
+ raise Exception()
+
+ @decorators.NoRaiseException(default_return_value=111)
+ def doesNotRaiseException():
+ return 999
+
+ self.assertEquals(raiseException(), 111)
+ self.assertEquals(doesNotRaiseException(), 999)
+
+
+class MemoizeDecoratorTest(unittest.TestCase):
+
+ def testFunctionExceptionNotMemoized(self):
+ """Tests that |Memoize| decorator does not cache exception results."""
+
+ class ExceptionType1(Exception):
+ pass
+
+ class ExceptionType2(Exception):
+ pass
+
+ @decorators.Memoize
+ def raiseExceptions():
+ if raiseExceptions.count == 0:
+ raiseExceptions.count += 1
+ raise ExceptionType1()
+
+ if raiseExceptions.count == 1:
+ raise ExceptionType2()
+ raiseExceptions.count = 0
+
+ with self.assertRaises(ExceptionType1):
+ raiseExceptions()
+ with self.assertRaises(ExceptionType2):
+ raiseExceptions()
+
+ def testFunctionResultMemoized(self):
+ """Tests that |Memoize| decorator caches results."""
+
+ @decorators.Memoize
+ def memoized():
+ memoized.count += 1
+ return memoized.count
+ memoized.count = 0
+
+ def notMemoized():
+ notMemoized.count += 1
+ return notMemoized.count
+ notMemoized.count = 0
+
+ self.assertEquals(memoized(), 1)
+ self.assertEquals(memoized(), 1)
+ self.assertEquals(memoized(), 1)
+
+ self.assertEquals(notMemoized(), 1)
+ self.assertEquals(notMemoized(), 2)
+ self.assertEquals(notMemoized(), 3)
+
+ def testFunctionMemoizedBasedOnArgs(self):
+ """Tests that |Memoize| caches results based on args and kwargs."""
+
+ @decorators.Memoize
+ def returnValueBasedOnArgsKwargs(a, k=0):
+ return a + k
+
+ self.assertEquals(returnValueBasedOnArgsKwargs(1, 1), 2)
+ self.assertEquals(returnValueBasedOnArgsKwargs(1, 2), 3)
+ self.assertEquals(returnValueBasedOnArgsKwargs(2, 1), 3)
+ self.assertEquals(returnValueBasedOnArgsKwargs(3, 3), 6)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/utils/device_dependencies.py b/deps/v8/build/android/pylib/utils/device_dependencies.py
new file mode 100644
index 0000000000..bccc1c37a6
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/device_dependencies.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+from pylib import constants
+
+
+_BLACKLIST = [
+ re.compile(r'.*OWNERS'), # Should never be included.
+ re.compile(r'.*\.crx'), # Chrome extension zip files.
+ re.compile(r'.*\.so'), # Libraries packed into .apk.
+ re.compile(r'.*Mojo.*manifest\.json'), # Some source_set()s pull these in.
+ re.compile(r'.*\.py'), # Some test_support targets include python deps.
+ re.compile(r'.*\.stamp'), # Stamp files should never be included.
+ re.compile(r'.*\.apk'), # Should be installed separately.
+ re.compile(r'.*lib.java/.*'), # Never need java intermediates.
+
+ # Chrome external extensions config file.
+ re.compile(r'.*external_extensions\.json'),
+
+ # Exists just to test the compile, not to be run.
+ re.compile(r'.*jni_generator_tests'),
+
+ # v8's blobs and icu data get packaged into APKs.
+ re.compile(r'.*natives_blob.*\.bin'),
+ re.compile(r'.*snapshot_blob.*\.bin'),
+ re.compile(r'.*icudtl.bin'),
+
+ # Scripts that are needed by swarming, but not on devices:
+ re.compile(r'.*llvm-symbolizer'),
+ re.compile(r'.*md5sum_bin'),
+ re.compile(os.path.join('.*', 'development', 'scripts', 'stack')),
+]
+
+
+def _FilterDataDeps(abs_host_files):
+ blacklist = _BLACKLIST + [
+ re.compile(os.path.join(constants.GetOutDirectory(), 'bin'))]
+ return [p for p in abs_host_files
+ if not any(r.match(p) for r in blacklist)]
+
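+# Editorial note, not part of the original change; the paths below are
+# hypothetical. With an output directory of /out/Release, _FilterDataDeps()
+# drops blacklisted entries such as /out/Release/bin/helper (under bin/) or
+# /src/tools/OWNERS, while keeping ordinary data files such as
+# /src/chrome/test/data/foo.json.
+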
+
+def DevicePathComponentsFor(host_path, output_directory):
+ """Returns the device path components for a given host path.
+
+ This returns the device path as a list of joinable path components,
+ with None as the first element to indicate that the path should be
+ rooted at $EXTERNAL_STORAGE.
+
+ e.g., given
+
+ '$CHROMIUM_SRC/foo/bar/baz.txt'
+
+ this would return
+
+ [None, 'foo', 'bar', 'baz.txt']
+
+ This handles a couple classes of paths differently than it otherwise would:
+ - All .pak files get mapped to top-level paks/
+ - Anything in the output directory gets mapped relative to the output
+ directory rather than the source directory.
+
+ e.g. given
+
+ '$CHROMIUM_SRC/out/Release/icu_fake_dir/icudtl.dat'
+
+ this would return
+
+ [None, 'icu_fake_dir', 'icudtl.dat']
+
+ Args:
+ host_path: The absolute path to the host file.
+ output_directory: The absolute path to the build output directory.
+ Returns:
+ A list of device path components.
+ """
+ if host_path.startswith(output_directory):
+ if os.path.splitext(host_path)[1] == '.pak':
+ return [None, 'paks', os.path.basename(host_path)]
+ rel_host_path = os.path.relpath(host_path, output_directory)
+ else:
+ rel_host_path = os.path.relpath(host_path, constants.DIR_SOURCE_ROOT)
+
+ device_path_components = [None]
+ p = rel_host_path
+ while p:
+ p, d = os.path.split(p)
+ if d:
+ device_path_components.insert(1, d)
+ return device_path_components
+
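+# Editorial note, not part of the original change: the loop above peels path
+# components off the end of rel_host_path, inserting each one just after the
+# leading None. For rel_host_path = 'foo/bar/baz.txt' it builds, step by step:
+#
+#   [None, 'baz.txt'] -> [None, 'bar', 'baz.txt'] -> [None, 'foo', 'bar', 'baz.txt']
+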
+
+def GetDataDependencies(runtime_deps_path):
+ """Returns a list of device data dependencies.
+
+ Args:
+ runtime_deps_path: A str path to the .runtime_deps file.
+ Returns:
+ A list of (host_path, device_path) tuples.
+ """
+ if not runtime_deps_path:
+ return []
+
+ with open(runtime_deps_path, 'r') as runtime_deps_file:
+ rel_host_files = [l.strip() for l in runtime_deps_file if l.strip()]
+
+ output_directory = constants.GetOutDirectory()
+ abs_host_files = [
+ os.path.abspath(os.path.join(output_directory, r))
+ for r in rel_host_files]
+ filtered_abs_host_files = _FilterDataDeps(abs_host_files)
+ # TODO(crbug.com/752610): Filter out host executables, and investigate
+ # whether other files could be filtered as well.
+ return [(f, DevicePathComponentsFor(f, output_directory))
+ for f in filtered_abs_host_files]
diff --git a/deps/v8/build/android/pylib/utils/device_dependencies_test.py b/deps/v8/build/android/pylib/utils/device_dependencies_test.py
new file mode 100755
index 0000000000..aaa9ebf68a
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/device_dependencies_test.py
@@ -0,0 +1,56 @@
+#! /usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from pylib import constants
+from pylib.utils import device_dependencies
+
+
+class DevicePathComponentsForTest(unittest.TestCase):
+
+ def testCheckedInFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'foo', 'bar', 'baz.txt'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectoryFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'icudtl.dat')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'icudtl.dat'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectorySubdirFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'test_dir', 'icudtl.dat')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'test_dir', 'icudtl.dat'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectoryPakFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'foo.pak')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'paks', 'foo.pak'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/dexdump.py b/deps/v8/build/android/pylib/utils/dexdump.py
new file mode 100644
index 0000000000..2bb11ce198
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/dexdump.py
@@ -0,0 +1,115 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import tempfile
+from xml.etree import ElementTree
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+
+def Dump(apk_path):
+ """Dumps class and method information from a APK into a dict via dexdump.
+
+ Args:
+ apk_path: An absolute path to an APK file to dump.
+ Returns:
+    A dict in the following format:
+      {
+        <package_name>: {
+          'classes': {
+            <class_name>: {
+              'methods': [<method_1>, <method_2>],
+              'superclass': <superclass_name>
+            }
+          }
+        }
+      }
+ """
+ # TODO(mikecase): Support multi-dex
+  # Create the temp dir outside the try block so the finally clause never
+  # sees an undefined name if mkdtemp() itself fails.
+  dexfile_dir = tempfile.mkdtemp()
+  try:
+ # Python zipfile module is unable to unzip APKs.
+ cmd_helper.RunCmd(['unzip', apk_path, 'classes.dex'], cwd=dexfile_dir)
+ dexfile = os.path.join(dexfile_dir, 'classes.dex')
+ output_xml = cmd_helper.GetCmdOutput([DEXDUMP_PATH, '-l', 'xml', dexfile])
+ return _ParseRootNode(ElementTree.fromstring(output_xml))
+ finally:
+ shutil.rmtree(dexfile_dir)
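+
+# Example usage (illustrative; the APK path is hypothetical):
+#   info = Dump('/absolute/path/to/app.apk')
+#   for package_name, package_info in info.iteritems():
+#     print package_name, sorted(package_info['classes'])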
+
+
+def _ParseRootNode(root):
+ """Parses the XML output of dexdump. This output is in the following format.
+
+ This is a subset of the information contained within dexdump output.
+
+ <api>
+ <package name="foo.bar">
+ <class name="Class" extends="foo.bar.SuperClass">
+ <field name="Field">
+ </field>
+ <constructor name="Method">
+ <parameter name="Param" type="int">
+ </parameter>
+ </constructor>
+ <method name="Method">
+ <parameter name="Param" type="int">
+ </parameter>
+ </method>
+ </class>
+ </package>
+ </api>
+ """
+ results = {}
+ for child in root:
+ if child.tag == 'package':
+ package_name = child.attrib['name']
+ parsed_node = _ParsePackageNode(child)
+ if package_name in results:
+ results[package_name]['classes'].update(parsed_node['classes'])
+ else:
+ results[package_name] = parsed_node
+ return results
+
+
+def _ParsePackageNode(package_node):
+ """Parses a <package> node from the dexdump xml output.
+
+ Returns:
+    A dict in the format:
+      {
+        'classes': {
+          <class_1>: {
+            'methods': [<method_1>, <method_2>],
+            'superclass': <superclass_name>
+          },
+          <class_2>: {
+            'methods': [<method_1>, <method_2>],
+            'superclass': <superclass_name>
+          },
+        }
+      }
+ """
+ classes = {}
+ for child in package_node:
+ if child.tag == 'class':
+ classes[child.attrib['name']] = _ParseClassNode(child)
+ return {'classes': classes}
+
+
+def _ParseClassNode(class_node):
+ """Parses a <class> node from the dexdump xml output.
+
+ Returns:
+    A dict in the format:
+      {
+        'methods': [<method_1>, <method_2>],
+        'superclass': <superclass_name>
+      }
+ """
+ methods = []
+ for child in class_node:
+ if child.tag == 'method':
+ methods.append(child.attrib['name'])
+ return {'methods': methods, 'superclass': class_node.attrib['extends']}
diff --git a/deps/v8/build/android/pylib/utils/dexdump_test.py b/deps/v8/build/android/pylib/utils/dexdump_test.py
new file mode 100755
index 0000000000..6b2c4542f2
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/dexdump_test.py
@@ -0,0 +1,141 @@
+#! /usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from xml.etree import ElementTree
+
+from pylib.utils import dexdump
+
+# pylint: disable=protected-access
+
+
+class DexdumpXMLParseTest(unittest.TestCase):
+
+ def testParseRootXmlNode(self):
+ example_xml_string = (
+ '<api>'
+ '<package name="com.foo.bar1">'
+ '<class'
+ ' name="Class1"'
+ ' extends="java.lang.Object"'
+ ' abstract="false"'
+ ' static="false"'
+ ' final="true"'
+ ' visibility="public">'
+ '<method'
+ ' name="class1Method1"'
+ ' return="java.lang.String"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '<method'
+ ' name="class1Method2"'
+ ' return="viod"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '</class>'
+ '<class'
+ ' name="Class2"'
+ ' extends="java.lang.Object"'
+ ' abstract="false"'
+ ' static="false"'
+ ' final="true"'
+ ' visibility="public">'
+ '<method'
+ ' name="class2Method1"'
+ ' return="java.lang.String"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '</class>'
+ '</package>'
+ '<package name="com.foo.bar2">'
+ '</package>'
+ '<package name="com.foo.bar3">'
+ '</package>'
+ '</api>')
+
+ actual = dexdump._ParseRootNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'com.foo.bar1' : {
+ 'classes': {
+ 'Class1': {
+ 'methods': ['class1Method1', 'class1Method2'],
+ 'superclass': 'java.lang.Object',
+ },
+ 'Class2': {
+ 'methods': ['class2Method1'],
+ 'superclass': 'java.lang.Object',
+ }
+ },
+ },
+ 'com.foo.bar2' : {'classes': {}},
+ 'com.foo.bar3' : {'classes': {}},
+ }
+ self.assertEquals(expected, actual)
+
+ def testParsePackageNode(self):
+ example_xml_string = (
+ '<package name="com.foo.bar">'
+ '<class name="Class1" extends="java.lang.Object">'
+ '</class>'
+ '<class name="Class2" extends="java.lang.Object">'
+ '</class>'
+ '</package>')
+
+
+ actual = dexdump._ParsePackageNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'classes': {
+ 'Class1': {
+ 'methods': [],
+ 'superclass': 'java.lang.Object',
+ },
+ 'Class2': {
+ 'methods': [],
+ 'superclass': 'java.lang.Object',
+ },
+ },
+ }
+ self.assertEquals(expected, actual)
+
+ def testParseClassNode(self):
+ example_xml_string = (
+ '<class name="Class1" extends="java.lang.Object">'
+ '<method name="method1">'
+ '</method>'
+ '<method name="method2">'
+ '</method>'
+ '</class>')
+
+ actual = dexdump._ParseClassNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'methods': ['method1', 'method2'],
+ 'superclass': 'java.lang.Object',
+ }
+ self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/google_storage_helper.py b/deps/v8/build/android/pylib/utils/google_storage_helper.py
new file mode 100644
index 0000000000..d184810517
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/google_storage_helper.py
@@ -0,0 +1,126 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to Google Storage.
+
+Text data should be streamed to logdog using the |logdog_helper| module.
+Because logdog has no image or HTML viewer, images and HTML files should
+instead be uploaded directly to Google Storage using this module.
+"""
+
+import logging
+import os
+import sys
+import time
+import urlparse
+
+from pylib.constants import host_paths
+from pylib.utils import decorators
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+_GSUTIL_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'catapult',
+ 'third_party', 'gsutil', 'gsutil.py')
+_PUBLIC_URL = 'https://storage.googleapis.com/%s/'
+_AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/'
+
+
+@decorators.NoRaiseException(default_return_value='')
+def upload(name, filepath, bucket, gs_args=None, command_args=None,
+ content_type=None, authenticated_link=True):
+ """Uploads data to Google Storage.
+
+ Args:
+ name: Name of the file on Google Storage.
+ filepath: Path to file you want to upload.
+    bucket: Bucket to upload file to.
+    gs_args: Optional list of extra arguments for gsutil itself, inserted
+      before the 'cp' command.
+    command_args: Optional list of extra arguments for the 'cp' command.
+ content_type: Content type to upload as. If not specified, Google storage
+ will attempt to infer content type from file extension.
+    authenticated_link: Whether to return a link that requires the user to
+      authenticate with a Google account. Setting this to False returns a
+      link that does not require the user to be signed in to a Google
+      account, but that only works for completely public storage buckets.
+ Returns:
+ Web link to item uploaded to Google Storage bucket.
+ """
+ bucket = _format_bucket_name(bucket)
+
+ gs_path = 'gs://%s/%s' % (bucket, name)
+ logging.info('Uploading %s to %s', filepath, gs_path)
+
+ cmd = [_GSUTIL_PATH, '-q']
+ cmd.extend(gs_args or [])
+ if content_type:
+ cmd.extend(['-h', 'Content-Type:%s' % content_type])
+ cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])
+
+ cmd_helper.RunCmd(cmd)
+
+ return get_url_link(name, bucket, authenticated_link)
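+
+# Example (illustrative; the bucket and file names are hypothetical):
+#   link = upload('results/run_1.html', '/tmp/run_1.html', 'my-results-bucket',
+#                 content_type='text/html')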
+
+
+@decorators.NoRaiseException(default_return_value='')
+def read_from_link(link):
+  # Note that urlparse returns the path with a leading '/', so we only need
+  # to add one more '/' after the 'gs:'.
+ gs_path = 'gs:/%s' % urlparse.urlparse(link).path
+ cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path]
+ return cmd_helper.GetCmdOutput(cmd)
+
+
+@decorators.NoRaiseException(default_return_value=False)
+def exists(name, bucket):
+ bucket = _format_bucket_name(bucket)
+ gs_path = 'gs://%s/%s' % (bucket, name)
+
+ cmd = [_GSUTIL_PATH, '-q', 'stat', gs_path]
+ return_code = cmd_helper.RunCmd(cmd)
+ return return_code == 0
+
+
+# TODO(jbudorick): Delete this function. Only one user of it.
+def unique_name(basename, suffix='', timestamp=True, device=None):
+ """Helper function for creating a unique name for a file to store in GS.
+
+ Args:
+ basename: Base of the unique filename.
+ suffix: Suffix of filename.
+ timestamp: Whether or not to add a timestamp to name.
+    device: Optional device; its serial number is appended to the name.
+ """
+ return '%s%s%s%s' % (
+ basename,
+ '_%s' % time.strftime('%Y_%m_%d_T%H_%M_%S-UTC', time.gmtime())
+ if timestamp else '',
+ '_%s' % device.serial if device else '',
+ suffix)
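+
+# e.g. unique_name('screenshot', suffix='.png') -> a name like
+# 'screenshot_2017_01_01_T00_00_00-UTC.png' (the timestamp varies).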
+
+
+def get_url_link(name, bucket, authenticated_link=True):
+ """Get url link before/without uploading.
+
+ Args:
+ name: Name of the file on Google Storage.
+ bucket: Bucket to upload file to.
+    authenticated_link: Whether to return a link that requires the user to
+      authenticate with a Google account. Setting this to False returns a
+      link that does not require the user to be signed in to a Google
+      account, but that only works for completely public storage buckets.
+  Returns:
+    Web link to the item to be uploaded to the Google Storage bucket.
+ """
+ bucket = _format_bucket_name(bucket)
+ url_template = _AUTHENTICATED_URL if authenticated_link else _PUBLIC_URL
+ return os.path.join(url_template % bucket, name)
+
+
+def _format_bucket_name(bucket):
+ if bucket.startswith('gs://'):
+ bucket = bucket[len('gs://'):]
+ if bucket.endswith('/'):
+ bucket = bucket[:-1]
+ return bucket
diff --git a/deps/v8/build/android/pylib/utils/instrumentation_tracing.py b/deps/v8/build/android/pylib/utils/instrumentation_tracing.py
new file mode 100644
index 0000000000..f1d03a0dcf
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/instrumentation_tracing.py
@@ -0,0 +1,204 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions to instrument all Python function calls.
+
+This generates a JSON file readable by Chrome's about:tracing. To use it,
+either call start_instrumenting and stop_instrumenting at the appropriate times,
+or use the Instrument context manager.
+
+A function is only traced if it is from a Python module that matches at least
+one regular expression object in to_include, and does not match any in
+to_exclude. In between the start and stop events, every function call of a
+function from such a module will be added to the trace.
+"""
+
+import contextlib
+import functools
+import inspect
+import os
+import re
+import sys
+import threading
+
+from py_trace_event import trace_event
+
+
+# Modules to exclude by default (to avoid problems like infinite loops)
+DEFAULT_EXCLUDE = [r'py_trace_event\..*']
+
+class _TraceArguments(object):
+ def __init__(self):
+ """Wraps a dictionary to ensure safe evaluation of repr()."""
+ self._arguments = {}
+
+ @staticmethod
+ def _safeStringify(item):
+ try:
+ item_str = repr(item)
+ except Exception: # pylint: disable=broad-except
+ try:
+ item_str = str(item)
+ except Exception: # pylint: disable=broad-except
+ item_str = "<ERROR>"
+ return item_str
+
+ def add(self, key, val):
+ key_str = _TraceArguments._safeStringify(key)
+ val_str = _TraceArguments._safeStringify(val)
+
+ self._arguments[key_str] = val_str
+
+ def __repr__(self):
+ return repr(self._arguments)
+
+
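+# Thread ids whose names have already been recorded in the trace.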
+saved_thread_ids = set()
+
+def _shouldTrace(frame, to_include, to_exclude, included, excluded):
+ """
+ Decides whether or not the function called in frame should be traced.
+
+ Args:
+ frame: The Python frame object of this function call.
+ to_include: Set of regex objects for modules which should be traced.
+ to_exclude: Set of regex objects for modules which should not be traced.
+ included: Set of module names we've determined should be traced.
+    excluded: Set of module names we've determined should not be traced.
+
+  Returns:
+    True if the function called in frame should be traced, False otherwise.
+  """
+ if not inspect.getmodule(frame):
+ return False
+
+ module_name = inspect.getmodule(frame).__name__
+
+ if module_name in included:
+ includes = True
+ elif to_include:
+ includes = any([pattern.match(module_name) for pattern in to_include])
+ else:
+ includes = True
+
+ if includes:
+ included.add(module_name)
+ else:
+ return False
+
+ # Find the modules of every function in the stack trace.
+ frames = inspect.getouterframes(frame)
+  calling_modules = (inspect.getmodule(fr[0]) for fr in frames)
+  # getmodule() can return None (e.g. for exec'd code); skip those frames.
+  calling_module_names = [m.__name__ for m in calling_modules if m]
+
+ # Return False for anything with an excluded module's function anywhere in the
+ # stack trace (even if the function itself is in an included module).
+ if to_exclude:
+ for calling_module in calling_module_names:
+ if calling_module in excluded:
+ return False
+ for pattern in to_exclude:
+ if pattern.match(calling_module):
+ excluded.add(calling_module)
+ return False
+
+ return True
+
+def _generate_trace_function(to_include, to_exclude):
+ to_include = {re.compile(item) for item in to_include}
+ to_exclude = {re.compile(item) for item in to_exclude}
+ to_exclude.update({re.compile(item) for item in DEFAULT_EXCLUDE})
+
+ included = set()
+ excluded = set()
+
+ tracing_pid = os.getpid()
+
+ def traceFunction(frame, event, arg):
+ del arg
+
+ # Don't try to trace in subprocesses.
+ if os.getpid() != tracing_pid:
+ sys.settrace(None)
+ return None
+
+ # pylint: disable=unused-argument
+ if event not in ("call", "return"):
+ return None
+
+ function_name = frame.f_code.co_name
+ filename = frame.f_code.co_filename
+ line_number = frame.f_lineno
+
+ if _shouldTrace(frame, to_include, to_exclude, included, excluded):
+ if event == "call":
+ # This function is beginning; we save the thread name (if that hasn't
+ # been done), record the Begin event, and return this function to be
+ # used as the local trace function.
+
+ thread_id = threading.current_thread().ident
+
+ if thread_id not in saved_thread_ids:
+ thread_name = threading.current_thread().name
+
+ trace_event.trace_set_thread_name(thread_name)
+
+ saved_thread_ids.add(thread_id)
+
+ arguments = _TraceArguments()
+ # The function's argument values are stored in the frame's
+ # |co_varnames| as the first |co_argcount| elements. (Following that
+ # are local variables.)
+ for idx in range(frame.f_code.co_argcount):
+ arg_name = frame.f_code.co_varnames[idx]
+ arguments.add(arg_name, frame.f_locals[arg_name])
+ trace_event.trace_begin(function_name, arguments=arguments,
+ module=inspect.getmodule(frame).__name__,
+ filename=filename, line_number=line_number)
+
+ # Return this function, so it gets used as the "local trace function"
+ # within this function's frame (and in particular, gets called for this
+ # function's "return" event).
+ return traceFunction
+
+ if event == "return":
+ trace_event.trace_end(function_name)
+ return None
+
+ return traceFunction
+
+
+def no_tracing(f):
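+  """Decorator that runs f with any active trace function disabled."""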
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ trace_func = sys.gettrace()
+ try:
+ sys.settrace(None)
+ threading.settrace(None)
+ return f(*args, **kwargs)
+ finally:
+ sys.settrace(trace_func)
+ threading.settrace(trace_func)
+ return wrapper
+
+
+def start_instrumenting(output_file, to_include=(), to_exclude=()):
+ """Enable tracing of all function calls (from specified modules)."""
+ trace_event.trace_enable(output_file)
+
+ traceFunc = _generate_trace_function(to_include, to_exclude)
+ sys.settrace(traceFunc)
+ threading.settrace(traceFunc)
+
+
+def stop_instrumenting():
+ trace_event.trace_disable()
+
+ sys.settrace(None)
+ threading.settrace(None)
+
+
+@contextlib.contextmanager
+def Instrument(output_file, to_include=(), to_exclude=()):
+ try:
+ start_instrumenting(output_file, to_include, to_exclude)
+ yield None
+ finally:
+ stop_instrumenting()
diff --git a/deps/v8/build/android/pylib/utils/logdog_helper.py b/deps/v8/build/android/pylib/utils/logdog_helper.py
new file mode 100644
index 0000000000..68a7ba57ab
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/logdog_helper.py
@@ -0,0 +1,94 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to logdog."""
+
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import decorators
+
+sys.path.insert(0, os.path.abspath(os.path.join(
+ constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client')))
+from libs.logdog import bootstrap # pylint: disable=import-error
+
+
+@decorators.NoRaiseException(default_return_value='',
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def text(name, data, content_type=None):
+ """Uploads text to logdog.
+
+ Args:
+ name: Name of the logdog stream.
+ data: String with data you want to upload.
+ content_type: The optional content type of the stream. If None, a
+ default content type will be chosen.
+
+ Returns:
+ Link to view uploaded text in logdog viewer.
+ """
+ logging.info('Writing text to logdog stream, %s', name)
+ with get_logdog_client().text(name, content_type=content_type) as stream:
+ stream.write(data)
+ return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value=None,
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def open_text(name):
+ """Returns a file like object which you can write to.
+
+ Args:
+ name: Name of the logdog stream.
+
+ Returns:
+ A file like object. close() file when done.
+ """
+ logging.info('Opening text logdog stream, %s', name)
+ return get_logdog_client().open_text(name)
+
+
+@decorators.NoRaiseException(default_return_value='',
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def binary(name, binary_path):
+ """Uploads binary to logdog.
+
+ Args:
+ name: Name of the logdog stream.
+ binary_path: Path to binary you want to upload.
+
+ Returns:
+ Link to view uploaded binary in logdog viewer.
+ """
+ logging.info('Writing binary to logdog stream, %s', name)
+ with get_logdog_client().binary(name) as stream:
+    with open(binary_path, 'rb') as f:
+ stream.write(f.read())
+ return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value='',
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def get_viewer_url(name):
+ """Get Logdog viewer URL.
+
+ Args:
+ name: Name of the logdog stream.
+
+ Returns:
+ Link to view uploaded binary in logdog viewer.
+ """
+ return get_logdog_client().get_viewer_url(name)
+
+
+@decorators.Memoize
+def get_logdog_client():
+ logging.info('Getting logdog client.')
+ return bootstrap.ButlerBootstrap.probe().stream_client()
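+
+
+# Example (illustrative; the stream name and text are hypothetical):
+#   viewer_url = text('test_results_log', 'all tests passed')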
diff --git a/deps/v8/build/android/pylib/utils/logging_utils.py b/deps/v8/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000000..9c4eae3fcb
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+
+from pylib.constants import host_paths
+
+_COLORAMA_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')
+
+with host_paths.SysPath(_COLORAMA_PATH, position=0):
+ import colorama
+
+BACK = colorama.Back
+FORE = colorama.Fore
+STYLE = colorama.Style
+
+
+class _ColorFormatter(logging.Formatter):
+ # pylint does not see members added dynamically in the constructor.
+ # pylint: disable=no-member
+ color_map = {
+ logging.DEBUG: (FORE.CYAN),
+ logging.WARNING: (FORE.YELLOW),
+ logging.ERROR: (FORE.RED),
+ logging.CRITICAL: (BACK.RED),
+ }
+
+ def __init__(self, wrapped_formatter=None):
+ """Wraps a |logging.Formatter| and adds color."""
+    super(_ColorFormatter, self).__init__()
+ self._wrapped_formatter = wrapped_formatter or logging.Formatter()
+
+ #override
+ def format(self, record):
+ message = self._wrapped_formatter.format(record)
+ return self.Colorize(message, record.levelno)
+
+ def Colorize(self, message, log_level):
+ try:
+ return (''.join(self.color_map[log_level]) + message +
+ colorama.Style.RESET_ALL)
+ except KeyError:
+ return message
+
+
+class ColorStreamHandler(logging.StreamHandler):
+ """Handler that can be used to colorize logging output.
+
+ Example using a specific logger:
+
+ logger = logging.getLogger('my_logger')
+ logger.addHandler(ColorStreamHandler())
+ logger.info('message')
+
+ Example using the root logger:
+
+ ColorStreamHandler.MakeDefault()
+ logging.info('message')
+
+ """
+ def __init__(self, force_color=False):
+ super(ColorStreamHandler, self).__init__()
+ self.force_color = force_color
+ self.setFormatter(logging.Formatter())
+
+ @property
+ def is_tty(self):
+ isatty = getattr(self.stream, 'isatty', None)
+ return isatty and isatty()
+
+ #override
+ def setFormatter(self, formatter):
+ if self.force_color or self.is_tty:
+ formatter = _ColorFormatter(formatter)
+ super(ColorStreamHandler, self).setFormatter(formatter)
+
+ @staticmethod
+ def MakeDefault(force_color=False):
+ """
+    Replaces the default logging handlers with a coloring handler. To use
+    a colorizing handler at the same time as others, either register them
+    after this call, or add the ColorStreamHandler to the logger using
+    Logger.addHandler().
+
+ Args:
+ force_color: Set to True to bypass the tty check and always colorize.
+ """
+ # If the existing handlers aren't removed, messages are duplicated
+ logging.getLogger().handlers = []
+ logging.getLogger().addHandler(ColorStreamHandler(force_color))
+
+
+@contextlib.contextmanager
+def OverrideColor(level, color):
+ """Temporarily override the logging color for a specified level.
+
+ Args:
+ level: logging level whose color gets overridden.
+ color: tuple of formats to apply to log lines.
+ """
+ prev_colors = {}
+ for handler in logging.getLogger().handlers:
+ if isinstance(handler.formatter, _ColorFormatter):
+ prev_colors[handler.formatter] = handler.formatter.color_map[level]
+ handler.formatter.color_map[level] = color
+ try:
+ yield
+ finally:
+ for formatter, prev_color in prev_colors.iteritems():
+ formatter.color_map[level] = prev_color
+
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+ """Momentarilly suppress logging events from all loggers.
+
+ TODO(jbudorick): This is not thread safe. Log events from other threads might
+ also inadvertently disappear.
+
+ Example:
+
+ with logging_utils.SuppressLogging():
+ # all but CRITICAL logging messages are suppressed
+ logging.info('just doing some thing') # not shown
+ logging.critical('something really bad happened') # still shown
+
+ Args:
+ level: logging events with this or lower levels are suppressed.
+ """
+ logging.disable(level)
+ yield
+ logging.disable(logging.NOTSET)
diff --git a/deps/v8/build/android/pylib/utils/maven_downloader.py b/deps/v8/build/android/pylib/utils/maven_downloader.py
new file mode 100755
index 0000000000..c60b0140ac
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/maven_downloader.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import logging
+import os
+import shutil
+
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+
+
+def _MakeDirsIfAbsent(path):
+ try:
+ os.makedirs(path)
+ except OSError as err:
+ if err.errno != errno.EEXIST or not os.path.isdir(path):
+ raise
+
+
+class MavenDownloader(object):
+ '''
+ Downloads and installs the requested artifacts from the Google Maven repo.
+  Artifacts must be specified in the format
+  "group_id:artifact_id:version:file_type"; the file type must be given
+  explicitly because the Maven default is JAR, while most Android libraries
+  are provided as AARs, which would otherwise fail to download. See Install().
+ '''
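+  # Example (illustrative; the artifact spec is hypothetical):
+  #   MavenDownloader().Install(
+  #       '/tmp/android_repo',
+  #       ['com.android.support:support-annotations:27.0.0:jar'])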
+
+  # Remote repository to download the artifacts from. The support library and
+  # Google Play services are only distributed there, but third-party libraries
+  # could use Maven Central or JCenter, for example. The default Maven remote
+  # is Maven Central.
+ _REMOTE_REPO = 'https://maven.google.com'
+
+ # Default Maven repository.
+ _DEFAULT_REPO_PATH = os.path.join(
+ os.path.expanduser('~'), '.m2', 'repository')
+
+ def __init__(self, debug=False):
+ self._repo_path = MavenDownloader._DEFAULT_REPO_PATH
+ self._remote_url = MavenDownloader._REMOTE_REPO
+ self._debug = debug
+
+ def Install(self, target_repo, artifacts, include_poms=False):
+ logging.info('Installing %d artifacts...', len(artifacts))
+ downloaders = [_SingleArtifactDownloader(self, artifact, target_repo)
+ for artifact in artifacts]
+ if self._debug:
+ for downloader in downloaders:
+ downloader.Run(include_poms)
+ else:
+ parallelizer.SyncParallelizer(downloaders).Run(include_poms)
+ logging.info('%d artifacts installed to %s', len(artifacts), target_repo)
+
+ @property
+ def repo_path(self):
+ return self._repo_path
+
+ @property
+ def remote_url(self):
+ return self._remote_url
+
+ @property
+ def debug(self):
+ return self._debug
+
+
+class _SingleArtifactDownloader(object):
+ '''Handles downloading and installing a single Maven artifact.'''
+
+ _POM_FILE_TYPE = 'pom'
+
+ def __init__(self, download_manager, artifact, target_repo):
+ self._download_manager = download_manager
+ self._artifact = artifact
+ self._target_repo = target_repo
+
+ def Run(self, include_pom=False):
+ parts = self._artifact.split(':')
+ if len(parts) != 4:
+ raise Exception('Artifacts expected as '
+ '"group_id:artifact_id:version:file_type".')
+ group_id, artifact_id, version, file_type = parts
+ self._InstallArtifact(group_id, artifact_id, version, file_type)
+
+ if include_pom and file_type != _SingleArtifactDownloader._POM_FILE_TYPE:
+ self._InstallArtifact(group_id, artifact_id, version,
+ _SingleArtifactDownloader._POM_FILE_TYPE)
+
+ def _InstallArtifact(self, group_id, artifact_id, version, file_type):
+ logging.debug('Processing %s', self._artifact)
+
+ download_relpath = self._DownloadArtifact(
+ group_id, artifact_id, version, file_type)
+ logging.debug('Downloaded.')
+
+ install_path = self._ImportArtifact(download_relpath)
+ logging.debug('Installed %s', os.path.relpath(install_path))
+
+ def _DownloadArtifact(self, group_id, artifact_id, version, file_type):
+ '''
+    Downloads the specified artifact using Maven to its standard local
+    location; see MavenDownloader._DEFAULT_REPO_PATH.
+ '''
+ cmd = ['mvn',
+ 'org.apache.maven.plugins:maven-dependency-plugin:RELEASE:get',
+ '-DremoteRepositories={}'.format(self._download_manager.remote_url),
+ '-Dartifact={}:{}:{}:{}'.format(group_id, artifact_id, version,
+ file_type)]
+
+ stdout = None if self._download_manager.debug else open(os.devnull, 'wb')
+
+ try:
+ ret_code = cmd_helper.Call(cmd, stdout=stdout)
+ if ret_code != 0:
+ raise Exception('Command "{}" failed'.format(' '.join(cmd)))
+ except OSError as e:
+      if e.errno == errno.ENOENT:
+ raise Exception('mvn command not found. Please install Maven.')
+ raise
+
+ return os.path.join(os.path.join(*group_id.split('.')),
+ artifact_id,
+ version,
+ '{}-{}.{}'.format(artifact_id, version, file_type))
+
+ def _ImportArtifact(self, artifact_path):
+ src_dir = os.path.join(self._download_manager.repo_path, artifact_path)
+ dst_dir = os.path.join(self._target_repo, os.path.dirname(artifact_path))
+
+ _MakeDirsIfAbsent(dst_dir)
+ shutil.copy(src_dir, dst_dir)
+
+ return dst_dir
diff --git a/deps/v8/build/android/pylib/utils/proguard.py b/deps/v8/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000000..2d439a52c3
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/proguard.py
@@ -0,0 +1,288 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*? Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+ r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+ r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$')
+_ELEMENT_PRIMITIVE = 0
+_ELEMENT_ARRAY = 1
+_ELEMENT_ANNOTATION = 2
+_PROGUARD_ELEMENT_RES = [
+ (_ELEMENT_PRIMITIVE,
+ re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')),
+ (_ELEMENT_ARRAY,
+ re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')),
+ (_ELEMENT_ANNOTATION,
+ re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$'))
+]
+_PROGUARD_INDENT_WIDTH = 2
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? \[(.*)\]$')
+
+
+def _GetProguardPath():
+ # Use the one in lib.java rather than source tree because it is the one that
+ # is added to swarming .isolate files.
+ return os.path.join(
+ constants.GetOutDirectory(), 'lib.java', 'third_party', 'proguard',
+ 'proguard603.jar')
+
+
+def Dump(jar_path):
+ """Dumps class and method information from a JAR into a dict via proguard.
+
+ Args:
+ jar_path: An absolute path to the JAR file to dump.
+ Returns:
+ A dict in the following format:
+ {
+ 'classes': [
+ {
+ 'class': '',
+ 'superclass': '',
+ 'annotations': {/* dict -- see below */},
+ 'methods': [
+ {
+ 'method': '',
+ 'annotations': {/* dict -- see below */},
+ },
+ ...
+ ],
+ },
+ ...
+ ],
+ }
+
+ Annotations dict format:
+ {
+ 'empty-annotation-class-name': None,
+ 'annotation-class-name': {
+ 'field': 'primitive-value',
+ 'field': [ 'array-item-1', 'array-item-2', ... ],
+ 'field': {
+ /* Object value */
+ 'field': 'primitive-value',
+ 'field': [ 'array-item-1', 'array-item-2', ... ],
+ 'field': { /* Object value */ }
+ }
+ }
+ }
+
+ Note that for top-level annotations their class names are used for
+ identification, whereas for any nested annotations the corresponding
+ field names are used.
+
+ One drawback of this approach is that an array containing empty
+ annotation classes will be represented as an array of 'None' values,
+ thus it will not be possible to find out annotation class names.
+ On the other hand, storing both annotation class name and the field name
+ would produce a very complex JSON.
+ """
+
+ with tempfile.NamedTemporaryFile() as proguard_output:
+ cmd_helper.GetCmdStatusAndOutput([
+ 'java',
+ '-jar', _GetProguardPath(),
+ '-injars', jar_path,
+ '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+ '-dump', proguard_output.name])
+ return Parse(proguard_output)
+
+class _AnnotationElement(object):
+ def __init__(self, name, ftype, depth):
+ self.ref = None
+ self.name = name
+ self.ftype = ftype
+ self.depth = depth
+
+class _ParseState(object):
+ _INITIAL_VALUES = (lambda: None, list, dict)
+ # Empty annotations are represented as 'None', not as an empty dictionary.
+ _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None)
+
+ def __init__(self):
+ self._class_result = None
+ self._method_result = None
+ self._parse_annotations = False
+ self._annotation_stack = []
+
+ def ResetPerSection(self, section_name):
+ self.InitMethod(None)
+ self._parse_annotations = (
+ section_name in ['Class file attributes', 'Methods'])
+
+ def ParseAnnotations(self):
+ return self._parse_annotations
+
+ def CreateAndInitClass(self, class_name):
+ self.InitMethod(None)
+ self._class_result = {
+ 'class': class_name,
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [],
+ }
+ return self._class_result
+
+ def HasCurrentClass(self):
+ return bool(self._class_result)
+
+ def SetSuperClass(self, superclass):
+ assert self.HasCurrentClass()
+ self._class_result['superclass'] = superclass
+
+ def InitMethod(self, method_name):
+ self._annotation_stack = []
+ if method_name:
+ self._method_result = {
+ 'method': method_name,
+ 'annotations': {},
+ }
+ self._class_result['methods'].append(self._method_result)
+ else:
+ self._method_result = None
+
+ def InitAnnotation(self, annotation, depth):
+ if not self._annotation_stack:
+ # Add a fake parent element comprising 'annotations' dictionary,
+ # so we can work uniformly with both top-level and nested annotations.
+ annotations = _AnnotationElement(
+ '<<<top level>>>', _ELEMENT_ANNOTATION, depth - 1)
+ if self._method_result:
+ annotations.ref = self._method_result['annotations']
+ else:
+ annotations.ref = self._class_result['annotations']
+ self._annotation_stack = [annotations]
+ self._BacktrackAnnotationStack(depth)
+ if not self.HasCurrentAnnotation():
+ self._annotation_stack.append(
+ _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth))
+ self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+ def HasCurrentAnnotation(self):
+ return len(self._annotation_stack) > 1
+
+ def InitAnnotationField(self, field, field_type, depth):
+ self._BacktrackAnnotationStack(depth)
+ # Create the parent representation, if needed. E.g. annotations
+ # are represented with `None`, not with `{}` until they receive the first
+ # field.
+ self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES)
+ if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY:
+ # Nested arrays are not allowed in annotations.
+ assert not field_type == _ELEMENT_ARRAY
+ # Use array index instead of bogus field name.
+ field = len(self._annotation_stack[-1].ref)
+ self._annotation_stack.append(_AnnotationElement(field, field_type, depth))
+ self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+ def UpdateCurrentAnnotationFieldValue(self, value, depth):
+ self._BacktrackAnnotationStack(depth)
+ self._InitOrUpdateCurrentField(value)
+
+ def _CreateAnnotationPlaceHolder(self, constructors):
+ assert self.HasCurrentAnnotation()
+ field = self._annotation_stack[-1]
+ if field.ref is None:
+ field.ref = constructors[field.ftype]()
+ self._InitOrUpdateCurrentField(field.ref)
+
+ def _BacktrackAnnotationStack(self, depth):
+ stack = self._annotation_stack
+ while len(stack) > 0 and stack[-1].depth >= depth:
+ stack.pop()
+
+ def _InitOrUpdateCurrentField(self, value):
+ assert self.HasCurrentAnnotation()
+ parent = self._annotation_stack[-2]
+ assert not parent.ref is None
+ # There can be no nested constant element values.
+ assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION]
+ field = self._annotation_stack[-1]
+ if isinstance(value, str) and not field.ftype == _ELEMENT_PRIMITIVE:
+ # The value comes from the output parser via
+ # UpdateCurrentAnnotationFieldValue, and should be a value of a constant
+ # element. If it isn't, just skip it.
+ return
+ if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref):
+ parent.ref.append(value)
+ else:
+ parent.ref[field.name] = value
+
+
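+# Converts an indentation prefix into a nesting depth, e.g. _GetDepth('    ')
+# returns 2 with proguard's two-space indent.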
+def _GetDepth(prefix):
+ return len(prefix) // _PROGUARD_INDENT_WIDTH
+
+def Parse(proguard_output):
+ results = {
+ 'classes': [],
+ }
+
+ state = _ParseState()
+
+ for line in proguard_output:
+ line = line.strip('\r\n')
+
+ m = _PROGUARD_CLASS_RE.match(line)
+ if m:
+ results['classes'].append(
+ state.CreateAndInitClass(m.group(1).replace('/', '.')))
+ continue
+
+ if not state.HasCurrentClass():
+ continue
+
+ m = _PROGUARD_SUPERCLASS_RE.match(line)
+ if m:
+ state.SetSuperClass(m.group(1).replace('/', '.'))
+ continue
+
+ m = _PROGUARD_SECTION_RE.match(line)
+ if m:
+ state.ResetPerSection(m.group(1))
+ continue
+
+ m = _PROGUARD_METHOD_RE.match(line)
+ if m:
+ state.InitMethod(m.group(1))
+ continue
+
+ if not state.ParseAnnotations():
+ continue
+
+ m = _PROGUARD_ANNOTATION_RE.match(line)
+ if m:
+ # Ignore the annotation package.
+ state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1)))
+ continue
+
+ if state.HasCurrentAnnotation():
+ m = None
+ for (element_type, element_re) in _PROGUARD_ELEMENT_RES:
+ m = element_re.match(line)
+ if m:
+ state.InitAnnotationField(
+ m.group(2), element_type, _GetDepth(m.group(1)))
+ break
+ if m:
+ continue
+ m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+ if m:
+ state.UpdateCurrentAnnotationFieldValue(
+ m.group(2), _GetDepth(m.group(1)))
+ else:
+ state.InitMethod(None)
+
+ return results
diff --git a/deps/v8/build/android/pylib/utils/proguard_test.py b/deps/v8/build/android/pylib/utils/proguard_test.py
new file mode 100755
index 0000000000..7672476e0a
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/proguard_test.py
@@ -0,0 +1,495 @@
+#! /usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.utils import proguard
+
+class TestParse(unittest.TestCase):
+
+ def setUp(self):
+ self.maxDiff = None
+
+ def testClass(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ ' Superclass: java/lang/Object'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': 'java.lang.Object',
+ 'annotations': {},
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testMethod(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: <init>()V'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': '<init>',
+ 'annotations': {}
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testClassAnnotation(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 3):',
+ ' - Annotation [Lorg/example/Annotation;]:',
+ ' - Annotation [Lorg/example/AnnotationWithValue;]:',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+ ' - Constant element value [attr1 \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [attr2 \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'Annotation': None,
+ 'AnnotationWithValue': {'attr': 'val'},
+ 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testClassAnnotationWithArrays(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 3):',
+ ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'AnnotationWithEmptyArray': {'arrayAttr': []},
+ 'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+ 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testNestedClassAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 1):',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Constant element value [outerAttr \'13\']',
+ ' - Utf8 [outerVal]',
+ ' - Array element value [outerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal2]',
+ ' - Annotation element value [emptyAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [ann]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'13\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [innerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal2]',
+ ' - Annotation element value [emptyInnerAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'outerAttr': 'outerVal',
+ 'outerArr': ['outerArrVal1', 'outerArrVal2'],
+ 'emptyAnn': None,
+ 'ann': {
+ 'innerAttr': 'innerVal',
+ 'innerArr': ['innerArrVal1', 'innerArrVal2'],
+ 'emptyInnerAnn': None
+ }
+ }
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testClassArraysOfAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 1):',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Array element value [arrayWithEmptyAnnotations]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Array element value [outerArray]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'115\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [arguments]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg1Attr \'115\']',
+ ' - Utf8 [arg1Val]',
+ ' - Array element value [arg1Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [11]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [12]',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg2Attr \'115\']',
+ ' - Utf8 [arg2Val]',
+ ' - Array element value [arg2Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [21]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [22]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'arrayWithEmptyAnnotations': [None, None],
+ 'outerArray': [
+ {
+ 'innerAttr': 'innerVal',
+ 'arguments': [
+ {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+ {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+ ]
+ }
+ ]
+ }
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testReadFullClassFileAttributes(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 3):',
+ ' - Source file attribute:',
+ ' - Utf8 [Class.java]',
+ ' - Runtime visible annotations attribute:',
+ ' - Annotation [Lorg/example/IntValueAnnotation;]:',
+ ' - Constant element value [value \'73\']',
+ ' - Integer [19]',
+ ' - Inner classes attribute (count = 1)',
+ ' - InnerClassesInfo:',
+ ' Access flags: 0x9 = public static',
+ ' - Class [org/example/Class1]',
+ ' - Class [org/example/Class2]',
+ ' - Utf8 [OnPageFinishedHelper]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'IntValueAnnotation': {
+ 'value': '19',
+ }
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testMethodAnnotation(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/Annotation;]:',
+ ' - Annotation [Lorg/example/AnnotationWithValue;]:',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+ ' - Constant element value [attr1 \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [attr2 \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'Annotation': None,
+ 'AnnotationWithValue': {'attr': 'val'},
+ 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testMethodAnnotationWithArrays(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'AnnotationWithEmptyArray': {'arrayAttr': []},
+ 'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+ 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testMethodAnnotationWithPrimitivesAndArrays(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationTwoArrays;]:',
+ ' - Array element value [arrayAttr1]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val1]',
+ ' - Array element value [arrayAttr2]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'AnnotationPrimitiveThenArray': {'attr': 'val',
+ 'arrayAttr': ['val']},
+ 'AnnotationArrayThenPrimitive': {'arrayAttr': ['val'],
+ 'attr': 'val'},
+ 'AnnotationTwoArrays': {'arrayAttr1': ['val1'],
+ 'arrayAttr2': ['val2']}
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testNestedMethodAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Constant element value [outerAttr \'13\']',
+ ' - Utf8 [outerVal]',
+ ' - Array element value [outerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal2]',
+ ' - Annotation element value [emptyAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [ann]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'13\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [innerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal2]',
+ ' - Annotation element value [emptyInnerAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'outerAttr': 'outerVal',
+ 'outerArr': ['outerArrVal1', 'outerArrVal2'],
+ 'emptyAnn': None,
+ 'ann': {
+ 'innerAttr': 'innerVal',
+ 'innerArr': ['innerArrVal1', 'innerArrVal2'],
+ 'emptyInnerAnn': None
+ }
+ }
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+ def testMethodArraysOfAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Array element value [arrayWithEmptyAnnotations]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Array element value [outerArray]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'115\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [arguments]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg1Attr \'115\']',
+ ' - Utf8 [arg1Val]',
+ ' - Array element value [arg1Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [11]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [12]',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg2Attr \'115\']',
+ ' - Utf8 [arg2Val]',
+ ' - Array element value [arg2Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [21]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [22]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'arrayWithEmptyAnnotations': [None, None],
+ 'outerArray': [
+ {
+ 'innerAttr': 'innerVal',
+ 'arguments': [
+ {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+ {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/repo_utils.py b/deps/v8/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000000..5a0efa8b6e
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from devil.utils import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+ """Returns the git hash tag for the given directory.
+
+ Args:
+ in_directory: The directory where git is to be run.
+ """
+ command_line = ['git', 'log', '-1', '--pretty=format:%H']
+ output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+ return output[0:40]
diff --git a/deps/v8/build/android/pylib/utils/shared_preference_utils.py b/deps/v8/build/android/pylib/utils/shared_preference_utils.py
new file mode 100644
index 0000000000..ae0d31b784
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/shared_preference_utils.py
@@ -0,0 +1,95 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for modifying an app's settings file using JSON."""
+
+import json
+import logging
+
+
+def UnicodeToStr(data):
+ """Recursively converts any Unicode to Python strings.
+
+ Args:
+ data: The data to be converted.
+
+  Returns:
+ A copy of the given data, but with instances of Unicode converted to Python
+ strings.
+ """
+ if isinstance(data, dict):
+ return {UnicodeToStr(key): UnicodeToStr(value)
+ for key, value in data.iteritems()}
+ elif isinstance(data, list):
+ return [UnicodeToStr(element) for element in data]
+ elif isinstance(data, unicode):
+ return data.encode('utf-8')
+ return data
+
+
+def ExtractSettingsFromJson(filepath):
+ """Extracts the settings data from the given JSON file.
+
+ Args:
+ filepath: The path to the JSON file to read.
+
+  Returns:
+ The data read from the JSON file with strings converted to Python strings.
+ """
+ # json.load() loads strings as unicode, which causes issues when trying
+ # to edit string values in preference files, so convert to Python strings
+ with open(filepath) as prefs_file:
+ return UnicodeToStr(json.load(prefs_file))
+
+
+def ApplySharedPreferenceSetting(shared_pref, setting):
+ """Applies the given app settings to the given device.
+
+ Modifies an installed app's settings by modifying its shared preference
+  settings file. The provided settings data must be a settings dictionary,
+  which has the following format:
+ {
+ "package": "com.example.package",
+ "filename": "AppSettingsFile.xml",
+ "supports_encrypted_path": true,
+ "set": {
+ "SomeBoolToSet": true,
+ "SomeStringToSet": "StringValue",
+ },
+ "remove": [
+ "list",
+ "of",
+ "keys",
+ "to",
+ "remove",
+ ]
+ }
+
+ Example JSON files that can be read with ExtractSettingsFromJson and passed to
+ this function are in //chrome/android/shared_preference_files/test/.
+
+ Args:
+ shared_pref: The devil SharedPrefs object for the device the settings will
+ be applied to.
+ setting: A settings dictionary to apply.
+ """
+ shared_pref.Load()
+ for key in setting.get('remove', []):
+ try:
+ shared_pref.Remove(key)
+ except KeyError:
+ logging.warning("Attempted to remove non-existent key %s", key)
+ for key, value in setting.get('set', {}).iteritems():
+ if isinstance(value, bool):
+ shared_pref.SetBoolean(key, value)
+ elif isinstance(value, basestring):
+ shared_pref.SetString(key, value)
+ elif isinstance(value, long) or isinstance(value, int):
+ shared_pref.SetLong(key, value)
+ elif isinstance(value, list):
+ shared_pref.SetStringSet(key, value)
+ else:
+ raise ValueError("Given invalid value type %s for key %s" % (
+ str(type(value)), key))
+ shared_pref.Commit()
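+
+
+# Example (illustrative; assumes |prefs| is a devil SharedPrefs instance and
+# the JSON file holds a single settings dictionary):
+#   setting = ExtractSettingsFromJson('/path/to/settings.json')
+#   ApplySharedPreferenceSetting(prefs, setting)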
diff --git a/deps/v8/build/android/pylib/utils/simpleperf.py b/deps/v8/build/android/pylib/utils/simpleperf.py
new file mode 100644
index 0000000000..be259d621f
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/simpleperf.py
@@ -0,0 +1,259 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from devil import devil_env
+from devil.android import device_signal
+from devil.android.sdk import version_codes
+
+
+def _ProcessType(proc):
+ _, _, suffix = proc.name.partition(':')
+ if not suffix:
+ return 'browser'
+ if suffix.startswith('sandboxed_process'):
+ return 'renderer'
+ if suffix.startswith('privileged_process'):
+ return 'gpu'
+ return None
+
+
+def _GetSpecifiedPID(device, package_name, process_specifier):
+ if process_specifier is None:
+ return None
+
+ # Check for numeric PID
+ try:
+ pid = int(process_specifier)
+ return pid
+ except ValueError:
+ pass
+
+ # Check for exact process name; can be any of these formats:
+ # <package>:<process name>, i.e. 'org.chromium.chrome:sandboxed_process0'
+ # :<process name>, i.e. ':sandboxed_process0'
+ # <process name>, i.e. 'sandboxed_process0'
+ full_process_name = process_specifier
+ if process_specifier.startswith(':'):
+ full_process_name = package_name + process_specifier
+ elif ':' not in process_specifier:
+ full_process_name = '%s:%s' % (package_name, process_specifier)
+ matching_processes = device.ListProcesses(full_process_name)
+ if len(matching_processes) == 1:
+ return matching_processes[0].pid
+ if len(matching_processes) > 1:
+ raise RuntimeError('Found %d processes with name "%s".' % (
+ len(matching_processes), process_specifier))
+
+ # Check for process type (i.e. 'renderer')
+ package_processes = device.ListProcesses(package_name)
+ matching_processes = [p for p in package_processes if (
+ _ProcessType(p) == process_specifier)]
+ if process_specifier == 'renderer' and len(matching_processes) > 1:
+ raise RuntimeError('Found %d renderer processes; please re-run with only '
+ 'one open tab.' % len(matching_processes))
+ if len(matching_processes) != 1:
+ raise RuntimeError('Found %d processes of type "%s".' % (
+ len(matching_processes), process_specifier))
+ return matching_processes[0].pid
+
+
+def _ThreadsForProcess(device, pid):
+ # The thread list output format for 'ps' is the same regardless of version.
+ # Here's the column headers, and a sample line for a thread belonging to
+ # pid 12345 (note that the last few columns are not aligned with headers):
+ #
+ # USER PID TID PPID VSZ RSS WCHAN ADDR S CMD
+ # u0_i101 12345 24680 567 1357902 97531 futex_wait_queue_me e85acd9c S \
+ # CrRendererMain
+ if device.build_version_sdk >= version_codes.OREO:
+ pid_regex = (
+ r'^[[:graph:]]\{1,\}[[:blank:]]\{1,\}%d[[:blank:]]\{1,\}' % pid)
+ ps_cmd = "ps -T -e | grep '%s'" % pid_regex
+ ps_output_lines = device.RunShellCommand(
+ ps_cmd, shell=True, check_return=True)
+ else:
+ ps_cmd = ['ps', '-p', str(pid), '-t']
+ ps_output_lines = device.RunShellCommand(ps_cmd, check_return=True)
+ result = []
+ for l in ps_output_lines:
+ fields = l.split()
+ # fields[2] is tid, fields[-1] is thread name. Output may include an entry
+ # for the process itself with tid=pid; omit that one.
+ if fields[2] == str(pid):
+ continue
+ result.append((int(fields[2]), fields[-1]))
+ return result
+
+
+def _ThreadType(thread_name):
+ if not thread_name:
+ return 'unknown'
+ if (thread_name.startswith('Chrome_ChildIO') or
+ thread_name.startswith('Chrome_IO')):
+ return 'io'
+ if thread_name.startswith('Compositor'):
+ return 'compositor'
+ if (thread_name.startswith('ChildProcessMai') or
+ thread_name.startswith('CrGpuMain') or
+ thread_name.startswith('CrRendererMain')):
+ return 'main'
+ if thread_name.startswith('RenderThread'):
+ return 'render'
+ return None
+
+
+def _GetSpecifiedTID(device, pid, thread_specifier):
+ if thread_specifier is None:
+ return None
+
+ # Check for numeric TID
+ try:
+ tid = int(thread_specifier)
+ return tid
+ except ValueError:
+ pass
+
+ # Check for thread type
+ if pid is not None:
+ matching_threads = [t for t in _ThreadsForProcess(device, pid) if (
+ _ThreadType(t[1]) == thread_specifier)]
+ if len(matching_threads) != 1:
+ raise RuntimeError('Found %d threads of type "%s".' % (
+ len(matching_threads), thread_specifier))
+ return matching_threads[0][0]
+
+ return None
+
+
+def PrepareDevice(device):
+ if device.build_version_sdk < version_codes.NOUGAT:
+ raise RuntimeError('Simpleperf profiling is only supported on Android N '
+ 'and later.')
+
+ # Necessary for profiling
+ # https://android-review.googlesource.com/c/platform/system/sepolicy/+/234400
+ device.SetProp('security.perf_harden', '0')
+
+
+def InstallSimpleperf(device, package_name):
+ package_arch = device.GetPackageArchitecture(package_name) or 'armeabi-v7a'
+ host_simpleperf_path = devil_env.config.LocalPath('simpleperf', package_arch)
+ if not host_simpleperf_path:
+ raise Exception('Could not get path to simpleperf executable on host.')
+ device_simpleperf_path = '/'.join(
+ ('/data/local/tmp/profilers', package_arch, 'simpleperf'))
+ device.PushChangedFiles([(host_simpleperf_path, device_simpleperf_path)])
+ return device_simpleperf_path
+
+
+@contextlib.contextmanager
+def RunSimpleperf(device, device_simpleperf_path, package_name,
+ process_specifier, thread_specifier, profiler_args,
+ host_out_path):
+ pid = _GetSpecifiedPID(device, package_name, process_specifier)
+ tid = _GetSpecifiedTID(device, pid, thread_specifier)
+ if pid is None and tid is None:
+ raise RuntimeError('Could not find specified process/thread running on '
+ 'device. Make sure the apk is already running before '
+ 'attempting to profile.')
+ profiler_args = list(profiler_args)
+ if profiler_args and profiler_args[0] == 'record':
+ profiler_args.pop(0)
+ if '--call-graph' not in profiler_args and '-g' not in profiler_args:
+ profiler_args.append('-g')
+ if '-f' not in profiler_args:
+ profiler_args.extend(('-f', '1000'))
+ device_out_path = '/data/local/tmp/perf.data'
+ if '-o' in profiler_args:
+ device_out_path = profiler_args[profiler_args.index('-o') + 1]
+ else:
+ profiler_args.extend(('-o', device_out_path))
+
+ if tid:
+ profiler_args.extend(('-t', str(tid)))
+ else:
+ profiler_args.extend(('-p', str(pid)))
+
+ adb_shell_simpleperf_process = device.adb.StartShell(
+ [device_simpleperf_path, 'record'] + profiler_args)
+
+ completed = False
+ try:
+ yield
+ completed = True
+
+ finally:
+ device.KillAll('simpleperf', signum=device_signal.SIGINT, blocking=True,
+ quiet=True)
+ if completed:
+ adb_shell_simpleperf_process.wait()
+ device.PullFile(device_out_path, host_out_path)
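+
+# Illustrative usage (a sketch; the package name, specifiers, and output path
+# are hypothetical; simpleperf_path is the value returned by InstallSimpleperf
+# above):
+#   with RunSimpleperf(device, simpleperf_path, 'com.example.app',
+#                      'renderer', 'main', [], '/tmp/perf.data'):
+#     pass  # Exercise the app here while the profile is recorded.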
+
+
+def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
+ pprof_out_path):
+ # The simpleperf scripts require the unstripped libs to be installed in the
+ # same directory structure as the libs on the device. Much of the logic here
+ # is just figuring out and creating the necessary directory structure, and
+ # symlinking the unstripped shared libs.
+
+ # Get the set of libs that we can symbolize
+ unstripped_lib_dir = os.path.join(build_directory, 'lib.unstripped')
+ unstripped_libs = set(
+ f for f in os.listdir(unstripped_lib_dir) if f.endswith('.so'))
+
+ # report.py will show the directory structure above the shared libs;
+ # that is the directory structure we need to recreate on the host.
+ script_dir = devil_env.config.LocalPath('simpleperf_scripts')
+ report_path = os.path.join(script_dir, 'report.py')
+ report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path]
+ device_lib_path = None
+ for line in subprocess.check_output(
+ report_cmd, stderr=subprocess.STDOUT).splitlines():
+ fields = line.split()
+ if len(fields) < 5:
+ continue
+ shlib_path = fields[4]
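+ # rpartition('/') gives (dirname, '/', basename); the [::2] slice keeps
+ # just the dirname and basename.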
+ shlib_dirname, shlib_basename = shlib_path.rpartition('/')[::2]
+ if shlib_basename in unstripped_libs:
+ device_lib_path = shlib_dirname
+ break
+ if not device_lib_path:
+ raise RuntimeError('No chrome-related symbols in profiling data in %s. '
+ 'Either the process was idle for the entire profiling '
+ 'period, or something went very wrong (and you should '
+ 'file a bug at crbug.com/new with component '
+ 'Speed>Tracing, and assign it to szager@chromium.org).'
+ % simpleperf_out_path)
+
+ # Recreate the directory structure locally, and symlink unstripped libs.
+ processing_dir = tempfile.mkdtemp()
+ try:
+ processing_lib_dir = os.path.join(
+ processing_dir, 'binary_cache', device_lib_path.lstrip('/'))
+ os.makedirs(processing_lib_dir)
+ for lib in unstripped_libs:
+ unstripped_lib_path = os.path.join(unstripped_lib_dir, lib)
+ processing_lib_path = os.path.join(processing_lib_dir, lib)
+ os.symlink(unstripped_lib_path, processing_lib_path)
+
+ # Run the script to annotate symbols and convert from simpleperf format to
+ # pprof format.
+ llvm_symbolizer_path = devil_env.config.LocalPath('llvm-symbolizer')
+ pprof_converter_script = os.path.join(
+ script_dir, 'pprof_proto_generator.py')
+ pprof_converter_cmd = [sys.executable, pprof_converter_script,
+ '-i', simpleperf_out_path,
+ '-o', os.path.abspath(pprof_out_path),
+ '--addr2line', llvm_symbolizer_path]
+ subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
+ cwd=processing_dir)
+ finally:
+ shutil.rmtree(processing_dir, ignore_errors=True)
diff --git a/deps/v8/build/android/pylib/utils/test_filter.py b/deps/v8/build/android/pylib/utils/test_filter.py
new file mode 100644
index 0000000000..430b4c598d
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/test_filter.py
@@ -0,0 +1,139 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+
+_CMDLINE_NAME_SEGMENT_RE = re.compile(
+ r' with(?:out)? \{[^\}]*\}')
+
+class ConflictingPositiveFiltersException(Exception):
+ """Raised when both filter file and filter argument have positive filters."""
+
+
+def ParseFilterFile(input_lines):
+ """Converts test filter file contents to positive and negative pattern lists.
+
+ See //testing/buildbot/filters/README.md for description of the
+ syntax that |input_lines| are expected to follow.
+
+ See
+ https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md#running-a-subset-of-the-tests
+ for description of the syntax that --gtest_filter argument should follow.
+
+ Args:
+ input_lines: An iterable (e.g. a list or a file) containing input lines.
+ Returns:
+ tuple containing the lists of positive patterns and negative patterns
+ """
+ # Strip comments and whitespace from each line and filter non-empty lines.
+ stripped_lines = (l.split('#', 1)[0].strip() for l in input_lines)
+ filter_lines = [l for l in stripped_lines if l]
+
+ # Split the tests into positive and negative patterns (gtest treats
+ # every pattern after the first '-' sign as an exclusion).
+ positive_patterns = [l for l in filter_lines if l[0] != '-']
+ negative_patterns = [l[1:] for l in filter_lines if l[0] == '-']
+ return positive_patterns, negative_patterns
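+
+# For example, ParseFilterFile(['Foo.*', '-Foo.bar  # comment']) returns
+# (['Foo.*'], ['Foo.bar']).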
+
+
+def AddFilterOptions(parser):
+ """Adds filter command-line options to the provided parser.
+
+ Args:
+ parser: an argparse.ArgumentParser instance.
+ """
+ parser.add_argument(
+ # Deprecated argument.
+ '--gtest-filter-file',
+ # New argument.
+ '--test-launcher-filter-file',
+ dest='test_filter_file', type=os.path.realpath,
+ help='Path to file that contains googletest-style filter strings. '
+ 'See also //testing/buildbot/filters/README.md.')
+
+ filter_group = parser.add_mutually_exclusive_group()
+ filter_group.add_argument(
+ '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+ dest='test_filter',
+ help='googletest-style filter string.',
+ default=os.environ.get('GTEST_FILTER'))
+ filter_group.add_argument(
+ '--isolated-script-test-filter',
+ help='isolated script filter string. '
+ 'Like gtest filter strings, but with :: separators instead of :')
+
+
+def AppendPatternsToFilter(test_filter, positive_patterns=None,
+ negative_patterns=None):
+ """Returns a test-filter string with additional patterns.
+
+ Args:
+ test_filter: test filter string
+ positive_patterns: list of positive patterns to add to string
+ negative_patterns: list of negative patterns to add to string
+ """
+ positives = []
+ negatives = []
+ positive = ''
+ negative = ''
+
+ split_filter = test_filter.split('-', 1)
+ if len(split_filter) == 1:
+ positive = split_filter[0]
+ else:
+ positive, negative = split_filter
+
+ positives += [f for f in positive.split(':') if f]
+ negatives += [f for f in negative.split(':') if f]
+
+ positives += positive_patterns if positive_patterns else []
+ negatives += negative_patterns if negative_patterns else []
+
+ final_filter = ':'.join([p.replace('#', '.') for p in positives])
+ if negatives:
+ final_filter += '-' + ':'.join([n.replace('#', '.') for n in negatives])
+ return final_filter
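+
+# For example, AppendPatternsToFilter('A.*-B.*', ['C.*'], ['D.*']) returns
+# 'A.*:C.*-B.*:D.*'.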
+
+
+def HasPositivePatterns(test_filter):
+ """Returns True if test_filter contains a positive pattern, else False
+
+ Args:
+ test_filter: test-filter style string
+ """
+ return bool(len(test_filter) > 0 and test_filter[0] != '-')
+
+
+def InitializeFilterFromArgs(args):
+ """Returns a filter string from the command-line option values.
+
+ Args:
+ args: an argparse.Namespace instance resulting from using a parser to
+ which the filter options above were added.
+
+ Raises:
+ ConflictingPositiveFiltersException if both filter file and command line
+ specify positive filters.
+ """
+ test_filter = ''
+ if args.isolated_script_test_filter:
+ args.test_filter = args.isolated_script_test_filter.replace('::', ':')
+ if args.test_filter:
+ test_filter = _CMDLINE_NAME_SEGMENT_RE.sub(
+ '', args.test_filter.replace('#', '.'))
+
+ if args.test_filter_file:
+ with open(args.test_filter_file, 'r') as f:
+ positive_file_patterns, negative_file_patterns = ParseFilterFile(f)
+ if positive_file_patterns and HasPositivePatterns(test_filter):
+ raise ConflictingPositiveFiltersException(
+ 'Cannot specify positive pattern in both filter file and ' +
+ 'filter command line argument')
+ test_filter = AppendPatternsToFilter(test_filter,
+ positive_patterns=positive_file_patterns,
+ negative_patterns=negative_file_patterns)
+
+ return test_filter
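+
+
+# For example (a sketch): passing --test-filter=-Slow.* together with a filter
+# file containing only 'Fast.*' yields the filter string 'Fast.*-Slow.*'.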
diff --git a/deps/v8/build/android/pylib/utils/test_filter_test.py b/deps/v8/build/android/pylib/utils/test_filter_test.py
new file mode 100755
index 0000000000..1ae5a7ebe0
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/test_filter_test.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+import tempfile
+import unittest
+
+from pylib.utils import test_filter
+
+class ParseFilterFileTest(unittest.TestCase):
+
+ def testParseFilterFile_commentsAndBlankLines(self):
+ input_lines = [
+ 'positive1',
+ '# comment',
+ 'positive2 # Another comment',
+ '',
+ 'positive3'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2', 'positive3'], []
+ self.assertEquals(expected, actual)
+
+ def testParseFilterFile_onlyPositive(self):
+ input_lines = [
+ 'positive1',
+ 'positive2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2'], []
+ self.assertEquals(expected, actual)
+
+ def testParseFilterFile_onlyNegative(self):
+ input_lines = [
+ '-negative1',
+ '-negative2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = [], ['negative1', 'negative2']
+ self.assertEquals(expected, actual)
+
+ def testParseFilterFile_positiveAndNegative(self):
+ input_lines = [
+ 'positive1',
+ 'positive2',
+ '-negative1',
+ '-negative2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2'], ['negative1', 'negative2']
+ self.assertEquals(expected, actual)
+
+
+class InitializeFilterFromArgsTest(unittest.TestCase):
+
+ def testInitializeBasicFilter(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--test-filter',
+ 'FooTest.testFoo:BarTest.testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testInitializeJavaStyleFilter(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--test-filter',
+ 'FooTest#testFoo:BarTest#testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testInitializeBasicIsolatedScript(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--isolated-script-test-filter',
+ 'FooTest.testFoo::BarTest.testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testFilterArgWithPositiveFilterInFilterFile(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter=-negative1',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = 'positive1:positive2-negative1:negative2:negative3'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testFilterFileWithPositiveFilterInFilterArg(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('-negative2\n-negative3\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter',
+ 'positive1:positive2-negative1',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = 'positive1:positive2-negative1:negative2:negative3'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testPositiveFilterInBothFileAndArg(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('positive1\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter',
+ 'positive2',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ with self.assertRaises(test_filter.ConflictingPositiveFiltersException):
+ test_filter.InitializeFilterFromArgs(args)
+
+ def testFilterArgWithFilterFileAllNegative(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('-negative3\n-negative4\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter=-negative1:negative2',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = '-negative1:negative2:negative3:negative4'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+
+class AppendPatternsToFilterTest(unittest.TestCase):
+ def testAllEmpty(self):
+ expected = ''
+ actual = test_filter.AppendPatternsToFilter('', [], [])
+ self.assertEquals(actual, expected)
+ def testAppendOnlyPositiveToEmptyFilter(self):
+ expected = 'positive'
+ actual = test_filter.AppendPatternsToFilter('', ['positive'])
+ self.assertEquals(actual, expected)
+ def testAppendOnlyNegativeToEmptyFilter(self):
+ expected = '-negative'
+ actual = test_filter.AppendPatternsToFilter('',
+ negative_patterns=['negative'])
+ self.assertEquals(actual, expected)
+ def testAppendToEmptyFilter(self):
+ expected = 'positive-negative'
+ actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative'])
+ self.assertEquals(actual, expected)
+ def testAppendToPositiveOnlyFilter(self):
+ expected = 'positive1:positive2-negative'
+ actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'],
+ ['negative'])
+ self.assertEquals(actual, expected)
+ def testAppendToNegativeOnlyFilter(self):
+ expected = 'positive-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'],
+ ['negative2'])
+ self.assertEquals(actual, expected)
+ def testAppendPositiveToFilter(self):
+ expected = 'positive1:positive2-negative1'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2'])
+ self.assertEquals(actual, expected)
+ def testAppendNegativeToFilter(self):
+ expected = 'positive1-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ negative_patterns=['negative2'])
+ self.assertEquals(actual, expected)
+ def testAppendBothToFilter(self):
+ expected = 'positive1:positive2-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ positive_patterns=['positive2'],
+ negative_patterns=['negative2'])
+ self.assertEquals(actual, expected)
+ def testAppendMultipleToFilter(self):
+ expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2', 'positive3'],
+ ['negative2', 'negative3'])
+ self.assertEquals(actual, expected)
+ def testRepeatedAppendToFilter(self):
+ expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+ filter_string = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2'],
+ ['negative2'])
+ actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'],
+ ['negative3'])
+ self.assertEquals(actual, expected)
+ def testAppendHashSeparatedPatternsToFilter(self):
+ expected = 'positive.test1:positive.test2-negative.test1:negative.test2'
+ actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1',
+ ['positive#test2'],
+ ['negative#test2'])
+ self.assertEquals(actual, expected)
+
+
+class HasPositivePatternsTest(unittest.TestCase):
+ def testEmpty(self):
+ expected = False
+ actual = test_filter.HasPositivePatterns('')
+ self.assertEquals(actual, expected)
+ def testHasOnlyPositive(self):
+ expected = True
+ actual = test_filter.HasPositivePatterns('positive')
+ self.assertEquals(actual, expected)
+ def testHasOnlyNegative(self):
+ expected = False
+ actual = test_filter.HasPositivePatterns('-negative')
+ self.assertEquals(actual, expected)
+ def testHasBoth(self):
+ expected = True
+ actual = test_filter.HasPositivePatterns('positive-negative')
+ self.assertEquals(actual, expected)
+
+
+if __name__ == '__main__':
+ sys.exit(unittest.main())
diff --git a/deps/v8/build/android/pylib/utils/time_profile.py b/deps/v8/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000000..094799c4f2
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+ """Class for simple profiling of action, with logging of cost."""
+
+ def __init__(self, description='operation'):
+ self._starttime = None
+ self._endtime = None
+ self._description = description
+ self.Start()
+
+ def Start(self):
+ self._starttime = time.time()
+ self._endtime = None
+
+ def GetDelta(self):
+ """Returns the rounded delta.
+
+ Also stops the timer if Stop() has not already been called.
+ """
+ if self._endtime is None:
+ self.Stop(log=False)
+ delta = self._endtime - self._starttime
+ delta = round(delta, 2) if delta < 10 else round(delta, 1)
+ return delta
+
+ def LogResult(self):
+ """Logs the result."""
+ logging.info('%s seconds to perform %s', self.GetDelta(), self._description)
+
+ def Stop(self, log=True):
+ """Stop profiling.
+
+ Args:
+ log: Log the delta (defaults to true).
+ """
+ self._endtime = time.time()
+ if log:
+ self.LogResult()
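+
+
+# Illustrative usage (a sketch):
+#   timer = TimeProfile('dex install')  # Starts timing immediately.
+#   ...                                 # Perform the operation being timed.
+#   timer.Stop()  # Logs e.g. "1.23 seconds to perform dex install".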
diff --git a/deps/v8/build/android/pylib/utils/xvfb.py b/deps/v8/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000000..cb9d50e8fd
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+ """Return True if on Linux; else False."""
+ return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+ """Class to start and stop Xvfb if relevant. Nop if not Linux."""
+
+ def __init__(self):
+ self._pid = 0
+
+ def Start(self):
+ """Start Xvfb and set an appropriate DISPLAY environment. Linux only.
+
+ Copied from tools/code_coverage/coverage_posix.py
+ """
+ if not _IsLinux():
+ return
+ proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+ '-ac'],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ self._pid = proc.pid
+ if not self._pid:
+ raise Exception('Could not start Xvfb')
+ os.environ['DISPLAY'] = ':9'
+
+ # Now confirm, giving a chance for it to start if needed.
+ for _ in range(10):
+ proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+ _, retcode = os.waitpid(proc.pid, 0)
+ if retcode == 0:
+ break
+ time.sleep(0.25)
+ if retcode != 0:
+ raise Exception('Could not confirm Xvfb happiness')
+
+ def Stop(self):
+ """Stop Xvfb if needed. Linux only."""
+ if self._pid:
+ try:
+ os.kill(self._pid, signal.SIGKILL)
+ except:
+ pass
+ del os.environ['DISPLAY']
+ self._pid = 0
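+
+
+# Illustrative usage (a sketch; Start() and Stop() are no-ops off Linux):
+#   xvfb = Xvfb()
+#   xvfb.Start()
+#   try:
+#     pass  # Run code that needs a DISPLAY here.
+#   finally:
+#     xvfb.Stop()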
diff --git a/deps/v8/build/android/pylib/valgrind_tools.py b/deps/v8/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000000..4c27b083b7
--- /dev/null
+++ b/deps/v8/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,129 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=R0201
+
+import glob
+import logging
+import os.path
+import subprocess
+import sys
+
+from devil.android import device_errors
+from devil.android.valgrind_tools import base_tool
+from pylib.constants import DIR_SOURCE_ROOT
+
+
+def SetChromeTimeoutScale(device, scale):
+ """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+ path = '/data/local/tmp/chrome_timeout_scale'
+ if not scale or scale == 1.0:
+ # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+ device.RemovePath(path, force=True, as_root=True)
+ else:
+ device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+class AddressSanitizerTool(base_tool.BaseTool):
+ """AddressSanitizer tool."""
+
+ WRAPPER_NAME = '/system/bin/asanwrapper'
+ # Disable memcmp overlap check. There are blobs (GL drivers) on some
+ # Android devices that use memcmp on overlapping regions; nothing we can
+ # do about that.
+ EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+ def __init__(self, device):
+ super(AddressSanitizerTool, self).__init__()
+ self._device = device
+
+ @classmethod
+ def CopyFiles(cls, device):
+ """Copies ASan tools to the device."""
+ libs = glob.glob(os.path.join(DIR_SOURCE_ROOT,
+ 'third_party/llvm-build/Release+Asserts/',
+ 'lib/clang/*/lib/linux/',
+ 'libclang_rt.asan-arm-android.so'))
+ assert len(libs) == 1
+ subprocess.call(
+ [os.path.join(
+ DIR_SOURCE_ROOT,
+ 'tools/android/asan/third_party/asan_device_setup.sh'),
+ '--device', str(device),
+ '--lib', libs[0],
+ '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS])
+ device.WaitUntilFullyBooted()
+
+ def GetTestWrapper(self):
+ return AddressSanitizerTool.WRAPPER_NAME
+
+ def GetUtilWrapper(self):
+ """Returns the wrapper for utilities, such as forwarder.
+
+ AddressSanitizer wrapper must be added to all instrumented binaries,
+ including forwarder and the like. This can be removed if such binaries
+ were built without instrumentation.
+ """
+ return self.GetTestWrapper()
+
+ def SetupEnvironment(self):
+ try:
+ self._device.EnableRoot()
+ except device_errors.CommandFailedError as e:
+ # Try to set the timeout scale anyway.
+ # TODO(jbudorick) Handle this exception appropriately after interface
+ # conversions are finished.
+ logging.error(str(e))
+ SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+ def CleanUpEnvironment(self):
+ SetChromeTimeoutScale(self._device, None)
+
+ def GetTimeoutScale(self):
+ # Very slow startup.
+ return 20.0
+
+
+TOOL_REGISTRY = {
+ 'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+ """Creates a tool with the specified tool name.
+
+ Args:
+ tool_name: Name of the tool to create.
+ device: A DeviceUtils instance.
+ Returns:
+ A tool for the specified tool_name.
+ """
+ if not tool_name:
+ return base_tool.BaseTool()
+
+ ctor = TOOL_REGISTRY.get(tool_name)
+ if ctor:
+ return ctor(device)
+ else:
+ print 'Unknown tool %s, available tools: %s' % (
+ tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+ sys.exit(1)
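+
+# For example, CreateTool('asan', device) returns an AddressSanitizerTool,
+# while an empty tool_name yields the no-op base_tool.BaseTool().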
+
+def PushFilesForTool(tool_name, device):
+ """Pushes the files required for |tool_name| to |device|.
+
+ Args:
+ tool_name: Name of the tool to create.
+ device: A DeviceUtils instance.
+ """
+ if not tool_name:
+ return
+
+ clazz = TOOL_REGISTRY.get(tool_name)
+ if clazz:
+ clazz.CopyFiles(device)
+ else:
+ print 'Unknown tool %s, available tools: %s' % (
+ tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+ sys.exit(1)
diff --git a/deps/v8/build/android/pylintrc b/deps/v8/build/android/pylintrc
new file mode 100644
index 0000000000..2a721bf270
--- /dev/null
+++ b/deps/v8/build/android/pylintrc
@@ -0,0 +1,15 @@
+[FORMAT]
+
+max-line-length=80
+
+[MESSAGES CONTROL]
+
+disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
+
+[REPORTS]
+
+reports=no
+
+[VARIABLES]
+
+dummy-variables-rgx=^_.*$|dummy
diff --git a/deps/v8/build/android/resource_sizes.gni b/deps/v8/build/android/resource_sizes.gni
new file mode 100644
index 0000000000..a22b7a23b8
--- /dev/null
+++ b/deps/v8/build/android/resource_sizes.gni
@@ -0,0 +1,39 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/internal_rules.gni")
+
+# Generates a script in the bin directory that runs
+# //build/android/resource_sizes.py against the provided apk.
+#
+# Variables:
+# apk: The APK target against which resource_sizes.py should run.
+template("android_resource_sizes_test") {
+ generate_android_wrapper(target_name) {
+ executable = "//build/android/resource_sizes.py"
+ wrapper_script = "$root_out_dir/bin/${target_name}"
+
+ # Getting the _apk_path below at build time requires the APK's
+ # build config.
+ deps = [
+ invoker.apk,
+ ]
+
+ data_deps = [
+ "//build/android:resource_sizes_py",
+ ]
+
+ _apk_build_config = get_label_info(invoker.apk, "target_gen_dir") + "/" +
+ get_label_info(invoker.apk, "name") + ".build_config"
+ _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir)
+ _apk_path =
+ "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))"
+ executable_args = [
+ "--chartjson",
+ _apk_path,
+ "--chromium-output-directory",
+ "@WrappedPath(.)",
+ ]
+ }
+}
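+
+# Example use (a sketch; the target and APK labels are hypothetical):
+#   android_resource_sizes_test("resource_sizes_chrome_public_apk") {
+#     apk = "//chrome/android:chrome_public_apk"
+#   }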
diff --git a/deps/v8/build/android/resource_sizes.py b/deps/v8/build/android/resource_sizes.py
new file mode 100755
index 0000000000..8d763b41be
--- /dev/null
+++ b/deps/v8/build/android/resource_sizes.py
@@ -0,0 +1,769 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Reports binary size metrics for an APK.
+
+More information at //docs/speed/binary_size/metrics.md.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+from contextlib import contextmanager
+import json
+import logging
+import os
+import posixpath
+import re
+import struct
+import sys
+import tempfile
+import zipfile
+import zlib
+
+from binary_size import apk_downloader
+import devil_chromium
+from devil.android.sdk import build_tools
+from devil.utils import cmd_helper
+from devil.utils import lazy
+import method_count
+from pylib import constants
+from pylib.constants import host_paths
+
+_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
+_BUILD_UTILS_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
+_APK_PATCH_SIZE_ESTIMATOR_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'apk-patch-size-estimator')
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+ from tracing.value import convert_chart_json # pylint: disable=import-error
+
+with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
+ from util import build_utils # pylint: disable=import-error
+
+with host_paths.SysPath(_APK_PATCH_SIZE_ESTIMATOR_PATH):
+ import apk_patch_size_estimator # pylint: disable=import-error
+
+
+# Python had a bug in zipinfo parsing that triggers on ChromeModern.apk
+# https://bugs.python.org/issue14315
+def _PatchedDecodeExtra(self):
+ # Try to decode the extra field.
+ extra = self.extra
+ unpack = struct.unpack
+ while len(extra) >= 4:
+ tp, ln = unpack('<HH', extra[:4])
+ if tp == 1:
+ if ln >= 24:
+ counts = unpack('<QQQ', extra[4:28])
+ elif ln == 16:
+ counts = unpack('<QQ', extra[4:20])
+ elif ln == 8:
+ counts = unpack('<Q', extra[4:12])
+ elif ln == 0:
+ counts = ()
+ else:
+ raise RuntimeError("Corrupt extra field %s" % (ln,))
+
+ idx = 0
+
+ # ZIP64 extension (large files and/or large archives)
+ if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
+ self.file_size = counts[idx]
+ idx += 1
+
+ if self.compress_size == 0xFFFFFFFFL:
+ self.compress_size = counts[idx]
+ idx += 1
+
+ if self.header_offset == 0xffffffffL:
+ self.header_offset = counts[idx]
+ idx += 1
+
+ extra = extra[ln + 4:]
+
+zipfile.ZipInfo._decodeExtra = ( # pylint: disable=protected-access
+ _PatchedDecodeExtra)
+
+# Captures an entire config from aapt output.
+_AAPT_CONFIG_PATTERN = r'config %s:(.*?)config [a-zA-Z-]+:'
+# Matches string resource entries from aapt output.
+_AAPT_ENTRY_RE = re.compile(
+ r'resource (?P<id>\w{10}) [\w\.]+:string/.*?"(?P<val>.+?)"', re.DOTALL)
+_BASE_CHART = {
+ 'format_version': '0.1',
+ 'benchmark_name': 'resource_sizes',
+ 'benchmark_description': 'APK resource size information.',
+ 'trace_rerun_options': [],
+ 'charts': {}
+}
+# Macro definitions look like (something, 123) when
+# enable_resource_whitelist_generation=true.
+_RC_HEADER_RE = re.compile(r'^#define (?P<name>\w+).* (?P<id>\d+)\)?$')
+_RE_NON_LANGUAGE_PAK = re.compile(r'^assets/.*(resources|percent)\.pak$')
+_READELF_SIZES_METRICS = {
+ 'text': ['.text'],
+ 'data': ['.data', '.rodata', '.data.rel.ro', '.data.rel.ro.local'],
+ 'relocations': ['.rel.dyn', '.rel.plt', '.rela.dyn', '.rela.plt'],
+ 'unwind': [
+ '.ARM.extab', '.ARM.exidx', '.eh_frame', '.eh_frame_hdr',
+ '.ARM.exidxsentinel_section_after_text'
+ ],
+ 'symbols': [
+ '.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
+ '.got.plt', '.hash', '.gnu.hash'
+ ],
+ 'bss': ['.bss'],
+ 'other': [
+ '.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
+ '.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
+ '.ARM.attributes', '.note.gnu.build-id', '.gnu.version',
+ '.gnu.version_d', '.gnu.version_r', '.interp', '.gcc_except_table'
+ ]
+}
+
+
+def _PercentageDifference(a, b):
+ if a == 0:
+ return 0
+ return float(b - a) / a
+
+
+def _RunReadelf(so_path, options, tool_prefix=''):
+ return cmd_helper.GetCmdOutput(
+ [tool_prefix + 'readelf'] + options + [so_path])
+
+
+def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
+ with Unzip(apk_path, filename=lib_path) as extracted_lib_path:
+ grouped_section_sizes = collections.defaultdict(int)
+ section_sizes = _CreateSectionNameSizeMap(extracted_lib_path, tool_prefix)
+ for group_name, section_names in _READELF_SIZES_METRICS.iteritems():
+ for section_name in section_names:
+ if section_name in section_sizes:
+ grouped_section_sizes[group_name] += section_sizes.pop(section_name)
+
+ # Group any unknown section headers into the "other" group.
+ for section_header, section_size in section_sizes.iteritems():
+ print('Unknown elf section header: %s' % section_header)
+ grouped_section_sizes['other'] += section_size
+
+ return grouped_section_sizes
+
+
+def _CreateSectionNameSizeMap(so_path, tool_prefix):
+ stdout = _RunReadelf(so_path, ['-S', '--wide'], tool_prefix)
+ section_sizes = {}
+ # Matches [ 2] .hash HASH 00000000006681f0 0001f0 003154 04 A 3 0 8
+ for match in re.finditer(r'\[[\s\d]+\] (\..*)$', stdout, re.MULTILINE):
+ items = match.group(1).split()
+ section_sizes[items[0]] = int(items[4], 16)
+
+ return section_sizes
+
+
+def _ParseManifestAttributes(apk_path):
+ # Check if the manifest specifies whether or not to extract native libs.
+ skip_extract_lib = False
+ output = cmd_helper.GetCmdOutput([
+ _AAPT_PATH.read(), 'd', 'xmltree', apk_path, 'AndroidManifest.xml'])
+ m = re.search(r'extractNativeLibs\(.*\)=\(.*\)(\w)', output)
+ if m:
+ skip_extract_lib = not bool(int(m.group(1)))
+
+ # Dex decompression overhead varies by Android version.
+ m = re.search(r'android:minSdkVersion\(\w+\)=\(type \w+\)(\w+)', output)
+ sdk_version = int(m.group(1), 16)
+
+ return sdk_version, skip_extract_lib
+
+
+def _NormalizeLanguagePaks(translations, factor):
+ english_pak = translations.FindByPattern(r'.*/en[-_][Uu][Ss]\.l?pak')
+ num_translations = translations.GetNumEntries()
+ ret = 0
+ if english_pak:
+ ret -= translations.ComputeZippedSize()
+ ret += int(english_pak.compress_size * num_translations * factor)
+ return ret
+
+
+def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations,
+ out_dir):
+ """Estimates the expected overhead of untranslated strings in resources.arsc.
+
+ See http://crbug.com/677966 for why this is necessary.
+ """
+ # If there are multiple .arsc files, use the resource packaged APK instead.
+ if num_arsc_files > 1:
+ if not out_dir:
+ print('Skipping resources.arsc normalization (output directory required)')
+ return 0
+ ap_name = os.path.basename(apk_path).replace('.apk', '.intermediate.ap_')
+ ap_path = os.path.join(out_dir, 'arsc/apks', ap_name)
+ if not os.path.exists(ap_path):
+ raise Exception('Missing expected file: %s, try rebuilding.' % ap_path)
+ apk_path = ap_path
+
+ aapt_output = _RunAaptDumpResources(apk_path)
+ # en-rUS is in the default config and may be cluttered with non-translatable
+ # strings, so en-rGB is a better baseline for finding missing translations.
+ en_strings = _CreateResourceIdValueMap(aapt_output, 'en-rGB')
+ fr_strings = _CreateResourceIdValueMap(aapt_output, 'fr')
+
+ # en-US and en-GB will never be translated.
+ config_count = num_translations - 2
+
+ size = 0
+ for res_id, string_val in en_strings.iteritems():
+ if string_val == fr_strings[res_id]:
+ string_size = len(string_val)
+ # 7 bytes is the per-entry overhead (not specific to any string). See
+ # https://android.googlesource.com/platform/frameworks/base.git/+/android-4.2.2_r1/tools/aapt/StringPool.cpp#414.
+ # The 1.5 factor was determined experimentally and is meant to account for
+ # other languages generally having longer strings than English.
+ size += config_count * (7 + string_size * 1.5)
+
+ return size
+
+
+def _CreateResourceIdValueMap(aapt_output, lang):
+ """Return a map of resource ids to string values for the given |lang|."""
+ config_re = _AAPT_CONFIG_PATTERN % lang
+ return {entry.group('id'): entry.group('val')
+ for config_section in re.finditer(config_re, aapt_output, re.DOTALL)
+ for entry in re.finditer(_AAPT_ENTRY_RE, config_section.group(0))}
+
+
+def _RunAaptDumpResources(apk_path):
+ cmd = [_AAPT_PATH.read(), 'dump', '--values', 'resources', apk_path]
+ status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+ if status != 0:
+ raise Exception('Failed running aapt command: "%s" with output "%s".' %
+ (' '.join(cmd), output))
+ return output
+
+
+class _FileGroup(object):
+ """Represents a category that apk files can fall into."""
+
+ def __init__(self, name):
+ self.name = name
+ self._zip_infos = []
+ self._extracted_multipliers = []
+
+ def AddZipInfo(self, zip_info, extracted_multiplier=0):
+ self._zip_infos.append(zip_info)
+ self._extracted_multipliers.append(extracted_multiplier)
+
+ def AllEntries(self):
+ return iter(self._zip_infos)
+
+ def GetNumEntries(self):
+ return len(self._zip_infos)
+
+ def FindByPattern(self, pattern):
+ return next((i for i in self._zip_infos if re.match(pattern, i.filename)),
+ None)
+
+ def FindLargest(self):
+ if not self._zip_infos:
+ return None
+ return max(self._zip_infos, key=lambda i: i.file_size)
+
+ def ComputeZippedSize(self):
+ return sum(i.compress_size for i in self._zip_infos)
+
+ def ComputeUncompressedSize(self):
+ return sum(i.file_size for i in self._zip_infos)
+
+ def ComputeExtractedSize(self):
+ ret = 0
+ for zi, multiplier in zip(self._zip_infos, self._extracted_multipliers):
+ ret += zi.file_size * multiplier
+ return ret
+
+ def ComputeInstallSize(self):
+ return self.ComputeExtractedSize() + self.ComputeZippedSize()
+
+
+def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func):
+ """Analyse APK to determine size contributions of different file classes."""
+ file_groups = []
+
+ def make_group(name):
+ group = _FileGroup(name)
+ file_groups.append(group)
+ return group
+
+ native_code = make_group('Native code')
+ java_code = make_group('Java code')
+ native_resources_no_translations = make_group('Native resources (no l10n)')
+ translations = make_group('Native resources (l10n)')
+ stored_translations = make_group('Native resources stored (l10n)')
+ icu_data = make_group('ICU (i18n library) data')
+ v8_snapshots = make_group('V8 Snapshots')
+ png_drawables = make_group('PNG drawables')
+ res_directory = make_group('Non-compiled Android resources')
+ arsc = make_group('Compiled Android resources')
+ metadata = make_group('Package metadata')
+ unknown = make_group('Unknown files')
+ notices = make_group('licenses.notice file')
+ unwind_cfi = make_group('unwind_cfi (dev and canary only)')
+
+ with zipfile.ZipFile(apk_filename, 'r') as apk:
+ apk_contents = apk.infolist()
+
+ sdk_version, skip_extract_lib = _ParseManifestAttributes(apk_filename)
+
+ # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
+ # L, M: ART - .odex file is compiled version of the dex file (~4x).
+ # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
+ # shared apps (~4x).
+ # Actual multipliers calculated using "apk_operations.py disk-usage".
+ # Will need to update multipliers once apk obfuscation is enabled.
+ # E.g. with obfuscation, the 4.04 changes to 4.46.
+ speed_profile_dex_multiplier = 1.17
+ orig_filename = apks_path or apk_filename
+ is_monochrome = 'Monochrome' in orig_filename
+ is_webview = 'WebView' in orig_filename
+ is_shared_apk = sdk_version >= 24 and (is_monochrome or is_webview)
+ if sdk_version < 21:
+ # JellyBean & KitKat
+ dex_multiplier = 1.16
+ elif sdk_version < 24:
+ # Lollipop & Marshmallow
+ dex_multiplier = 4.04
+ elif is_shared_apk:
+ # Oreo and above, compilation_filter=speed
+ dex_multiplier = 4.04
+ else:
+ # Oreo and above, compilation_filter=speed-profile
+ dex_multiplier = speed_profile_dex_multiplier
+
+ total_apk_size = os.path.getsize(apk_filename)
+ for member in apk_contents:
+ filename = member.filename
+ if filename.endswith('/'):
+ continue
+ if filename.endswith('.so'):
+ basename = posixpath.basename(filename)
+ should_extract_lib = not skip_extract_lib and basename.startswith('lib')
+ native_code.AddZipInfo(
+ member, extracted_multiplier=int(should_extract_lib))
+ elif filename.endswith('.dex'):
+ java_code.AddZipInfo(member, extracted_multiplier=dex_multiplier)
+ elif re.search(_RE_NON_LANGUAGE_PAK, filename):
+ native_resources_no_translations.AddZipInfo(member)
+ elif filename.endswith('.pak') or filename.endswith('.lpak'):
+ compressed = member.compress_type != zipfile.ZIP_STORED
+ bucket = translations if compressed else stored_translations
+ extracted_multiplier = 0
+ if compressed:
+ extracted_multiplier = int('en_' in filename or 'en-' in filename)
+ bucket.AddZipInfo(member, extracted_multiplier=extracted_multiplier)
+ elif filename == 'assets/icudtl.dat':
+ icu_data.AddZipInfo(member)
+ elif filename.endswith('.bin'):
+ v8_snapshots.AddZipInfo(member)
+ elif filename.endswith('.png') or filename.endswith('.webp'):
+ png_drawables.AddZipInfo(member)
+ elif filename.startswith('res/'):
+ res_directory.AddZipInfo(member)
+ elif filename.endswith('.arsc'):
+ arsc.AddZipInfo(member)
+ elif filename.startswith('META-INF') or filename == 'AndroidManifest.xml':
+ metadata.AddZipInfo(member)
+ elif filename.endswith('.notice'):
+ notices.AddZipInfo(member)
+ elif filename.startswith('assets/unwind_cfi'):
+ unwind_cfi.AddZipInfo(member)
+ else:
+ unknown.AddZipInfo(member)
+
+ if apks_path:
+ # We're mostly focused on the size of Chrome for non-English locales, so
+ # assume the Hindi (arbitrarily chosen) locale split is installed.
+ with zipfile.ZipFile(apks_path) as z:
+ hindi_apk_info = z.getinfo('splits/base-hi.apk')
+ total_apk_size += hindi_apk_info.file_size
+
+ total_install_size = total_apk_size
+ total_install_size_android_go = total_apk_size
+ zip_overhead = total_apk_size
+
+ for group in file_groups:
+ actual_size = group.ComputeZippedSize()
+ install_size = group.ComputeInstallSize()
+ uncompressed_size = group.ComputeUncompressedSize()
+ extracted_size = group.ComputeExtractedSize()
+ total_install_size += extracted_size
+ zip_overhead -= actual_size
+
+ report_func('Breakdown', group.name + ' size', actual_size, 'bytes')
+ report_func('InstallBreakdown', group.name + ' size', int(install_size),
+ 'bytes')
+ # Only a few metrics are compressed in the first place.
+ # To avoid over-reporting, track uncompressed size only for compressed
+ # entries.
+ if uncompressed_size != actual_size:
+ report_func('Uncompressed', group.name + ' size', uncompressed_size,
+ 'bytes')
+
+ if group is java_code and is_shared_apk:
+ # Updates are compiled using quicken, but system image uses speed-profile.
+ extracted_size = int(uncompressed_size * speed_profile_dex_multiplier)
+ total_install_size_android_go += extracted_size
+ report_func('InstallBreakdownGo', group.name + ' size',
+ actual_size + extracted_size, 'bytes')
+ elif group is translations and apks_path:
+ # Assume Hindi rather than English (accounted for above in total_apk_size)
+ total_install_size_android_go += actual_size
+ else:
+ total_install_size_android_go += extracted_size
+
+ # Per-file zip overhead is caused by:
+ # * 30 byte entry header + len(file name)
+ # * 46 byte central directory entry + len(file name)
+ # * 0-3 bytes for zipalign.
+ report_func('Breakdown', 'Zip Overhead', zip_overhead, 'bytes')
+ report_func('InstallSize', 'APK size', total_apk_size, 'bytes')
+ report_func('InstallSize', 'Estimated installed size',
+ int(total_install_size), 'bytes')
+ if is_shared_apk:
+ report_func('InstallSize', 'Estimated installed size (Android Go)',
+ int(total_install_size_android_go), 'bytes')
+ transfer_size = _CalculateCompressedSize(apk_filename)
+ report_func('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes')
+
+ # Size of main dex vs remaining.
+ main_dex_info = java_code.FindByPattern('classes.dex')
+ if main_dex_info:
+ main_dex_size = main_dex_info.file_size
+ report_func('Specifics', 'main dex size', main_dex_size, 'bytes')
+ secondary_size = java_code.ComputeUncompressedSize() - main_dex_size
+ report_func('Specifics', 'secondary dex size', secondary_size, 'bytes')
+
+ main_lib_info = native_code.FindLargest()
+ native_code_unaligned_size = 0
+ for lib_info in native_code.AllEntries():
+ section_sizes = _ExtractLibSectionSizesFromApk(
+ apk_filename, lib_info.filename, tool_prefix)
+ native_code_unaligned_size += sum(
+ v for k, v in section_sizes.iteritems() if k != 'bss')
+ # Size of main .so vs remaining.
+ if lib_info == main_lib_info:
+ main_lib_size = lib_info.file_size
+ report_func('Specifics', 'main lib size', main_lib_size, 'bytes')
+ secondary_size = native_code.ComputeUncompressedSize() - main_lib_size
+ report_func('Specifics', 'other lib size', secondary_size, 'bytes')
+
+ for metric_name, size in section_sizes.iteritems():
+ report_func('MainLibInfo', metric_name, size, 'bytes')
+
+ # Main metric that we want to monitor for jumps.
+ normalized_apk_size = total_apk_size
+ # unwind_cfi exists only in dev, canary, and non-channel builds.
+ normalized_apk_size -= unwind_cfi.ComputeZippedSize()
+ # Sections within .so files get 4kb aligned, so use section sizes rather than
+ # file size. Also gets rid of compression.
+ normalized_apk_size -= native_code.ComputeZippedSize()
+ normalized_apk_size += native_code_unaligned_size
+ # Unaligned size should be ~= uncompressed size or something is wrong.
+ # As of now, padding_fraction ~= .007
+ padding_fraction = -_PercentageDifference(
+ native_code.ComputeUncompressedSize(), native_code_unaligned_size)
+ assert 0 <= padding_fraction < .02, 'Padding was: {}'.format(padding_fraction)
+ # Normalized dex size: size within the zip + size on disk for Android Go
+ # devices (which ~= uncompressed dex size).
+ normalized_apk_size += java_code.ComputeUncompressedSize()
+ if apks_path:
+ # Locale normalization not needed when measuring only one locale.
+ # E.g. a change that adds 300 chars of untranslated strings would cause the
+ # metric to be off by only 390 bytes (assuming a multiplier of 2.3 for
+ # Hindi).
+ pass
+ else:
+ # Avoid noise caused when strings change and translations haven't yet been
+ # updated.
+ num_translations = translations.GetNumEntries()
+ num_stored_translations = stored_translations.GetNumEntries()
+
+ if num_translations > 1:
+ # Multipliers found by looking at MonochromePublic.apk and seeing how much
+ # smaller en-US.pak is relative to the average locale.pak.
+ normalized_apk_size += _NormalizeLanguagePaks(translations, 1.17)
+ if num_stored_translations > 1:
+ normalized_apk_size += _NormalizeLanguagePaks(stored_translations, 1.43)
+ if num_translations + num_stored_translations > 1:
+ if num_translations == 0:
+ # WebView stores all locale paks uncompressed.
+ num_arsc_translations = num_stored_translations
+ else:
+ # Monochrome has more configurations than Chrome since it includes
+ # WebView (which supports more locales), but these should mostly be
+ # empty so ignore them here.
+ num_arsc_translations = num_translations
+ normalized_apk_size += int(
+ _NormalizeResourcesArsc(apk_filename, arsc.GetNumEntries(),
+ num_arsc_translations, out_dir))
+
+ report_func('Specifics', 'normalized apk size', normalized_apk_size, 'bytes')
+ # The "file count" metric cannot be grouped with any other metrics when the
+ # end result is going to be uploaded to the perf dashboard in the HistogramSet
+ # format due to mixed units (bytes vs. zip entries) causing malformed
+ # summaries to be generated.
+ # TODO(https://crbug.com/903970): Remove this workaround if unit mixing is
+ # ever supported.
+ report_func('FileCount', 'file count', len(apk_contents), 'zip entries')
+
+ for info in unknown.AllEntries():
+ sys.stderr.write(
+ 'Unknown entry: %s %d\n' % (info.filename, info.compress_size))
+
+
+def _AnnotatePakResources(out_dir):
+ """Returns a pair of maps: id_name_map, id_header_map."""
+ print('Looking at resources in: %s' % out_dir)
+
+ grit_headers = []
+ for root, _, files in os.walk(out_dir):
+ if root.endswith('grit'):
+ grit_headers += [os.path.join(root, f) for f in files if f.endswith('.h')]
+ assert grit_headers, 'Failed to find grit headers in %s' % out_dir
+
+ id_name_map = {}
+ id_header_map = {}
+ for header in grit_headers:
+ with open(header, 'r') as f:
+ for line in f.readlines():
+ m = _RC_HEADER_RE.match(line.strip())
+ if m:
+ i = int(m.group('id'))
+ name = m.group('name')
+ if i in id_name_map and name != id_name_map[i]:
+ print('WARNING: Resource ID conflict %s (%s vs %s)' % (
+ i, id_name_map[i], name))
+ id_name_map[i] = name
+ id_header_map[i] = os.path.relpath(header, out_dir)
+ return id_name_map, id_header_map
+
+
+def _CalculateCompressedSize(file_path):
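+ """Returns the zlib-compressed size of file_path (a transfer-size proxy)."""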
+ CHUNK_SIZE = 256 * 1024
+ compressor = zlib.compressobj()
+ total_size = 0
+ with open(file_path, 'rb') as f:
+ for chunk in iter(lambda: f.read(CHUNK_SIZE), ''):
+ total_size += len(compressor.compress(chunk))
+ total_size += len(compressor.flush())
+ return total_size
+
+
+def _DoDexAnalysis(apk_filename, report_func):
+ sizes, total_size = method_count.ExtractSizesFromZip(apk_filename)
+
+ dex_metrics = method_count.CONTRIBUTORS_TO_DEX_CACHE
+ cumulative_sizes = collections.defaultdict(int)
+ for classes_dex_sizes in sizes.values():
+ for key in dex_metrics:
+ cumulative_sizes[key] += classes_dex_sizes[key]
+ for key, label in dex_metrics.iteritems():
+ report_func('Dex', label, cumulative_sizes[key], 'entries')
+
+ report_func('DexCache', 'DexCache', total_size, 'bytes')
+
+
+def _PrintPatchSizeEstimate(new_apk, builder, bucket, report_func):
+ apk_name = os.path.basename(new_apk)
+ # Reference APK paths have spaces replaced by underscores.
+ builder = builder.replace(' ', '_')
+ old_apk = apk_downloader.MaybeDownloadApk(
+ builder, apk_downloader.CURRENT_MILESTONE, apk_name,
+ apk_downloader.DEFAULT_DOWNLOAD_PATH, bucket)
+ if old_apk:
+ # Use a temp dir in case patch size functions fail to clean up temp files.
+ with build_utils.TempDir() as tmp:
+ tmp_name = os.path.join(tmp, 'patch.tmp')
+ bsdiff = apk_patch_size_estimator.calculate_bsdiff(
+ old_apk, new_apk, None, tmp_name)
+ report_func('PatchSizeEstimate', 'BSDiff (gzipped)', bsdiff, 'bytes')
+ fbf = apk_patch_size_estimator.calculate_filebyfile(
+ old_apk, new_apk, None, tmp_name)
+ report_func('PatchSizeEstimate', 'FileByFile (gzipped)', fbf, 'bytes')
+
+
+@contextmanager
+def Unzip(zip_file, filename=None):
+ """Utility for temporary use of a single file in a zip archive."""
+ with build_utils.TempDir() as unzipped_dir:
+ unzipped_files = build_utils.ExtractAll(
+ zip_file, unzipped_dir, True, pattern=filename)
+ if len(unzipped_files) == 0:
+ raise Exception(
+ '%s not found in %s' % (filename, zip_file))
+ yield unzipped_files[0]
+
+
+def _ConfigOutDirAndToolsPrefix(out_dir):
+ if out_dir:
+ constants.SetOutputDirectory(out_dir)
+ else:
+ out_dir = constants.GetOutDirectory()
+ if out_dir:
+ build_vars = build_utils.ReadBuildVars(
+ os.path.join(out_dir, "build_vars.txt"))
+ tool_prefix = os.path.join(out_dir, build_vars['android_tool_prefix'])
+ else:
+ tool_prefix = ''
+ return out_dir, tool_prefix
+
+
+def _Analyze(apk_path, chartjson, args):
+
+ def report_func(*args):
+ # Do not add any new metrics without also documenting them in:
+ # //docs/speed/binary_size/metrics.md.
+ perf_tests_results_helper.ReportPerfResult(chartjson, *args)
+
+ out_dir, tool_prefix = _ConfigOutDirAndToolsPrefix(args.out_dir)
+ apks_path = args.input if args.input.endswith('.apks') else None
+ _DoApkAnalysis(apk_path, apks_path, tool_prefix, out_dir, report_func)
+ _DoDexAnalysis(apk_path, report_func)
+ if args.estimate_patch_size:
+ _PrintPatchSizeEstimate(apk_path, args.reference_apk_builder,
+ args.reference_apk_bucket, report_func)
+
+
+def ResourceSizes(args):
+ chartjson = _BASE_CHART.copy() if args.output_format else None
+
+ if args.input.endswith('.apk'):
+ _Analyze(args.input, chartjson, args)
+ elif args.input.endswith('.apks'):
+ with tempfile.NamedTemporaryFile(suffix='.apk') as f:
+ with zipfile.ZipFile(args.input) as z:
+ # Currently bundletool creates two base APKs when the .apks archive is
+ # built without specifying an sdkVersion. Always measure the one with an
+ # uncompressed shared library.
+ try:
+ info = z.getinfo('splits/base-master_2.apk')
+ except KeyError:
+ info = z.getinfo('splits/base-master.apk')
+ f.write(z.read(info))
+ f.flush()
+ _Analyze(f.name, chartjson, args)
+ else:
+ raise Exception('Unknown file type: ' + args.input)
+
+ if chartjson:
+ results_path = os.path.join(args.output_dir, 'results-chart.json')
+ logging.critical('Dumping chartjson to %s', results_path)
+ with open(results_path, 'w') as json_file:
+ json.dump(chartjson, json_file)
+
+ # We would ideally generate a histogram set directly instead of generating
+ # chartjson then converting. However, perf_tests_results_helper is in
+ # //build, which doesn't seem to have any precedent for depending on
+ # anything in Catapult. This can probably be fixed, but since this doesn't
+ # need to be super fast or anything, converting is a good enough solution
+ # for the time being.
+ if args.output_format == 'histograms':
+ histogram_result = convert_chart_json.ConvertChartJson(results_path)
+ if histogram_result.returncode != 0:
+ logging.error('chartjson conversion failed with error: %s',
+ histogram_result.stdout)
+ return 1
+
+ histogram_path = os.path.join(args.output_dir, 'perf_results.json')
+ logging.critical('Dumping histograms to %s', histogram_path)
+ with open(histogram_path, 'w') as json_file:
+ json_file.write(histogram_result.stdout)
+
+ return 0
+
+
+def main():
+ argparser = argparse.ArgumentParser(description='Print APK size metrics.')
+ argparser.add_argument(
+ '--min-pak-resource-size',
+ type=int,
+ default=20 * 1024,
+ help='Minimum byte size of displayed pak resources.')
+ argparser.add_argument(
+ '--chromium-output-directory',
+ dest='out_dir',
+ type=os.path.realpath,
+ help='Location of the build artifacts.')
+ argparser.add_argument(
+ '--chartjson',
+ action='store_true',
+ help='DEPRECATED. Use --output-format=chartjson '
+ 'instead.')
+ argparser.add_argument(
+ '--output-format',
+ choices=['chartjson', 'histograms'],
+ help='Output the results to a file in the given '
+ 'format instead of printing the results.')
+ argparser.add_argument(
+ '--output-dir', default='.', help='Directory to save chartjson to.')
+ argparser.add_argument('--loadable_module', help='Obsolete (ignored).')
+ argparser.add_argument(
+ '--estimate-patch-size',
+ action='store_true',
+ help='Include patch size estimates. Useful for perf '
+ 'builders where a reference APK is available, but '
+ 'adds ~3 mins to run time.')
+ argparser.add_argument(
+ '--reference-apk-builder',
+ default=apk_downloader.DEFAULT_BUILDER,
+ help='Builder name to use for reference APK for patch '
+ 'size estimates.')
+ argparser.add_argument(
+ '--reference-apk-bucket',
+ default=apk_downloader.DEFAULT_BUCKET,
+ help='Storage bucket holding reference APKs.')
+
+ # Accepted to conform to the isolated script interface. The test filter
+ # is ignored; the output path, if given, is written at the end of the run.
+ argparser.add_argument(
+ '--isolated-script-test-filter', help=argparse.SUPPRESS)
+ argparser.add_argument(
+ '--isolated-script-test-output',
+ type=os.path.realpath,
+ help='File to which results will be written in the '
+ 'simplified JSON output format.')
+
+ argparser.add_argument('input', help='Path to .apk or .apks file to measure.')
+ args = argparser.parse_args()
+
+ devil_chromium.Initialize(output_directory=args.out_dir)
+
+ # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
+ if args.chartjson:
+ args.output_format = 'chartjson'
+
+ isolated_script_output = {'valid': False, 'failures': []}
+
+ try:
+ result = ResourceSizes(args)
+ isolated_script_output = {
+ 'valid': True,
+ 'failures': ['resource_sizes'] if result else [],
+ }
+ finally:
+ if args.isolated_script_test_output:
+ with open(args.isolated_script_test_output, 'w') as output_file:
+ json.dump(isolated_script_output, output_file)
+
+ return result
+
+
+if __name__ == '__main__':
+ sys.exit(main())
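For context, the .apks branch of ResourceSizes() above reduces to pulling a single split out of the bundle archive before handing it to _Analyze(). A minimal standalone sketch of that step, assuming only the split names hard-coded above (the helper name is illustrative):

    import tempfile
    import zipfile

    def extract_base_master(apks_path):
      """Extracts the split that resource_sizes.py would measure.

      Prefers the split with the uncompressed shared library, falling back
      to the plain base split. Returns the path of a temporary .apk; the
      caller is responsible for deleting it.
      """
      with zipfile.ZipFile(apks_path) as z:
        try:
          info = z.getinfo('splits/base-master_2.apk')
        except KeyError:
          info = z.getinfo('splits/base-master.apk')
        out = tempfile.NamedTemporaryFile(suffix='.apk', delete=False)
        with out:
          out.write(z.read(info))
        return out.name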
diff --git a/deps/v8/build/android/resource_sizes.pydeps b/deps/v8/build/android/resource_sizes.pydeps
new file mode 100644
index 0000000000..7c075c2477
--- /dev/null
+++ b/deps/v8/build/android/resource_sizes.pydeps
@@ -0,0 +1,63 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/resource_sizes.pydeps build/android/resource_sizes.py
+../../third_party/apk-patch-size-estimator/apk_patch_size_estimator.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/dexdump.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/vinn/vinn/__init__.py
+../../third_party/catapult/third_party/vinn/vinn/_vinn.py
+../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../third_party/catapult/tracing/tracing/__init__.py
+../../third_party/catapult/tracing/tracing/value/__init__.py
+../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
+../../third_party/catapult/tracing/tracing_project.py
+../../third_party/depot_tools/download_from_google_storage.py
+../../third_party/depot_tools/subprocess2.py
+../../third_party/depot_tools/upload_to_google_storage.py
+../gn_helpers.py
+../util/lib/common/perf_result_data_type.py
+../util/lib/common/perf_tests_results_helper.py
+binary_size/__init__.py
+binary_size/apk_downloader.py
+devil_chromium.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+method_count.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+resource_sizes.py
diff --git a/deps/v8/build/android/screenshot.py b/deps/v8/build/android/screenshot.py
new file mode 100755
index 0000000000..6ab906086d
--- /dev/null
+++ b/deps/v8/build/android/screenshot.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import screenshot
+
+if __name__ == '__main__':
+ devil_chromium.Initialize()
+ sys.exit(screenshot.main())
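A note on the wrapper above: its only job is to run devil's screenshot tool with Chromium's devil configuration (the devil_chromium.Initialize() call); argument handling is left entirely to devil.android.tools.screenshot, so any command-line flags are interpreted by devil rather than by this file.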
diff --git a/deps/v8/build/android/stacktrace/BUILD.gn b/deps/v8/build/android/stacktrace/BUILD.gn
new file mode 100644
index 0000000000..a3957fec3d
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/BUILD.gn
@@ -0,0 +1,17 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_binary("java_deobfuscate") {
+ main_class = "org.chromium.build.FlushingReTrace"
+ java_files = [ "java/org/chromium/build/FlushingReTrace.java" ]
+ deps = [
+ "//third_party/proguard:retrace_java",
+ ]
+ data = [
+ "$root_build_dir/lib.java/build/android/stacktrace/java_deobfuscate.jar",
+ "$root_build_dir/bin/java_deobfuscate",
+ ]
+}
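A usage note: the data entries above are what place the runnable wrapper at $root_build_dir/bin/java_deobfuscate. Assuming an output directory of out/Default, building the java_deobfuscate target (e.g. with ninja -C out/Default java_deobfuscate) should produce out/Default/bin/java_deobfuscate, the entry point used in the README that follows.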
diff --git a/deps/v8/build/android/stacktrace/README.md b/deps/v8/build/android/stacktrace/README.md
new file mode 100644
index 0000000000..bfa537c5ad
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/README.md
@@ -0,0 +1,23 @@
+# java_deobfuscate
+
+A wrapper around ProGuard's ReTrace tool, which:
+
+1) Updates the regular expression used to identify stack lines, and
+2) Streams its output.
+
+The second point here is what allows you to run:
+
+ adb logcat | out/Default/bin/java_deobfuscate out/Default/apks/ChromePublic.apk.mapping
+
+And have it actually show output without logcat terminating.
+
+
+# stackwalker.py
+
+Extracts Breakpad microdumps from a log file and uses `stackwalker` to symbolize
+them.
+
+
+# crashpad_stackwalker.py
+
+Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
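Tying these together, a plausible crashpad_stackwalker.py invocation, using the flags the script defines below (the device serial and paths are illustrative):

    build/android/stacktrace/crashpad_stackwalker.py \
        --device 0123456789abcdef \
        --adb-path "$(command -v adb)" \
        --build-path out/Default \
        --chrome-cache-path /data/data/com.google.android.apps.chrome/cache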
diff --git a/deps/v8/build/android/stacktrace/crashpad_stackwalker.py b/deps/v8/build/android/stacktrace/crashpad_stackwalker.py
new file mode 100755
index 0000000000..a538105be4
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/crashpad_stackwalker.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
+# All the non-trivial operations are performed by generate_breakpad_symbols.py,
+# dump_syms, minidump_dump and minidump_stackwalk.
+
+import argparse
+import logging
+import os
+import posixpath
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+_BUILD_ANDROID_PATH = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(_BUILD_ANDROID_PATH)
+import devil_chromium
+from devil.android import device_utils
+from devil.utils import timeout_retry
+
+
+def _CreateSymbolsDir(build_path, dynamic_library_names):
+ generator = os.path.normpath(
+ os.path.join(_BUILD_ANDROID_PATH, '..', '..', 'components', 'crash',
+ 'content', 'tools', 'generate_breakpad_symbols.py'))
+ syms_dir = os.path.join(build_path, 'crashpad_syms')
+ shutil.rmtree(syms_dir, ignore_errors=True)
+ os.mkdir(syms_dir)
+ for lib in dynamic_library_names:
+ unstripped_library_path = os.path.join(build_path, 'lib.unstripped', lib)
+ if not os.path.exists(unstripped_library_path):
+ continue
+ logging.info('Generating symbols for: %s', unstripped_library_path)
+ cmd = [
+ generator,
+ '--symbols-dir',
+ syms_dir,
+ '--build-dir',
+ build_path,
+ '--binary',
+ unstripped_library_path,
+ ]
+ return_code = subprocess.call(cmd)
+ if return_code != 0:
+ logging.error('Could not extract symbols, command failed: %s',
+ ' '.join(cmd))
+ return syms_dir
+
+
+def _ChooseLatestCrashpadDump(device, crashpad_dump_path):
+ if not device.PathExists(crashpad_dump_path):
+ logging.warning('Crashpad dump directory does not exist: %s',
+ crashpad_dump_path)
+ return None
+ latest = None
+ latest_timestamp = 0
+ for crashpad_file in device.ListDirectory(crashpad_dump_path):
+ if crashpad_file.endswith('.dmp'):
+ stat = device.StatPath(posixpath.join(crashpad_dump_path, crashpad_file))
+ current_timestamp = stat['st_mtime']
+ if current_timestamp > latest_timestamp:
+ latest_timestamp = current_timestamp
+ latest = crashpad_file
+ return latest
+
+
+def _ExtractLibraryNamesFromDump(build_path, dump_path):
+ default_library_name = 'libmonochrome.so'
+ dumper_path = os.path.join(build_path, 'minidump_dump')
+ if not os.access(dumper_path, os.X_OK):
+ logging.warning(
+ 'Cannot extract library names from the dump because %s was not found; '
+ 'defaulting to: %s', dumper_path, default_library_name)
+ return [default_library_name]
+ p = subprocess.Popen([dumper_path, dump_path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ # Dumper errors often do not affect stack walkability, so only warn here.
+ logging.warning('Reading minidump failed with output:\n%s', stderr)
+
+ library_names = []
+ module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
+ r'"(?P<library_name>lib[^. ]+\.so)"')
+ in_module = False
+ for line in stdout.splitlines():
+ line = line.lstrip().rstrip('\n')
+ if line == 'MDRawModule':
+ in_module = True
+ continue
+ if line == '':
+ in_module = False
+ continue
+ if in_module:
+ m = module_library_line_re.match(line)
+ if m:
+ library_names.append(m.group('library_name'))
+ if not library_names:
+ logging.warning(
+ 'Could not find any library name in the dump; '
+ 'defaulting to: %s', default_library_name)
+ return [default_library_name]
+ return library_names
+
+
+def main():
+ logging.basicConfig(level=logging.INFO)
+ parser = argparse.ArgumentParser(
+ description='Fetches Crashpad dumps from a given device, '
+ 'walks and symbolizes the stacks.')
+ parser.add_argument('--device', required=True, help='Device serial number')
+ parser.add_argument(
+ '--adb-path', required=True, help='Path to the "adb" command')
+ parser.add_argument(
+ '--build-path',
+ required=True,
+ help='Build output directory, equivalent to CHROMIUM_OUTPUT_DIR')
+ parser.add_argument(
+ '--chrome-cache-path',
+ required=True,
+ help='Directory on the device where Chrome stores cached files; '
+ 'Crashpad stores dumps in a subdirectory of it.')
+ args = parser.parse_args()
+
+ stackwalk_path = os.path.join(args.build_path, 'minidump_stackwalk')
+ if not os.path.exists(stackwalk_path):
+ logging.error('Missing minidump_stackwalk executable')
+ return 1
+
+ devil_chromium.Initialize(adb_path=args.adb_path)
+ device = device_utils.DeviceUtils(args.device)
+
+ device_crashpad_path = posixpath.join(args.chrome_cache_path, 'Crashpad',
+ 'pending')
+
+ def CrashpadDumpExists():
+ return _ChooseLatestCrashpadDump(device, device_crashpad_path)
+
+ crashpad_file = timeout_retry.WaitFor(
+ CrashpadDumpExists, wait_period=1, max_tries=9)
+ if not crashpad_file:
+ logging.error('Could not locate a crashpad dump')
+ return 1
+
+ dump_dir = tempfile.mkdtemp()
+ symbols_dir = None
+ try:
+ device.PullFile(
+ device_path=posixpath.join(device_crashpad_path, crashpad_file),
+ host_path=dump_dir)
+ dump_full_path = os.path.join(dump_dir, crashpad_file)
+ library_names = _ExtractLibraryNamesFromDump(args.build_path,
+ dump_full_path)
+ symbols_dir = _CreateSymbolsDir(args.build_path, library_names)
+ stackwalk_cmd = [stackwalk_path, dump_full_path, symbols_dir]
+ subprocess.call(stackwalk_cmd)
+ finally:
+ shutil.rmtree(dump_dir, ignore_errors=True)
+ if symbols_dir:
+ shutil.rmtree(symbols_dir, ignore_errors=True)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
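The MDRawModule scanning in _ExtractLibraryNamesFromDump() above is easy to exercise in isolation. A self-contained sketch against fabricated minidump_dump-style output (the sample text is illustrative):

    import re

    MODULE_LIBRARY_LINE_RE = re.compile(r'[(]code_file[)]\s+= '
                                        r'"(?P<library_name>lib[^. ]+\.so)"')

    SAMPLE_DUMP = '''\
    MDRawModule
      (code_file)    = "libmonochrome.so"

    MDRawModule
      (code_file)    = "libc.so"
    '''

    def extract_library_names(dump_text):
      # Collect lib*.so names, but only from lines inside MDRawModule blocks.
      names = []
      in_module = False
      for line in dump_text.splitlines():
        line = line.strip()
        if line == 'MDRawModule':
          in_module = True
        elif line == '':
          in_module = False
        elif in_module:
          m = MODULE_LIBRARY_LINE_RE.match(line)
          if m:
            names.append(m.group('library_name'))
      return names

    print(extract_library_names(SAMPLE_DUMP))
    # ['libmonochrome.so', 'libc.so']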
diff --git a/deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java b/deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
new file mode 100644
index 0000000000..baa931328b
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
@@ -0,0 +1,116 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+
+import proguard.retrace.ReTrace;
+
+/**
+ * A wrapper around ReTrace that:
+ * 1. Hardcodes a more useful line regular expression
+ * 2. Disables output buffering
+ */
+public class FlushingReTrace {
+ // E.g.: D/ConnectivityService(18029): Message
+ // E.g.: W/GCM ( 151): Message
+ // E.g.: 09-08 14:22:59.995 18029 18055 I ProcessStatsService: Message
+ // E.g.: 09-08 14:30:59.145 17731 18020 D MDnsDS : Message
+ private static final String LOGCAT_PREFIX =
+ "(?:[VDIWEF]/.*?\\( *\\d+\\): |\\d\\d-\\d\\d [0-9:. ]+[VDIWEF] .*?: )?";
+
+ // Note: Order of these sub-patterns defines their precedence.
+ // Note: Deobfuscation of methods without the presence of line numbers basically never works.
+ // There is a test for these patterns at //build/android/stacktrace/java_deobfuscate_test.py
+ private static final String LINE_PARSE_REGEX =
+ // Eagerly match logcat prefix to avoid conflicting with the patterns below.
+ LOGCAT_PREFIX
+ + "(?:"
+ // Based on the default ReTrace regex, but with whitespace allowed in file:line parentheses
+ // and "at" relaxed to also allow a ":" separator.
+ // E.g.: 06-22 13:58:02.895 4674 4674 E THREAD_STATE: bLA.a( PG : 173 )
+ // Normal stack trace lines look like:
+ // \tat org.chromium.chrome.browser.tab.Tab.handleJavaCrash(Tab.java:682)
+ + "(?:.*?(?::|\\bat)\\s+%c\\.%m\\s*\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\))|"
+ // E.g.: Caused by: java.lang.NullPointerException: Attempt to read from field 'int bLA'
+ // on a null object reference
+ + "(?:.*java\\.lang\\.NullPointerException.*[\"']%t\\s*%c\\.(?:%f|%m\\(%a\\))[\"'].*)|"
+ // E.g.: java.lang.VerifyError: bLA
+ + "(?:java\\.lang\\.VerifyError: %c)|"
+ // E.g.: java.lang.NoSuchFieldError: No instance field e of type L...; in class LbxK;
+ + "(?:java\\.lang\\.NoSuchFieldError: No instance field %f of type .*? in class L%C;)|"
+ // E.g.: Object of type Clazz was not destroyed... (See LifetimeAssert.java)
+ + "(?:.*?Object of type %c .*)|"
+ // E.g.: VFY: unable to resolve new-instance 3810 (LSome/Framework/Class;) in Lfoo/Bar;
+ + "(?:.*L%C;.*)|"
+ // E.g.: END SomeTestClass#someMethod
+ + "(?:.*?%c#%m.*?)|"
+ // Special-case for a common junit logcat message:
+ // E.g.: java.lang.NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+ + "(?:.* isTestClass for %c)|"
+ // E.g.: Caused by: java.lang.RuntimeException: Intentional Java Crash
+ + "(?:Caused by: %c:.*)|"
+ // Quoted values and lines that end with a class / class+method:
+ // E.g.: The class: Foo
+ // E.g.: INSTRUMENTATION_STATUS: class=Foo
+ // E.g.: NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+ // E.g.: Could not find class 'SomeFrameworkClass', referenced from method Foo.bar
+ // E.g.: Could not find method SomeFrameworkMethod, referenced from method Foo.bar
+ // E.g.: The member "Foo.bar"
+ // E.g.: The class "Foobar"
+ // Be careful about matching %c without %m since language tags look like class names.
+ + "(?:.*?%c\\.%m)|"
+ + "(?:.*?\"%c\\.%m\".*)|"
+ + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?\"%c\".*)|"
+ + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?%c)|"
+ // E.g.: java.lang.RuntimeException: Intentional Java Crash
+ + "(?:%c:.*)|"
+ // See if entire line matches a class name (e.g. for manual deobfuscation)
+ + "(?:%c)"
+ + ")";
+
+ private static void usage() {
+ System.err.println("Usage: echo $OBFUSCATED_CLASS | java_deobfuscate Foo.apk.mapping");
+ System.err.println("Usage: java_deobfuscate Foo.apk.mapping < foo.log");
+ System.err.println("Note: Deobfuscation of symbols outside the context of stack "
+ + "traces will work only when lines match the regular expression defined "
+ + "in FlushingReTrace.java.");
+ System.err.println("Also: Deobfuscation of method names without associated line "
+ + "numbers does not seem to work.");
+ System.exit(1);
+ }
+
+ public static void main(String[] args) {
+ if (args.length != 1 || args[0].startsWith("-")) {
+ usage();
+ }
+
+ File mappingFile = new File(args[0]);
+ try {
+ LineNumberReader reader = new LineNumberReader(
+ new BufferedReader(new InputStreamReader(System.in, "UTF-8")));
+
+ // Enabling autoFlush is the main difference from ReTrace.main().
+ boolean autoFlush = true;
+ PrintWriter writer =
+ new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"), autoFlush);
+
+ boolean verbose = false;
+ new ReTrace(LINE_PARSE_REGEX, verbose, mappingFile).retrace(reader, writer);
+ } catch (IOException ex) {
+ // Print a verbose stack trace.
+ ex.printStackTrace();
+ System.exit(1);
+ }
+
+ System.exit(0);
+ }
+}
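The autoFlush writer above is what lets callers treat java_deobfuscate as a line-by-line filter. A small sketch of driving it from Python, in the spirit of the test below (the binary and mapping paths are illustrative):

    import subprocess

    # Illustrative paths; see the BUILD.gn data entries above.
    deobfuscator = 'out/Default/bin/java_deobfuscate'
    mapping = 'out/Default/apks/ChromePublic.apk.mapping'

    proc = subprocess.Popen([deobfuscator, mapping],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            universal_newlines=True)
    proc.stdin.write('\tat bLA.a(PG:173)\n')  # An obfuscated stack frame.
    proc.stdin.flush()
    # Because ReTrace output is flushed per line here, the deobfuscated
    # frame arrives without waiting for stdin to reach EOF.
    print(proc.stdout.readline())
    proc.stdin.close()
    proc.wait()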
diff --git a/deps/v8/build/android/stacktrace/java_deobfuscate_test.py b/deps/v8/build/android/stacktrace/java_deobfuscate_test.py
new file mode 100755
index 0000000000..98b66dd02e
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/java_deobfuscate_test.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_deobfuscate."""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+# Set by command-line argument.
+_JAVA_DEOBFUSCATE_PATH = None
+
+LINE_PREFIXES = [
+ '',
+ # logcat -v threadtime
+ '09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ',
+ # logcat
+ 'W/GCM (15158): ',
+ 'W/GCM ( 158): ',
+]
+
+TEST_MAP = """\
+this.was.Deobfuscated -> FOO:
+ int[] mFontFamily -> a
+ 1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+never.Deobfuscated -> NOTFOO:
+ int[] mFontFamily -> a
+ 1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+"""
+
+TEST_DATA = [
+ '',
+ 'FOO',
+ 'FOO.bar',
+ 'Here is a FOO',
+ 'Here is a class FOO',
+ 'Here is a class FOO baz',
+ 'Here is a "FOO" baz',
+ 'Here is a type "FOO" baz',
+ 'Here is a "FOO.bar" baz',
+ 'SomeError: SomeFrameworkClass in isTestClass for FOO',
+ 'Here is a FOO.bar',
+ 'Here is a FOO.bar baz',
+ 'END FOO#bar',
+ 'new-instance 3810 (LSome/Framework/Class;) in LFOO;',
+ 'FOO: Error message',
+ 'Caused by: FOO: Error message',
+ '\tat FOO.bar(PG:1)',
+ '\t at\t FOO.bar\t (\t PG:\t 1\t )',
+ ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+ ' java.lang.NullPointerException: Attempt to invoke interface method'
+ ' \'void FOO.bar(int,android.os.Bundle)\' on a null object reference'),
+ ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+ ' \'int[] FOO.a\' on a null object reference'),
+ 'java.lang.VerifyError: FOO',
+ ('java.lang.NoSuchFieldError: No instance field a of type '
+ 'Ljava/lang/Class; in class LFOO;'),
+ 'NOTFOO: Object of type FOO was not destroyed...',
+]
+
+EXPECTED_OUTPUT = [
+ '',
+ 'this.was.Deobfuscated',
+ 'this.was.Deobfuscated.someMethod',
+ 'Here is a FOO',
+ 'Here is a class this.was.Deobfuscated',
+ 'Here is a class FOO baz',
+ 'Here is a "FOO" baz',
+ 'Here is a type "this.was.Deobfuscated" baz',
+ 'Here is a "this.was.Deobfuscated.someMethod" baz',
+ 'SomeError: SomeFrameworkClass in isTestClass for this.was.Deobfuscated',
+ 'Here is a this.was.Deobfuscated.someMethod',
+ 'Here is a FOO.bar baz',
+ 'END this.was.Deobfuscated#someMethod',
+ 'new-instance 3810 (LSome/Framework/Class;) in Lthis/was/Deobfuscated;',
+ 'this.was.Deobfuscated: Error message',
+ 'Caused by: this.was.Deobfuscated: Error message',
+ '\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)',
+ ('\t at\t this.was.Deobfuscated.someMethod\t '
+ '(\t Deobfuscated.java:\t 65\t )'),
+ ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+ ' java.lang.NullPointerException: Attempt to invoke interface method'
+ ' \'void this.was.Deobfuscated.someMethod(int,android.os.Bundle)\' on a'
+ ' null object reference'),
+ ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+ ' \'int[] this.was.Deobfuscated.mFontFamily\' on a null object reference'),
+ 'java.lang.VerifyError: this.was.Deobfuscated',
+ ('java.lang.NoSuchFieldError: No instance field mFontFamily of type '
+ 'Ljava/lang/Class; in class Lthis/was/Deobfuscated;'),
+ 'NOTFOO: Object of type this.was.Deobfuscated was not destroyed...',
+]
+TEST_DATA = [s + '\n' for s in TEST_DATA]
+EXPECTED_OUTPUT = [s + '\n' for s in EXPECTED_OUTPUT]
+
+
+class JavaDeobfuscateTest(unittest.TestCase):
+
+ def __init__(self, *args, **kwargs):
+ super(JavaDeobfuscateTest, self).__init__(*args, **kwargs)
+ self._map_file = None
+
+ def setUp(self):
+ self._map_file = tempfile.NamedTemporaryFile()
+ self._map_file.write(TEST_MAP)
+ self._map_file.flush()
+
+ def tearDown(self):
+ if self._map_file:
+ self._map_file.close()
+
+ def _testImpl(self, input_lines=None, expected_output_lines=None,
+ prefix=''):
+ self.assertTrue(bool(input_lines) == bool(expected_output_lines))
+
+ if not input_lines:
+ input_lines = [prefix + x for x in TEST_DATA]
+ if not expected_output_lines:
+ expected_output_lines = [prefix + x for x in EXPECTED_OUTPUT]
+
+ cmd = [_JAVA_DEOBFUSCATE_PATH, self._map_file.name]
+ proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ proc_output, _ = proc.communicate(''.join(input_lines))
+ actual_output_lines = proc_output.splitlines(True)
+ for actual, expected in zip(actual_output_lines, expected_output_lines):
+ self.assertTrue(
+ actual == expected or actual.replace('bar', 'someMethod') == expected,
+ msg=''.join([
+ 'Deobfuscation failed.\n',
+ ' actual: %s' % actual,
+ ' expected: %s' % expected]))
+
+ def testNoPrefix(self):
+ self._testImpl(prefix='')
+
+ def testThreadtimePrefix(self):
+ self._testImpl(prefix='09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ')
+
+ def testStandardPrefix(self):
+ self._testImpl(prefix='W/GCM (15158): ')
+
+ def testStandardPrefixWithPadding(self):
+ self._testImpl(prefix='W/GCM ( 158): ')
+
+ @unittest.skip('causes java_deobfuscate to hang, see crbug.com/876539')
+ def testIndefiniteHang(self):
+ # Test for crbug.com/876539.
+ self._testImpl(
+ input_lines=[
+ 'VFY: unable to resolve virtual method 2: LFOO;'
+ + '.onDescendantInvalidated '
+ + '(Landroid/view/View;Landroid/view/View;)V',
+ ],
+ expected_output_lines=[
+ 'VFY: unable to resolve virtual method 2: Lthis.was.Deobfuscated;'
+ + '.onDescendantInvalidated '
+ + '(Landroid/view/View;Landroid/view/View;)V',
+ ])
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--java-deobfuscate-path', type=os.path.realpath,
+ required=True)
+ known_args, unittest_args = parser.parse_known_args()
+ _JAVA_DEOBFUSCATE_PATH = known_args.java_deobfuscate_path
+ unittest_args = [sys.argv[0]] + unittest_args
+ unittest.main(argv=unittest_args)
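Since the binary under test is injected via --java-deobfuscate-path, the suite runs against whatever deobfuscator was built locally; an invocation might look like (the output directory is illustrative):

    build/android/stacktrace/java_deobfuscate_test.py \
        --java-deobfuscate-path out/Default/bin/java_deobfuscate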
diff --git a/deps/v8/build/android/stacktrace/stackwalker.py b/deps/v8/build/android/stacktrace/stackwalker.py
new file mode 100755
index 0000000000..db54354e3a
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/stackwalker.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import tempfile
+
+if __name__ == '__main__':
+ sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+
+_MICRODUMP_BEGIN = re.compile(
+ '.*google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----')
+_MICRODUMP_END = re.compile(
+ '.*google-breakpad: -----END BREAKPAD MICRODUMP-----')
+
+""" Example Microdump
+<timestamp> 6270 6131 F google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----
+<timestamp> 6270 6131 F google-breakpad: V Chrome_Android:54.0.2790.0
+...
+<timestamp> 6270 6131 F google-breakpad: -----END BREAKPAD MICRODUMP-----
+
+"""
+
+
+def GetMicroDumps(dump_path):
+ """Returns all microdumps found in given log file
+
+ Args:
+ dump_path: Path to the log file.
+
+ Returns:
+ List of all microdumps as lists of lines.
+ """
+ with open(dump_path, 'r') as d:
+ data = d.read()
+ all_dumps = []
+ current_dump = None
+ for line in data.splitlines():
+ if current_dump is not None:
+ if _MICRODUMP_END.match(line):
+ current_dump.append(line)
+ all_dumps.append(current_dump)
+ current_dump = None
+ else:
+ current_dump.append(line)
+ elif _MICRODUMP_BEGIN.match(line):
+ current_dump = []
+ current_dump.append(line)
+ return all_dumps
+
+
+def SymbolizeMicroDump(stackwalker_binary_path, dump, symbols_path):
+ """Runs stackwalker on microdump.
+
+ Runs the stackwalker binary at stackwalker_binary_path on a given microdump
+ using the symbols at symbols_path.
+
+ Args:
+ stackwalker_binary_path: Path to the stackwalker binary.
+ dump: The microdump to run the stackwalker on.
+ symbols_path: Path to the symbols file to use.
+
+ Returns:
+ Output from stackwalker tool.
+ """
+ with tempfile.NamedTemporaryFile() as tf:
+ for l in dump:
+ tf.write('%s\n' % l)
+ cmd = [stackwalker_binary_path, tf.name, symbols_path]
+ return cmd_helper.GetCmdOutput(cmd)
+
+
+def AddArguments(parser):
+ parser.add_argument('--stackwalker-binary-path', required=True,
+ help='Path to stackwalker binary.')
+ parser.add_argument('--stack-trace-path', required=True,
+ help='Path to stacktrace containing microdump.')
+ parser.add_argument('--symbols-path', required=True,
+ help='Path to symbols file.')
+ parser.add_argument('--output-file',
+ help='Path to dump stacktrace output to.')
+
+
+def _PrintAndLog(line, fp):
+ if fp:
+ fp.write('%s\n' % line)
+ print line
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ AddArguments(parser)
+ args = parser.parse_args()
+
+ micro_dumps = GetMicroDumps(args.stack_trace_path)
+ if not micro_dumps:
+ print 'No microdump found. Exiting.'
+ return 0
+
+ symbolized_dumps = []
+ for micro_dump in micro_dumps:
+ symbolized_dumps.append(SymbolizeMicroDump(
+ args.stackwalker_binary_path, micro_dump, args.symbols_path))
+
+ # Open the file before the try block so the cleanup below never references
+ # an unbound fp; the finally clause also closes it on success.
+ fp = open(args.output_file, 'w') if args.output_file else None
+ try:
+ _PrintAndLog('%d microdumps found.' % len(micro_dumps), fp)
+ _PrintAndLog('---------- Start output from stackwalker ----------', fp)
+ for index, symbolized_dump in enumerate(symbolized_dumps):
+ _PrintAndLog(
+ '------------------ Start dump %d ------------------' % index, fp)
+ _PrintAndLog(symbolized_dump, fp)
+ _PrintAndLog(
+ '------------------- End dump %d -------------------' % index, fp)
+ _PrintAndLog('----------- End output from stackwalker -----------', fp)
+ finally:
+ if fp:
+ fp.close()
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
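GetMicroDumps() above is a plain begin/end line scanner, which the example microdump quoted in the module docstring makes easy to check. A minimal sketch using the same markers (the log content is fabricated):

    import re

    BEGIN = re.compile('.*google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----')
    END = re.compile('.*google-breakpad: -----END BREAKPAD MICRODUMP-----')

    SAMPLE_LOG = [
        'unrelated logcat line',
        '01-01 00:00:00.0 1 2 F google-breakpad: '
        '-----BEGIN BREAKPAD MICRODUMP-----',
        '01-01 00:00:00.0 1 2 F google-breakpad: V Chrome_Android:54.0.2790.0',
        '01-01 00:00:00.0 1 2 F google-breakpad: '
        '-----END BREAKPAD MICRODUMP-----',
    ]

    def get_micro_dumps(lines):
      # Collect [BEGIN, ..., END] groups of lines, as GetMicroDumps() does.
      all_dumps, current = [], None
      for line in lines:
        if current is not None:
          current.append(line)
          if END.match(line):
            all_dumps.append(current)
            current = None
        elif BEGIN.match(line):
          current = [line]
      return all_dumps

    dumps = get_micro_dumps(SAMPLE_LOG)
    print('%d dump(s); first has %d lines' % (len(dumps), len(dumps[0])))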
diff --git a/deps/v8/build/android/test_runner.py b/deps/v8/build/android/test_runner.py
new file mode 100755
index 0000000000..b26bade595
--- /dev/null
+++ b/deps/v8/build/android/test_runner.py
@@ -0,0 +1,1065 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
+import argparse
+import collections
+import contextlib
+import itertools
+import logging
+import os
+import shutil
+import signal
+import sys
+import tempfile
+import threading
+import traceback
+import unittest
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See http://crbug.com/724524 and https://bugs.python.org/issue7980.
+import _strptime # pylint: disable=unused-import
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
+from devil import base_error
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import environment_factory
+from pylib.base import output_manager
+from pylib.base import output_manager_factory
+from pylib.base import test_instance_factory
+from pylib.base import test_run_factory
+from pylib.results import json_results
+from pylib.results import report_results
+from pylib.results.presentation import test_results_presentation
+from pylib.utils import logdog_helper
+from pylib.utils import logging_utils
+from pylib.utils import test_filter
+
+from py_utils import contextlib_ext
+
+_DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json'))
+
+
+def _RealPath(arg):
+ if arg.startswith('//'):
+ arg = os.path.abspath(os.path.join(host_paths.DIR_SOURCE_ROOT,
+ arg[2:].replace('/', os.sep)))
+ return os.path.realpath(arg)
+
+
+def AddTestLauncherOptions(parser):
+ """Adds arguments mirroring //base/test/launcher.
+
+ Args:
+ parser: The parser to which arguments should be added.
+ Returns:
+ The given parser.
+ """
+ parser.add_argument(
+ '--test-launcher-retry-limit',
+ '--test_launcher_retry_limit',
+ '--num_retries', '--num-retries',
+ '--isolated-script-test-launcher-retry-limit',
+ dest='num_retries', type=int, default=2,
+ help='Number of retries for a test before '
+ 'giving up (default: %(default)s).')
+ parser.add_argument(
+ '--test-launcher-summary-output',
+ '--json-results-file',
+ dest='json_results_file', type=os.path.realpath,
+ help='If set, will dump results in JSON form to the specified file. '
+ 'Note that this will also trigger saving per-test logcats to '
+ 'logdog.')
+ parser.add_argument(
+ '--test-launcher-shard-index',
+ type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
+ help='Index of the external shard to run.')
+ parser.add_argument(
+ '--test-launcher-total-shards',
+ type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+ help='Total number of external shards.')
+
+ test_filter.AddFilterOptions(parser)
+
+ return parser
+
+
+def AddCommandLineOptions(parser):
+ """Adds arguments to support passing command-line flags to the device."""
+ parser.add_argument(
+ '--device-flags-file',
+ type=os.path.realpath,
+ help='The relative filepath to a file containing '
+ 'command-line flags to set on the device')
+ parser.add_argument(
+ '--use-apk-under-test-flags-file',
+ action='store_true',
+ help='Whether to use the flags file for the apk under test. If set, '
+ "the filename will be looked up in the APK's PackageInfo.")
+ parser.set_defaults(allow_unknown=True)
+ parser.set_defaults(command_line_flags=None)
+
+
+def AddTracingOptions(parser):
+ # TODO(shenghuazhang): Move this into AddCommonOptions once it's supported
+ # for all test types.
+ parser.add_argument(
+ '--trace-output',
+ metavar='FILENAME', type=os.path.realpath,
+ help='Path to save test_runner trace json output to.')
+
+ parser.add_argument(
+ '--trace-all',
+ action='store_true',
+ help='Whether to trace all function calls.')
+
+
+def AddCommonOptions(parser):
+ """Adds all common options to |parser|."""
+
+ default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+
+ debug_or_release_group = parser.add_mutually_exclusive_group()
+ debug_or_release_group.add_argument(
+ '--debug',
+ action='store_const', const='Debug', dest='build_type',
+ default=default_build_type,
+ help='If set, run test suites under out/Debug. '
+ 'Default is env var BUILDTYPE or Debug.')
+ debug_or_release_group.add_argument(
+ '--release',
+ action='store_const', const='Release', dest='build_type',
+ help='If set, run test suites under out/Release. '
+ 'Default is env var BUILDTYPE or Debug.')
+
+ parser.add_argument(
+ '--break-on-failure', '--break_on_failure',
+ dest='break_on_failure', action='store_true',
+ help='Whether to break on failure.')
+
+ # TODO(jbudorick): Remove this once everything has switched to platform
+ # mode.
+ parser.add_argument(
+ '--enable-platform-mode',
+ action='store_true',
+ help='Run the test scripts in platform mode, which '
+ 'conceptually separates the test runner from the '
+ '"device" (local or remote, real or emulated) on '
+ 'which the tests are running. [experimental]')
+
+ parser.add_argument(
+ '-e', '--environment',
+ default='local', choices=constants.VALID_ENVIRONMENTS,
+ help='Test environment to run in (default: %(default)s).')
+
+ parser.add_argument(
+ '--local-output',
+ action='store_true',
+ help='Whether to archive test output locally and generate '
+ 'a local results detail page.')
+
+ class FastLocalDevAction(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ namespace.verbose_count = max(namespace.verbose_count, 1)
+ namespace.num_retries = 0
+ namespace.enable_device_cache = True
+ namespace.enable_concurrent_adb = True
+ namespace.skip_clear_data = True
+ namespace.extract_test_list_from_filter = True
+
+ parser.add_argument(
+ '--fast-local-dev',
+ type=bool, nargs=0, action=FastLocalDevAction,
+ help='Alias for: --verbose --num-retries=0 '
+ '--enable-device-cache --enable-concurrent-adb '
+ '--skip-clear-data --extract-test-list-from-filter')
+
+ # TODO(jbudorick): Remove this once downstream bots have switched to
+ # api.test_results.
+ parser.add_argument(
+ '--flakiness-dashboard-server',
+ dest='flakiness_dashboard_server',
+ help=argparse.SUPPRESS)
+ parser.add_argument(
+ '--gs-results-bucket',
+ help='Google Storage bucket to upload results to.')
+
+ parser.add_argument(
+ '--output-directory',
+ dest='output_directory', type=os.path.realpath,
+ help='Path to the directory in which build files are'
+ ' located (must include build type). This will take'
+ ' precedence over --debug and --release')
+ parser.add_argument(
+ '-v', '--verbose',
+ dest='verbose_count', default=0, action='count',
+ help='Verbose level (multiple times for more)')
+
+ parser.add_argument(
+ '--repeat', '--gtest_repeat', '--gtest-repeat',
+ '--isolated-script-test-repeat',
+ dest='repeat', type=int, default=0,
+ help='Number of times to repeat the specified set of tests.')
+ # This is currently only implemented for gtests and instrumentation tests.
+ parser.add_argument(
+ '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
+ '--isolated-script-test-also-run-disabled-tests',
+ dest='run_disabled', action='store_true',
+ help='Also run disabled tests if applicable.')
+
+ AddTestLauncherOptions(parser)
+
+
+def ProcessCommonOptions(args):
+ """Processes and handles all common options."""
+ run_tests_helper.SetLogLevel(args.verbose_count, add_handler=False)
+ # pylint: disable=redefined-variable-type
+ if args.verbose_count > 0:
+ handler = logging_utils.ColorStreamHandler()
+ else:
+ handler = logging.StreamHandler(sys.stdout)
+ # pylint: enable=redefined-variable-type
+ handler.setFormatter(run_tests_helper.CustomFormatter())
+ logging.getLogger().addHandler(handler)
+
+ constants.SetBuildType(args.build_type)
+ if args.output_directory:
+ constants.SetOutputDirectory(args.output_directory)
+
+
+def AddDeviceOptions(parser):
+ """Adds device options to |parser|."""
+
+ parser = parser.add_argument_group('device arguments')
+
+ parser.add_argument(
+ '--adb-path',
+ type=os.path.realpath,
+ help='Specify the absolute path of the adb binary that '
+ 'should be used.')
+ parser.add_argument(
+ '--blacklist-file',
+ type=os.path.realpath,
+ help='Device blacklist file.')
+ parser.add_argument(
+ '-d', '--device', nargs='+',
+ dest='test_devices',
+ help='Target device(s) for the test suite to run on.')
+ parser.add_argument(
+ '--enable-concurrent-adb',
+ action='store_true',
+ help='Run multiple adb commands at the same time, even '
+ 'for the same device.')
+ parser.add_argument(
+ '--enable-device-cache',
+ action='store_true',
+ help='Cache device state to disk between runs')
+ parser.add_argument(
+ '--skip-clear-data',
+ action='store_true',
+ help='Do not wipe app data between tests. Use this to '
+ 'speed up local development; never use it on bots '
+ '(it increases flakiness).')
+ parser.add_argument(
+ '--recover-devices',
+ action='store_true',
+ help='Attempt to recover devices prior to the final retry. Warning: '
+ 'this will cause all devices to reboot.')
+ parser.add_argument(
+ '--tool',
+ dest='tool',
+ help='Run the test under a tool '
+ '(use --tool help to list them)')
+
+ parser.add_argument(
+ '--upload-logcats-file',
+ action='store_true',
+ dest='upload_logcats_file',
+ help='Whether to upload logcat file to logdog.')
+
+ logcat_output_group = parser.add_mutually_exclusive_group()
+ logcat_output_group.add_argument(
+ '--logcat-output-dir', type=os.path.realpath,
+ help='If set, will dump logcats recorded during test run to directory. '
+ 'File names will be the device ids with timestamps.')
+ logcat_output_group.add_argument(
+ '--logcat-output-file', type=os.path.realpath,
+ help='If set, will merge logcats recorded during test run and dump them '
+ 'to the specified file.')
+
+
+def AddGTestOptions(parser):
+ """Adds gtest options to |parser|."""
+
+ parser = parser.add_argument_group('gtest arguments')
+
+ parser.add_argument(
+ '--app-data-file',
+ action='append', dest='app_data_files',
+ help='A file path relative to the app data directory '
+ 'that should be saved to the host.')
+ parser.add_argument(
+ '--app-data-file-dir',
+ help='Host directory to which app data files will be'
+ ' saved. Used with --app-data-file.')
+ parser.add_argument(
+ '--isolated-script-test-perf-output',
+ help='If present, store chartjson results on this path.')
+ parser.add_argument(
+ '--delete-stale-data',
+ dest='delete_stale_data', action='store_true',
+ help='Delete stale test data on the device.')
+ parser.add_argument(
+ '--enable-xml-result-parsing',
+ action='store_true', help=argparse.SUPPRESS)
+ parser.add_argument(
+ '--executable-dist-dir',
+ type=os.path.realpath,
+ help="Path to executable's dist directory for native"
+ " (non-apk) tests.")
+ parser.add_argument(
+ '--extract-test-list-from-filter',
+ action='store_true',
+ help='When a test filter is specified, and the list of '
+ 'tests can be determined from it, skip querying the '
+ 'device for the list of all tests. Speeds up local '
+ 'development, but is not safe to use on bots '
+ '(http://crbug.com/549214).')
+ parser.add_argument(
+ '--gs-test-artifacts-bucket',
+ help=('If present, test artifacts will be uploaded to this Google '
+ 'Storage bucket.'))
+ parser.add_argument(
+ '--runtime-deps-path',
+ dest='runtime_deps_path', type=os.path.realpath,
+ help='Runtime data dependency file from GN.')
+ parser.add_argument(
+ '-t', '--shard-timeout',
+ dest='shard_timeout', type=int, default=120,
+ help='Timeout to wait for each test (default: %(default)s).')
+ parser.add_argument(
+ '--store-tombstones',
+ dest='store_tombstones', action='store_true',
+ help='Add tombstones in results if crash.')
+ parser.add_argument(
+ '-s', '--suite',
+ dest='suite_name', nargs='+', metavar='SUITE_NAME', required=True,
+ help='Executable name of the test suite to run.')
+ parser.add_argument(
+ '--test-apk-incremental-install-json',
+ type=os.path.realpath,
+ help='Path to install json for the test apk.')
+ parser.add_argument(
+ '-w', '--wait-for-java-debugger', action='store_true',
+ help='Wait for java debugger to attach before running any application '
+ 'code. Also disables test timeouts and sets retries=0.')
+
+
+def AddInstrumentationTestOptions(parser):
+ """Adds Instrumentation test options to |parser|."""
+
+ parser = parser.add_argument_group('instrumentation arguments')
+
+ parser.add_argument(
+ '--additional-apk',
+ action='append', dest='additional_apks', default=[],
+ type=_RealPath,
+ help='Additional apk that must be installed on '
+ 'the device when the tests are run')
+ parser.add_argument(
+ '-A', '--annotation',
+ dest='annotation_str',
+ help='Comma-separated list of annotations. Run only tests with any of '
+ 'the given annotations. An annotation can be either a key or a '
+ 'key-values pair. A test that has no annotation is considered '
+ '"SmallTest".')
+ # TODO(jbudorick): Remove support for name-style APK specification once
+ # bots are no longer doing it.
+ parser.add_argument(
+ '--apk-under-test',
+ help='Path or name of the apk under test.')
+ parser.add_argument(
+ '--coverage-dir',
+ type=os.path.realpath,
+ help='Directory in which to place all generated '
+ 'EMMA coverage files.')
+ parser.add_argument(
+ '--delete-stale-data',
+ action='store_true', dest='delete_stale_data',
+ help='Delete stale test data on the device.')
+ parser.add_argument(
+ '--disable-dalvik-asserts',
+ dest='set_asserts', action='store_false', default=True,
+ help='Removes the dalvik.vm.enableassertions property')
+ parser.add_argument(
+ '--enable-java-deobfuscation',
+ action='store_true',
+ help='Deobfuscate java stack traces in test output and logcat.')
+ parser.add_argument(
+ '-E', '--exclude-annotation',
+ dest='exclude_annotation_str',
+ help='Comma-separated list of annotations. Exclude tests with these '
+ 'annotations.')
+ def package_replacement(arg):
+ split_arg = arg.split(',')
+ if len(split_arg) != 2:
+ raise argparse.ArgumentError(
+ arg,
+ 'Expected two comma-separated strings for --replace-system-package, '
+ 'received %d' % len(split_arg))
+ PackageReplacement = collections.namedtuple('PackageReplacement',
+ ['package', 'replacement_apk'])
+ return PackageReplacement(package=split_arg[0],
+ replacement_apk=_RealPath(split_arg[1]))
+ parser.add_argument(
+ '--replace-system-package',
+ type=package_replacement, default=None,
+ help='Specifies a system package to replace with a given APK for the '
+ 'duration of the test. Given as a comma-separated pair of strings, '
+ 'the first element being the package and the second the path to the '
+ 'replacement APK. Only supports replacing one package. Example: '
+ '--replace-system-package com.example.app,path/to/some.apk')
+
+ parser.add_argument(
+ '--use-webview-provider',
+ type=_RealPath, default=None,
+ help='Use this apk as the webview provider during test. '
+ 'The original provider will be restored if possible, '
+ "on Nougat the provider can't be determined and so "
+ 'the system will choose the default provider.')
+ parser.add_argument(
+ '--runtime-deps-path',
+ dest='runtime_deps_path', type=os.path.realpath,
+ help='Runtime data dependency file from GN.')
+ parser.add_argument(
+ '--screenshot-directory',
+ dest='screenshot_dir', type=os.path.realpath,
+ help='Capture screenshots of test failures')
+ parser.add_argument(
+ '--shared-prefs-file',
+ dest='shared_prefs_file', type=_RealPath,
+ help='The relative path to a file containing JSON list of shared '
+ 'preference files to edit and how to do so. Example list: '
+ '[{'
+ ' "package": "com.package.example",'
+ ' "filename": "ExampleSettings.xml",'
+ ' "set": {'
+ ' "boolean_key_in_xml": true,'
+ ' "string_key_in_xml": "string_value"'
+ ' },'
+ ' "remove": ['
+ ' "key_in_xml_to_remove"'
+ ' ]'
+ '}]')
+ parser.add_argument(
+ '--store-tombstones',
+ action='store_true', dest='store_tombstones',
+ help='Add tombstones in results if crash.')
+ parser.add_argument(
+ '--strict-mode',
+ dest='strict_mode', default='testing',
+ help='StrictMode command-line flag set on the device, '
+ 'death/testing to kill the process, off to stop '
+ 'checking, flash to flash only. (default: %(default)s)')
+ parser.add_argument(
+ '--test-apk',
+ required=True,
+ help='Path or name of the apk containing the tests.')
+ parser.add_argument(
+ '--test-jar',
+ help='Path of jar containing test java files.')
+ parser.add_argument(
+ '--timeout-scale',
+ type=float,
+ help='Factor by which timeouts should be scaled.')
+ parser.add_argument(
+ '-w', '--wait-for-java-debugger', action='store_true',
+ help='Wait for java debugger to attach before running any application '
+ 'code. Also disables test timeouts and sets retries=0.')
+
+ # These arguments are suppressed from the help text because they should
+ # only ever be specified by an intermediate script.
+ parser.add_argument(
+ '--apk-under-test-incremental-install-json',
+ help=argparse.SUPPRESS)
+ parser.add_argument(
+ '--test-apk-incremental-install-json',
+ type=os.path.realpath,
+ help=argparse.SUPPRESS)
+
+
+def AddJUnitTestOptions(parser):
+ """Adds junit test options to |parser|."""
+
+ parser = parser.add_argument_group('junit arguments')
+
+ parser.add_argument(
+ '--jacoco', action='store_true',
+ help='Generate jacoco report.')
+ parser.add_argument(
+ '--coverage-dir', type=os.path.realpath,
+ help='Directory to store coverage info.')
+ parser.add_argument(
+ '--package-filter',
+ help='Filters tests by package.')
+ parser.add_argument(
+ '--runner-filter',
+ help='Filters tests by runner class. Must be fully qualified.')
+ parser.add_argument(
+ '-s', '--test-suite', required=True,
+ help='JUnit test suite to run.')
+ debug_group = parser.add_mutually_exclusive_group()
+ debug_group.add_argument(
+ '-w', '--wait-for-java-debugger', action='store_const', const='8701',
+ dest='debug_socket', help='Alias for --debug-socket=8701')
+ debug_group.add_argument(
+ '--debug-socket',
+ help='Wait for java debugger to attach at specified socket address '
+ 'before running any application code. Also disables test timeouts '
+ 'and sets retries=0.')
+
+ # These arguments are for Android Robolectric tests.
+ parser.add_argument(
+ '--android-manifest-path',
+ help='Path to Android Manifest to configure Robolectric.')
+ parser.add_argument(
+ '--package-name',
+ help='Default app package name for Robolectric tests.')
+ parser.add_argument(
+ '--resource-zip',
+ action='append', dest='resource_zips', default=[],
+ help='Path to resource zips to configure Robolectric.')
+ parser.add_argument(
+ '--robolectric-runtime-deps-dir',
+ help='Path to runtime deps for Robolectric.')
+
+
+def AddLinkerTestOptions(parser):
+
+ parser = parser.add_argument_group('linker arguments')
+
+ parser.add_argument(
+ '--test-apk',
+ type=os.path.realpath,
+ help='Path to the linker test APK.')
+
+
+def AddMonkeyTestOptions(parser):
+ """Adds monkey test options to |parser|."""
+
+ parser = parser.add_argument_group('monkey arguments')
+
+ parser.add_argument(
+ '--browser',
+ required=True, choices=constants.PACKAGE_INFO.keys(),
+ metavar='BROWSER', help='Browser under test.')
+ parser.add_argument(
+ '--category',
+ nargs='*', dest='categories', default=[],
+ help='A list of allowed categories. Monkey will only visit activities '
+ 'that are listed with one of the specified categories.')
+ parser.add_argument(
+ '--event-count',
+ default=10000, type=int,
+ help='Number of events to generate (default: %(default)s).')
+ parser.add_argument(
+ '--seed',
+ type=int,
+ help='Seed value for pseudo-random generator. Same seed value generates '
+ 'the same sequence of events. Seed is randomized by default.')
+ parser.add_argument(
+ '--throttle',
+ default=100, type=int,
+ help='Delay between events (ms) (default: %(default)s). ')
+
+
+def AddPerfTestOptions(parser):
+ """Adds perf test options to |parser|."""
+
+ parser = parser.add_argument_group('perf arguments')
+
+ class SingleStepAction(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ if values and not namespace.single_step:
+ parser.error('single step command provided, '
+ 'but --single-step not specified.')
+ elif namespace.single_step and not values:
+ parser.error('--single-step specified, '
+ 'but no single step command provided.')
+ setattr(namespace, self.dest, values)
+
+ step_group = parser.add_mutually_exclusive_group(required=True)
+ # TODO(jbudorick): Revise --single-step to use argparse.REMAINDER.
+ # This requires removing "--" from client calls.
+ step_group.add_argument(
+ '--print-step',
+ help='The name of a previously executed perf step to print.')
+ step_group.add_argument(
+ '--single-step',
+ action='store_true',
+ help='Execute the given command with retries, but only print the result '
+ 'for the "most successful" round.')
+ step_group.add_argument(
+ '--steps',
+ help='JSON file containing the list of commands to run.')
+
+ parser.add_argument(
+ '--collect-chartjson-data',
+ action='store_true',
+ help='Cache the telemetry chartjson output from each step for later use.')
+ parser.add_argument(
+ '--dry-run',
+ action='store_true',
+ help='Just print the steps without executing.')
+ # TODO(rnephew): Remove this when everything moves to new option in platform
+ # mode.
+ parser.add_argument(
+ '--get-output-dir-archive',
+ metavar='FILENAME', type=os.path.realpath,
+ help='Write the cached output directory archived by a step into the'
+ ' given ZIP file.')
+ parser.add_argument(
+ '--known-devices-file',
+ help='Path to known device list.')
+ # Uses 0.1 degrees C because that's what Android does.
+ parser.add_argument(
+ '--max-battery-temp',
+ type=int,
+ help='Only start tests when the battery is at or below the given '
+ 'temperature (0.1 C)')
+ parser.add_argument(
+ '--min-battery-level',
+ type=int,
+ help='Only start tests when the battery is charged above the '
+ 'given level.')
+ parser.add_argument(
+ '--no-timeout',
+ action='store_true',
+ help='Do not impose a timeout. Each perf step is responsible for '
+ 'implementing the timeout logic.')
+ parser.add_argument(
+ '--output-chartjson-data',
+ type=os.path.realpath,
+ help='Writes telemetry chartjson formatted output into the given file.')
+ parser.add_argument(
+ '--output-dir-archive-path',
+ metavar='FILENAME', type=os.path.realpath,
+ help='Write the cached output directory archived by a step into the'
+ ' given ZIP file.')
+ parser.add_argument(
+ '--output-json-list',
+ type=os.path.realpath,
+ help='Writes a JSON list of information for each --steps into the given '
+ 'file. Information includes runtime and device affinity for each '
+ '--steps.')
+ parser.add_argument(
+ '--write-buildbot-json',
+ action='store_true',
+ help='Whether to output buildbot json.')
+
+ parser.add_argument(
+ 'single_step_command',
+ nargs='*', action=SingleStepAction,
+ help='If --single-step is specified, the command to run.')
+
+
+def AddPythonTestOptions(parser):
+
+ parser = parser.add_argument_group('python arguments')
+
+ parser.add_argument(
+ '-s', '--suite',
+ dest='suite_name', metavar='SUITE_NAME',
+ choices=constants.PYTHON_UNIT_TEST_SUITES.keys(),
+ help='Name of the test suite to run.')
+
+
+def _RunPythonTests(args):
+ """Subcommand of RunTestsCommand which runs python unit tests."""
+ suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
+ suite_path = suite_vars['path']
+ suite_test_modules = suite_vars['test_modules']
+
+ sys.path = [suite_path] + sys.path
+ try:
+ suite = unittest.TestSuite()
+ suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
+ for m in suite_test_modules)
+ runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
+ return 0 if runner.run(suite).wasSuccessful() else 1
+ finally:
+ sys.path = sys.path[1:]
+
+
+_DEFAULT_PLATFORM_MODE_TESTS = ['gtest', 'instrumentation', 'junit',
+ 'linker', 'monkey', 'perf']
+
+
+def RunTestsCommand(args):
+ """Checks test type and dispatches to the appropriate function.
+
+ Args:
+ args: argparse.Namespace object.
+
+ Returns:
+ Integer indicating the exit code.
+
+ Raises:
+ Exception: Unknown command name passed in, or an exception from an
+ individual test runner.
+ """
+ command = args.command
+
+ ProcessCommonOptions(args)
+ logging.info('command: %s', ' '.join(sys.argv))
+ if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
+ return RunTestsInPlatformMode(args)
+
+ if command == 'python':
+ return _RunPythonTests(args)
+ else:
+ raise Exception('Unknown test type.')
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+ # TODO(jbudorick): Add support for more test types.
+ 'gtest',
+ 'instrumentation',
+ 'junit',
+ 'linker',
+ 'monkey',
+ 'perf',
+]
+
+
+def RunTestsInPlatformMode(args):
+
+ def infra_error(message):
+ logging.fatal(message)
+ sys.exit(constants.INFRA_EXIT_CODE)
+
+ if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+ infra_error('%s is not yet supported in platform mode' % args.command)
+
+ ### Set up sigterm handler.
+
+ contexts_to_notify_on_sigterm = []
+ def unexpected_sigterm(_signum, _frame):
+ msg = [
+ 'Received SIGTERM. Shutting down.',
+ ]
+ for live_thread in threading.enumerate():
+ # pylint: disable=protected-access
+ thread_stack = ''.join(traceback.format_stack(
+ sys._current_frames()[live_thread.ident]))
+ msg.extend([
+ 'Thread "%s" (ident: %s) is currently running:' % (
+ live_thread.name, live_thread.ident),
+ thread_stack])
+
+ for context in contexts_to_notify_on_sigterm:
+ context.ReceivedSigterm()
+
+ infra_error('\n'.join(msg))
+
+ signal.signal(signal.SIGTERM, unexpected_sigterm)
+
+ ### Set up results handling.
+ # TODO(jbudorick): Rewrite results handling.
+
+ # all_raw_results is a list of lists of
+ # base_test_result.TestRunResults objects. Each instance of
+ # TestRunResults contains all test results produced by a single try,
+ # while each list of TestRunResults contains all tries in a single
+ # iteration.
+ all_raw_results = []
+
+ # all_iteration_results is a list of base_test_result.TestRunResults
+ # objects. Each instance of TestRunResults contains the last test
+ # result for each test run in that iteration.
+ all_iteration_results = []
+
+ global_results_tags = set()
+
+ json_file = tempfile.NamedTemporaryFile(delete=False)
+ json_file.close()
+
+ @contextlib.contextmanager
+ def json_finalizer():
+ try:
+ yield
+ finally:
+ if args.json_results_file and os.path.exists(json_file.name):
+ shutil.move(json_file.name, args.json_results_file)
+ else:
+ os.remove(json_file.name)
+
+ @contextlib.contextmanager
+ def json_writer():
+ try:
+ yield
+ except Exception:
+ global_results_tags.add('UNRELIABLE_RESULTS')
+ raise
+ finally:
+ json_results.GenerateJsonResultsFile(
+ all_raw_results, json_file.name,
+ global_tags=list(global_results_tags),
+ indent=2)
+
+ @contextlib.contextmanager
+ def upload_logcats_file():
+ try:
+ yield
+ finally:
+ if not args.logcat_output_file:
+ logging.critical('Cannot upload logcat file: no file specified.')
+ elif not os.path.exists(args.logcat_output_file):
+ logging.critical("Cannot upload logcat file: file doesn't exist.")
+ else:
+ with open(args.logcat_output_file) as src:
+ dst = logdog_helper.open_text('unified_logcats')
+ if dst:
+ shutil.copyfileobj(src, dst)
+ dst.close()
+ logging.critical(
+ 'Logcat: %s', logdog_helper.get_viewer_url('unified_logcats'))
+
+ logcats_uploader = contextlib_ext.Optional(
+ upload_logcats_file(),
+ 'upload_logcats_file' in args and args.upload_logcats_file)
+
+ ### Set up test objects.
+
+ out_manager = output_manager_factory.CreateOutputManager(args)
+ env = environment_factory.CreateEnvironment(
+ args, out_manager, infra_error)
+ test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
+ test_run = test_run_factory.CreateTestRun(
+ args, env, test_instance, infra_error)
+
+ contexts_to_notify_on_sigterm.append(env)
+ contexts_to_notify_on_sigterm.append(test_run)
+
+ ### Run.
+ with out_manager, json_finalizer():
+ with json_writer(), logcats_uploader, env, test_instance, test_run:
+
+ repetitions = (xrange(args.repeat + 1) if args.repeat >= 0
+ else itertools.count())
+ result_counts = collections.defaultdict(
+ lambda: collections.defaultdict(int))
+ iteration_count = 0
+ for _ in repetitions:
+ # raw_results will be populated with base_test_result.TestRunResults by
+ # test_run.RunTests(). It is immediately added to all_raw_results so
+ # that in the event of an exception, all_raw_results will already have
+ # the up-to-date results and those can be written to disk.
+ raw_results = []
+ all_raw_results.append(raw_results)
+
+ test_run.RunTests(raw_results)
+ if not raw_results:
+ all_raw_results.pop()
+ continue
+
+ iteration_results = base_test_result.TestRunResults()
+ for r in reversed(raw_results):
+ iteration_results.AddTestRunResults(r)
+ all_iteration_results.append(iteration_results)
+
+ iteration_count += 1
+ for r in iteration_results.GetAll():
+ result_counts[r.GetName()][r.GetType()] += 1
+ report_results.LogFull(
+ results=iteration_results,
+ test_type=test_instance.TestType(),
+ test_package=test_run.TestPackage(),
+ annotation=getattr(args, 'annotations', None),
+ flakiness_server=getattr(args, 'flakiness_dashboard_server',
+ None))
+ if args.break_on_failure and not iteration_results.DidRunPass():
+ break
+
+ if iteration_count > 1:
+        # Display summary results. Per-test counts are only shown for tests
+        # that did not pass in every run.
+ all_pass = 0
+ tot_tests = 0
+ for test_name in result_counts:
+ tot_tests += 1
+ if any(result_counts[test_name][x] for x in (
+ base_test_result.ResultType.FAIL,
+ base_test_result.ResultType.CRASH,
+ base_test_result.ResultType.TIMEOUT,
+ base_test_result.ResultType.UNKNOWN)):
+ logging.critical(
+ '%s: %s',
+ test_name,
+ ', '.join('%s %s' % (str(result_counts[test_name][i]), i)
+ for i in base_test_result.ResultType.GetTypes()))
+ else:
+ all_pass += 1
+
+ logging.critical('%s of %s tests passed in all %s runs',
+ str(all_pass),
+ str(tot_tests),
+ str(iteration_count))
+
+ if args.local_output:
+ with out_manager.ArchivedTempfile(
+ 'test_results_presentation.html',
+ 'test_results_presentation',
+ output_manager.Datatype.HTML) as results_detail_file:
+ result_html_string, _, _ = test_results_presentation.result_details(
+ json_path=json_file.name,
+ test_name=args.command,
+ cs_base_url='http://cs.chromium.org',
+ local_output=True)
+ results_detail_file.write(result_html_string)
+ results_detail_file.flush()
+ logging.critical('TEST RESULTS: %s', results_detail_file.Link())
+
+ ui_screenshots = test_results_presentation.ui_screenshot_set(
+ json_file.name)
+ if ui_screenshots:
+ with out_manager.ArchivedTempfile(
+ 'ui_screenshots.json',
+ 'ui_capture',
+ output_manager.Datatype.JSON) as ui_screenshot_file:
+ ui_screenshot_file.write(ui_screenshots)
+ logging.critical('UI Screenshots: %s', ui_screenshot_file.Link())
+
+ if args.command == 'perf' and (args.steps or args.single_step):
+ return 0
+
+ return (0 if all(r.DidRunPass() for r in all_iteration_results)
+ else constants.ERROR_EXIT_CODE)
+
+
+def DumpThreadStacks(_signal, _frame):
+ for thread in threading.enumerate():
+ reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+ signal.signal(signal.SIGUSR1, DumpThreadStacks)
+
+ parser = argparse.ArgumentParser()
+ command_parsers = parser.add_subparsers(
+ title='test types', dest='command')
+
+ subp = command_parsers.add_parser(
+ 'gtest',
+ help='googletest-based C++ tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddGTestOptions(subp)
+ AddTracingOptions(subp)
+ AddCommandLineOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'instrumentation',
+ help='InstrumentationTestCase-based Java tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddInstrumentationTestOptions(subp)
+ AddTracingOptions(subp)
+ AddCommandLineOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'junit',
+ help='JUnit4-based Java tests')
+ AddCommonOptions(subp)
+ AddJUnitTestOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'linker',
+ help='linker tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddLinkerTestOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'monkey',
+ help="tests based on Android's monkey command")
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddMonkeyTestOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'perf',
+ help='performance tests')
+ AddCommonOptions(subp)
+ AddDeviceOptions(subp)
+ AddPerfTestOptions(subp)
+ AddTracingOptions(subp)
+
+ subp = command_parsers.add_parser(
+ 'python',
+ help='python tests based on unittest.TestCase')
+ AddCommonOptions(subp)
+ AddPythonTestOptions(subp)
+
+ args, unknown_args = parser.parse_known_args()
+ if unknown_args:
+ if hasattr(args, 'allow_unknown') and args.allow_unknown:
+ args.command_line_flags = unknown_args
+ else:
+ parser.error('unrecognized arguments: %s' % ' '.join(unknown_args))
+
+ # --replace-system-package has the potential to cause issues if
+ # --enable-concurrent-adb is set, so disallow that combination
+ if (hasattr(args, 'replace_system_package') and
+ hasattr(args, 'enable_concurrent_adb') and args.replace_system_package and
+ args.enable_concurrent_adb):
+ parser.error('--replace-system-package and --enable-concurrent-adb cannot '
+ 'be used together')
+
+ # --use-webview-provider has the potential to cause issues if
+ # --enable-concurrent-adb is set, so disallow that combination
+ if (hasattr(args, 'use_webview_provider') and
+ hasattr(args, 'enable_concurrent_adb') and args.use_webview_provider and
+ args.enable_concurrent_adb):
+ parser.error('--use-webview-provider and --enable-concurrent-adb cannot '
+ 'be used together')
+
+ if (getattr(args, 'jacoco', False) and
+ not getattr(args, 'coverage_dir', '')):
+ parser.error('--jacoco requires --coverage-dir')
+
+ if (hasattr(args, 'debug_socket') or
+ (hasattr(args, 'wait_for_java_debugger') and
+ args.wait_for_java_debugger)):
+ args.num_retries = 0
+
+ try:
+ return RunTestsCommand(args)
+ except base_error.BaseError as e:
+ logging.exception('Error occurred.')
+ if e.is_infra_error:
+ return constants.INFRA_EXIT_CODE
+ return constants.ERROR_EXIT_CODE
+ except: # pylint: disable=W0702
+ logging.exception('Unrecognized error occurred.')
+ return constants.ERROR_EXIT_CODE
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/test_runner.pydeps b/deps/v8/build/android/test_runner.pydeps
new file mode 100644
index 0000000000..ac5be4658e
--- /dev/null
+++ b/deps/v8/build/android/test_runner.pydeps
@@ -0,0 +1,210 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/test_runner.pydeps build/android/test_runner.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_proto_classes.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_time.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/battery_utils.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/crash_handler.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_blacklist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_list.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/forwarder.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/ports.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/device_recovery.py
+../../third_party/catapult/devil/devil/android/tools/device_status.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/android/tools/system_app.py
+../../third_party/catapult/devil/devil/android/tools/webview_app.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/__init__.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/base_tool.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/file_utils.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../third_party/colorama/src/colorama/__init__.py
+../../third_party/colorama/src/colorama/ansi.py
+../../third_party/colorama/src/colorama/ansitowin32.py
+../../third_party/colorama/src/colorama/initialise.py
+../../third_party/colorama/src/colorama/win32.py
+../../third_party/colorama/src/colorama/winterm.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../../tools/swarming_client/libs/__init__.py
+../../tools/swarming_client/libs/logdog/__init__.py
+../../tools/swarming_client/libs/logdog/bootstrap.py
+../../tools/swarming_client/libs/logdog/stream.py
+../../tools/swarming_client/libs/logdog/streamname.py
+../../tools/swarming_client/libs/logdog/varint.py
+../gn_helpers.py
+../util/lib/common/chrome_test_server_spawner.py
+../util/lib/common/unittest_util.py
+devil_chromium.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/base/__init__.py
+pylib/base/base_test_result.py
+pylib/base/environment.py
+pylib/base/environment_factory.py
+pylib/base/output_manager.py
+pylib/base/output_manager_factory.py
+pylib/base/test_collection.py
+pylib/base/test_exception.py
+pylib/base/test_instance.py
+pylib/base/test_instance_factory.py
+pylib/base/test_run.py
+pylib/base/test_run_factory.py
+pylib/base/test_server.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/gtest/__init__.py
+pylib/gtest/gtest_test_instance.py
+pylib/instrumentation/__init__.py
+pylib/instrumentation/instrumentation_parser.py
+pylib/instrumentation/instrumentation_test_instance.py
+pylib/instrumentation/test_result.py
+pylib/junit/__init__.py
+pylib/junit/junit_test_instance.py
+pylib/linker/__init__.py
+pylib/linker/linker_test_instance.py
+pylib/linker/test_case.py
+pylib/local/__init__.py
+pylib/local/device/__init__.py
+pylib/local/device/local_device_environment.py
+pylib/local/device/local_device_gtest_run.py
+pylib/local/device/local_device_instrumentation_test_run.py
+pylib/local/device/local_device_linker_test_run.py
+pylib/local/device/local_device_monkey_test_run.py
+pylib/local/device/local_device_perf_test_run.py
+pylib/local/device/local_device_test_run.py
+pylib/local/local_test_server_spawner.py
+pylib/local/machine/__init__.py
+pylib/local/machine/local_machine_environment.py
+pylib/local/machine/local_machine_junit_test_run.py
+pylib/monkey/__init__.py
+pylib/monkey/monkey_test_instance.py
+pylib/output/__init__.py
+pylib/output/local_output_manager.py
+pylib/output/noop_output_manager.py
+pylib/output/remote_output_manager.py
+pylib/perf/__init__.py
+pylib/perf/perf_test_instance.py
+pylib/results/__init__.py
+pylib/results/flakiness_dashboard/__init__.py
+pylib/results/flakiness_dashboard/json_results_generator.py
+pylib/results/flakiness_dashboard/results_uploader.py
+pylib/results/json_results.py
+pylib/results/presentation/__init__.py
+pylib/results/presentation/standard_gtest_merge.py
+pylib/results/presentation/test_results_presentation.py
+pylib/results/report_results.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/symbols/stack_symbolizer.py
+pylib/utils/__init__.py
+pylib/utils/decorators.py
+pylib/utils/device_dependencies.py
+pylib/utils/dexdump.py
+pylib/utils/google_storage_helper.py
+pylib/utils/instrumentation_tracing.py
+pylib/utils/logdog_helper.py
+pylib/utils/logging_utils.py
+pylib/utils/proguard.py
+pylib/utils/repo_utils.py
+pylib/utils/shared_preference_utils.py
+pylib/utils/test_filter.py
+pylib/utils/time_profile.py
+pylib/valgrind_tools.py
+test_runner.py
+tombstones.py
diff --git a/deps/v8/build/android/test_wrapper/logdog_wrapper.py b/deps/v8/build/android/test_wrapper/logdog_wrapper.py
new file mode 100755
index 0000000000..fda9f147d5
--- /dev/null
+++ b/deps/v8/build/android/test_wrapper/logdog_wrapper.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for adding logdog streaming support to swarming tasks."""
+
+import argparse
+import contextlib
+import logging
+import os
+import signal
+import subprocess
+import sys
+
+_SRC_PATH = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..', '..'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common',
+ 'py_utils'))
+
+from devil.utils import signal_handler
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+
+PROJECT = 'chromium'
+OUTPUT = 'logdog'
+COORDINATOR_HOST = 'luci-logdog.appspot.com'
+SERVICE_ACCOUNT_JSON = ('/creds/service_accounts'
+ '/service-account-luci-logdog-publisher.json')
+LOGDOG_TERMINATION_TIMEOUT = 30
+
+
+def CommandParser():
+ # Parses the command line arguments being passed in
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--target',
+ help='The test target to be run. If not set, any extra '
+ 'args passed to this script are assumed to be the '
+ 'full test command to run.')
+ parser.add_argument('--logdog-bin-cmd', required=True,
+ help='The logdog bin cmd.')
+ return parser
+
+
+def CreateStopTestsMethod(proc):
+ def StopTests(signum, _frame):
+ logging.error('Forwarding signal %s to test process', str(signum))
+ proc.send_signal(signum)
+ return StopTests
+
+
+@contextlib.contextmanager
+def NoLeakingProcesses(popen):
+ try:
+ yield popen
+ finally:
+ if popen is not None:
+ try:
+ if popen.poll() is None:
+ popen.kill()
+ except OSError:
+ logging.warning('Failed to kill %s. Process may be leaked.',
+ str(popen.pid))
+
+
+def main():
+ parser = CommandParser()
+ args, extra_cmd_args = parser.parse_known_args(sys.argv[1:])
+
+ logging.basicConfig(level=logging.INFO)
+ if args.target:
+ test_cmd = [os.path.join('bin', 'run_%s' % args.target), '-v']
+ test_cmd += extra_cmd_args
+ else:
+ test_cmd = extra_cmd_args
+
+ test_env = dict(os.environ)
+ logdog_cmd = []
+
+ with tempfile_ext.NamedTemporaryDirectory(
+ prefix='tmp_android_logdog_wrapper') as temp_directory:
+ if not os.path.exists(args.logdog_bin_cmd):
+ logging.error(
+ 'Logdog binary %s unavailable. Unable to create logdog client',
+ args.logdog_bin_cmd)
+ else:
+ streamserver_uri = 'unix:%s' % os.path.join(temp_directory,
+ 'butler.sock')
+ prefix = os.path.join('android', 'swarming', 'logcats',
+ os.environ.get('SWARMING_TASK_ID'))
+
+ logdog_cmd = [
+ args.logdog_bin_cmd,
+ '-project', PROJECT,
+ '-output', OUTPUT,
+ '-prefix', prefix,
+ '--service-account-json', SERVICE_ACCOUNT_JSON,
+ '-coordinator-host', COORDINATOR_HOST,
+ 'serve',
+ '-streamserver-uri', streamserver_uri]
+ test_env.update({
+ 'LOGDOG_STREAM_PROJECT': PROJECT,
+ 'LOGDOG_STREAM_PREFIX': prefix,
+ 'LOGDOG_STREAM_SERVER_PATH': streamserver_uri,
+ 'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST,
+ })
+
+ logdog_proc = None
+ if logdog_cmd:
+ logdog_proc = subprocess.Popen(logdog_cmd)
+
+ with NoLeakingProcesses(logdog_proc):
+ with NoLeakingProcesses(
+ subprocess.Popen(test_cmd, env=test_env)) as test_proc:
+ with signal_handler.SignalHandler(signal.SIGTERM,
+ CreateStopTestsMethod(test_proc)):
+ result = test_proc.wait()
+ if logdog_proc:
+ def logdog_stopped():
+ return logdog_proc.poll() is not None
+
+ logdog_proc.terminate()
+ timeout_retry.WaitFor(logdog_stopped, wait_period=1,
+ max_tries=LOGDOG_TERMINATION_TIMEOUT)
+
+ # If logdog_proc hasn't finished by this point, allow
+ # NoLeakingProcesses to kill it.
+
+
+ return result
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps b/deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps
new file mode 100644
index 0000000000..bb696587e3
--- /dev/null
+++ b/deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps
@@ -0,0 +1,12 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/test_wrapper/logdog_wrapper.pydeps build/android/test_wrapper/logdog_wrapper.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+test_wrapper/logdog_wrapper.py
diff --git a/deps/v8/build/android/tests/symbolize/Makefile b/deps/v8/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000000..4fc53dad56
--- /dev/null
+++ b/deps/v8/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
+lib%.so: %.cc
+ $(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
diff --git a/deps/v8/build/android/tests/symbolize/a.cc b/deps/v8/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000000..f0c7ca4c67
--- /dev/null
+++ b/deps/v8/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+ A();
+ void Foo(int i);
+ void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/deps/v8/build/android/tests/symbolize/b.cc b/deps/v8/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000000..db8752099a
--- /dev/null
+++ b/deps/v8/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+ B();
+ void Baz(float f);
+ void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/deps/v8/build/android/tombstones.py b/deps/v8/build/android/tombstones.py
new file mode 100755
index 0000000000..e1a2d76d11
--- /dev/null
+++ b/deps/v8/build/android/tombstones.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Finds the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes the tombstone files were created with current symbols.
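+#
+# Example invocation (output directory is illustrative):
+#   build/android/tombstones.py --output-directory out/Default -a -s -w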
+
+import argparse
+import datetime
+import logging
+import os
+import sys
+
+from multiprocessing.pool import ThreadPool
+
+import devil_chromium
+
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.symbols import stack_symbolizer
+
+
+_TZ_UTC = {'TZ': 'UTC'}
+
+
+def _ListTombstones(device):
+ """List the tombstone files on the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+
+ Yields:
+ Tuples of (tombstone filename, date time of file on device).
+ """
+ try:
+ if not device.PathExists('/data/tombstones', as_root=True):
+ return
+ entries = device.StatDirectory('/data/tombstones', as_root=True)
+ for entry in entries:
+ if 'tombstone' in entry['filename']:
+ yield (entry['filename'],
+ datetime.datetime.fromtimestamp(entry['st_mtime']))
+ except device_errors.CommandFailedError:
+ logging.exception('Could not retrieve tombstones.')
+ except device_errors.DeviceUnreachableError:
+ logging.exception('Device unreachable retrieving tombstones.')
+ except device_errors.CommandTimeoutError:
+ logging.exception('Timed out retrieving tombstones.')
+
+
+def _GetDeviceDateTime(device):
+ """Determine the date time on the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+
+ Returns:
+ A datetime instance.
+ """
+ device_now_string = device.RunShellCommand(
+ ['date'], check_return=True, env=_TZ_UTC)
+ return datetime.datetime.strptime(
+ device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+ """Retrieve the tombstone data from the device
+
+ Args:
+ device: An instance of DeviceUtils.
+ tombstone_file: the tombstone to retrieve
+
+ Returns:
+ A list of lines
+ """
+ return device.ReadFile(
+ '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+ """Deletes a tombstone from the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+ tombstone_file: the tombstone to delete.
+ """
+ return device.RunShellCommand(
+ ['rm', '/data/tombstones/' + tombstone_file],
+ as_root=True, check_return=True)
+
+
+def _ResolveTombstone(args):
+ tombstone = args[0]
+ tombstone_symbolizer = args[1]
+ lines = []
+ lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+ ', about this long ago: ' +
+ (str(tombstone['device_now'] - tombstone['time']) +
+ ' Device: ' + tombstone['serial'])]
+ logging.info('\n'.join(lines))
+ logging.info('Resolving...')
+ lines += tombstone_symbolizer.ExtractAndResolveNativeStackTraces(
+ tombstone['data'],
+ tombstone['device_abi'],
+ tombstone['stack'])
+ return lines
+
+
+def _ResolveTombstones(jobs, tombstones, tombstone_symbolizer):
+ """Resolve a list of tombstones.
+
+ Args:
+ jobs: the number of jobs to use with multithread.
+ tombstones: a list of tombstones.
+ """
+ if not tombstones:
+ logging.warning('No tombstones to resolve.')
+ return []
+ if len(tombstones) == 1:
+ data = [_ResolveTombstone([tombstones[0], tombstone_symbolizer])]
+ else:
+ pool = ThreadPool(jobs)
+ data = pool.map(
+ _ResolveTombstone,
+ [[tombstone, tombstone_symbolizer] for tombstone in tombstones])
+ pool.close()
+ pool.join()
+ resolved_tombstones = []
+ for tombstone in data:
+ resolved_tombstones.extend(tombstone)
+ return resolved_tombstones
+
+
+def _GetTombstonesForDevice(device, resolve_all_tombstones,
+ include_stack_symbols,
+ wipe_tombstones):
+ """Returns a list of tombstones on a given device.
+
+ Args:
+ device: An instance of DeviceUtils.
+    resolve_all_tombstones: Whether to resolve every tombstone.
+ include_stack_symbols: Whether to include symbols for stack data.
+ wipe_tombstones: Whether to wipe tombstones.
+ """
+ ret = []
+ all_tombstones = list(_ListTombstones(device))
+ if not all_tombstones:
+ logging.warning('No tombstones.')
+ return ret
+
+ # Sort the tombstones in date order, descending
+ all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1]))
+
+ # Only resolve the most recent unless --all-tombstones given.
+ tombstones = all_tombstones if resolve_all_tombstones else [all_tombstones[0]]
+
+ device_now = _GetDeviceDateTime(device)
+ try:
+ for tombstone_file, tombstone_time in tombstones:
+ ret += [{'serial': str(device),
+ 'device_abi': device.product_cpu_abi,
+ 'device_now': device_now,
+ 'time': tombstone_time,
+ 'file': tombstone_file,
+ 'stack': include_stack_symbols,
+ 'data': _GetTombstoneData(device, tombstone_file)}]
+ except device_errors.CommandFailedError:
+ for entry in device.StatDirectory(
+ '/data/tombstones', as_root=True, timeout=60):
+ logging.info('%s: %s', str(device), entry)
+ raise
+
+ # Erase all the tombstones if desired.
+ if wipe_tombstones:
+ for tombstone_file, _ in all_tombstones:
+ _EraseTombstone(device, tombstone_file)
+
+ return ret
+
+
+def ClearAllTombstones(device):
+ """Clear all tombstones in the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+ """
+ all_tombstones = list(_ListTombstones(device))
+ if not all_tombstones:
+ logging.warning('No tombstones to clear.')
+
+ for tombstone_file, _ in all_tombstones:
+ _EraseTombstone(device, tombstone_file)
+
+
+def ResolveTombstones(device, resolve_all_tombstones, include_stack_symbols,
+ wipe_tombstones, jobs=4, apk_under_test=None,
+ tombstone_symbolizer=None):
+ """Resolve tombstones in the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+ resolve_all_tombstone: Whether to resolve every tombstone.
+ include_stack_symbols: Whether to include symbols for stack data.
+ wipe_tombstones: Whether to wipe tombstones.
+ jobs: Number of jobs to use when processing multiple crash stacks.
+
+ Returns:
+ A list of resolved tombstones.
+ """
+ return _ResolveTombstones(jobs,
+ _GetTombstonesForDevice(device,
+ resolve_all_tombstones,
+ include_stack_symbols,
+ wipe_tombstones),
+ (tombstone_symbolizer
+ or stack_symbolizer.Symbolizer(apk_under_test)))
+
+
+def main():
+ custom_handler = logging.StreamHandler(sys.stdout)
+ custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+ logging.getLogger().addHandler(custom_handler)
+ logging.getLogger().setLevel(logging.INFO)
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--device',
+                      help='The serial number of the device. If not '
+                      'specified, all devices will be used.')
+ parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
+ parser.add_argument('-a', '--all-tombstones', action='store_true',
+ help='Resolve symbols for all tombstones, rather than '
+ 'just the most recent.')
+ parser.add_argument('-s', '--stack', action='store_true',
+ help='Also include symbols for stack data')
+ parser.add_argument('-w', '--wipe-tombstones', action='store_true',
+ help='Erase all tombstones from device after processing')
+ parser.add_argument('-j', '--jobs', type=int,
+ default=4,
+ help='Number of jobs to use when processing multiple '
+ 'crash stacks.')
+ parser.add_argument('--output-directory',
+ help='Path to the root build directory.')
+ parser.add_argument('--adb-path', type=os.path.abspath,
+ help='Path to the adb binary.')
+ args = parser.parse_args()
+
+ devil_chromium.Initialize(adb_path=args.adb_path)
+
+ blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+
+ if args.output_directory:
+ constants.SetOutputDirectory(args.output_directory)
+ # Do an up-front test that the output directory is known.
+ constants.CheckOutputDirectory()
+
+ if args.device:
+ devices = [device_utils.DeviceUtils(args.device)]
+ else:
+ devices = device_utils.DeviceUtils.HealthyDevices(blacklist)
+
+ # This must be done serially because strptime can hit a race condition if
+ # used for the first time in a multithreaded environment.
+ # http://bugs.python.org/issue7980
+ for device in devices:
+ resolved_tombstones = ResolveTombstones(
+ device, args.all_tombstones,
+ args.stack, args.wipe_tombstones, args.jobs)
+ for line in resolved_tombstones:
+ logging.info(line)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/update_deps/update_third_party_deps.py b/deps/v8/build/android/update_deps/update_third_party_deps.py
new file mode 100755
index 0000000000..3a869c43ec
--- /dev/null
+++ b/deps/v8/build/android/update_deps/update_third_party_deps.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Uploads or downloads third party libraries to or from google cloud storage.
+
+This script will only work for Android checkouts.
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+
+sys.path.append(os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(
+ os.path.abspath(
+ os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools')))
+import download_from_google_storage
+import upload_to_google_storage
+
+
+def _AddBasicArguments(parser):
+ parser.add_argument(
+ '--sdk-root', default=constants.ANDROID_SDK_ROOT,
+ help='base path to the Android SDK root')
+ parser.add_argument(
+ '-v', '--verbose', action='store_true', help='print debug information')
+ parser.add_argument(
+ '-b', '--bucket-path', required=True,
+ help='The path of the lib file in Google Cloud Storage.')
+ parser.add_argument(
+ '-l', '--local-path', required=True,
+ help='The base path of the third_party directory')
+
+
+def _CheckPaths(bucket_path, local_path):
+ if bucket_path.startswith('gs://'):
+ bucket_url = bucket_path
+ else:
+ bucket_url = 'gs://%s' % bucket_path
+ local_path = os.path.join(host_paths.DIR_SOURCE_ROOT, local_path)
+ if not os.path.isdir(local_path):
+ raise IOError(
+ 'The library local path is not a valid directory: %s' % local_path)
+ return bucket_url, local_path
+
+
+def _CheckFileList(local_path, file_list):
+ local_path = os.path.abspath(local_path)
+ abs_path_list = [os.path.abspath(f) for f in file_list]
+ for f in abs_path_list:
+ if os.path.commonprefix([f, local_path]) != local_path:
+ raise IOError(
+        '%s in the arguments is not a descendant of the specified '
+        'directory %s' % (f, local_path))
+ return abs_path_list
+
+
+def _PurgeSymlinks(local_path):
+ for dirpath, _, filenames in os.walk(local_path):
+ for f in filenames:
+ path = os.path.join(dirpath, f)
+ if os.path.islink(path):
+ os.remove(path)
+
+
+def Upload(arguments):
+ """Upload files in a third_party directory to google storage"""
+ bucket_url, local_path = _CheckPaths(arguments.bucket_path,
+ arguments.local_path)
+ file_list = _CheckFileList(local_path, arguments.file_list)
+ return upload_to_google_storage.upload_to_google_storage(
+ input_filenames=file_list,
+ base_url=bucket_url,
+ gsutil=arguments.gsutil,
+ force=False,
+ use_md5=False,
+ num_threads=1,
+ skip_hashing=False,
+ gzip=None)
+
+
+def Download(arguments):
+ """Download files based on sha1 files in a third_party dir from gcs"""
+ bucket_url, local_path = _CheckPaths(arguments.bucket_path,
+ arguments.local_path)
+ _PurgeSymlinks(local_path)
+ return download_from_google_storage.download_from_google_storage(
+ local_path,
+ bucket_url,
+ gsutil=arguments.gsutil,
+ num_threads=1,
+ directory=True,
+ recursive=True,
+ force=False,
+ output=None,
+ ignore_errors=False,
+ sha1_file=None,
+ verbose=arguments.verbose,
+ auto_platform=False,
+ extract=False)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ subparsers = parser.add_subparsers(title='commands')
+ download_parser = subparsers.add_parser(
+ 'download', help='download the library from the cloud storage')
+ _AddBasicArguments(download_parser)
+ download_parser.set_defaults(func=Download)
+
+ upload_parser = subparsers.add_parser(
+ 'upload', help='find all jar files in a third_party directory and ' +
+ 'upload them to cloud storage')
+ _AddBasicArguments(upload_parser)
+ upload_parser.set_defaults(func=Upload)
+ upload_parser.add_argument(
+ '-f', '--file-list', nargs='+', required=True,
+ help='A list of base paths for files in third_party to upload.')
+
+ arguments = parser.parse_args(argv)
+ if not os.path.isdir(arguments.sdk_root):
+ logging.debug('Did not find the Android SDK root directory at "%s".',
+ arguments.sdk_root)
+    logging.info('Skipping, not on an Android checkout.')
+ return 0
+
+ arguments.gsutil = download_from_google_storage.Gsutil(
+ download_from_google_storage.GSUTIL_DEFAULT_PATH)
+ return arguments.func(arguments)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/update_verification.py b/deps/v8/build/android/update_verification.py
new file mode 100755
index 0000000000..40cb64ac5d
--- /dev/null
+++ b/deps/v8/build/android/update_verification.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device.
+
+This script will help verify that app data is preserved during an update.
+To use this script first run it with the create_app_data option.
+
+./update_verification.py create_app_data --old-apk <path> --app-data <path>
+
+The script will then install the old apk, prompt you to create some app data
+(bookmarks, etc.), and then save the app data in the path you gave it.
+
+Next, once you have some app data saved, run this script with the test_update
+option.
+
+./update_verification.py test_update --old-apk <path> --new-apk <path>
+--app-data <path>
+
+This will install the old apk, load the saved app data, install the new apk,
+and ask the user to verify that all of the app data was preserved.
+"""
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import apk_helper
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+
+def CreateAppData(device, old_apk, app_data, package_name):
+ device.Install(old_apk)
+ raw_input('Set the application state. Once ready, press enter and '
+ 'select "Backup my data" on the device.')
+ device.adb.Backup(app_data, packages=[package_name])
+ logging.critical('Application data saved to %s', app_data)
+
+def TestUpdate(device, old_apk, new_apk, app_data, package_name):
+ device.Install(old_apk)
+ device.adb.Restore(app_data)
+ # Restore command is not synchronous
+ raw_input('Select "Restore my data" on the device. Then press enter to '
+ 'continue.')
+ device_path = device.GetApplicationPaths(package_name)
+ if not device_path:
+ raise Exception('Expected package %s to already be installed. '
+ 'Package name might have changed!' % package_name)
+
+ logging.info('Verifying that %s can be overinstalled.', new_apk)
+ device.adb.Install(new_apk, reinstall=True)
+ logging.critical('Successfully updated to the new apk. Please verify that '
+ 'the application data is preserved.')
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Script to do semi-automated upgrade testing.")
+ parser.add_argument('-v', '--verbose', action='count',
+ help='Print verbose log information.')
+ parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
+ command_parsers = parser.add_subparsers(dest='command')
+
+ subparser = command_parsers.add_parser('create_app_data')
+ subparser.add_argument('--old-apk', required=True,
+ help='Path to apk to update from.')
+ subparser.add_argument('--app-data', required=True,
+ help='Path to where the app data backup should be '
+ 'saved to.')
+ subparser.add_argument('--package-name',
+ help='Chrome apk package name.')
+
+ subparser = command_parsers.add_parser('test_update')
+ subparser.add_argument('--old-apk', required=True,
+ help='Path to apk to update from.')
+ subparser.add_argument('--new-apk', required=True,
+ help='Path to apk to update to.')
+ subparser.add_argument('--app-data', required=True,
+ help='Path to where the app data backup is saved.')
+ subparser.add_argument('--package-name',
+ help='Chrome apk package name.')
+
+ args = parser.parse_args()
+ run_tests_helper.SetLogLevel(args.verbose)
+
+ devil_chromium.Initialize()
+
+ blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+
+ devices = device_utils.DeviceUtils.HealthyDevices(blacklist)
+ if not devices:
+ raise device_errors.NoDevicesError()
+ device = devices[0]
+ logging.info('Using device %s for testing.', str(device))
+
+ package_name = (args.package_name if args.package_name
+ else apk_helper.GetPackageName(args.old_apk))
+ if args.command == 'create_app_data':
+ CreateAppData(device, args.old_apk, args.app_data, package_name)
+ elif args.command == 'test_update':
+ TestUpdate(
+ device, args.old_apk, args.new_apk, args.app_data, package_name)
+ else:
+ raise Exception('Unknown test command: %s' % args.command)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/video_recorder.py b/deps/v8/build/android/video_recorder.py
new file mode 100755
index 0000000000..b21759a35a
--- /dev/null
+++ b/deps/v8/build/android/video_recorder.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import video_recorder
+
+if __name__ == '__main__':
+ devil_chromium.Initialize()
+ sys.exit(video_recorder.main())
diff --git a/deps/v8/build/apply_locales.py b/deps/v8/build/apply_locales.py
new file mode 100755
index 0000000000..6af7280fad
--- /dev/null
+++ b/deps/v8/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
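+#
+# Example (paths are illustrative):
+#   apply_locales.py -d 'path/ZZLOCALE.lproj' en-US fr
+# prints:
+#   'path/en.lproj' 'path/fr.lproj'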
+
+import sys
+import optparse
+
+def main(argv):
+
+ parser = optparse.OptionParser()
+ usage = 'usage: %s [options ...] format_string locale_list'
+ parser.set_usage(usage.replace('%s', '%prog'))
+ parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+ default=False,
+ help='map "en-US" to "en" and "-" to "_" in locales')
+
+ (options, arglist) = parser.parse_args(argv)
+
+ if len(arglist) < 3:
+ print 'ERROR: need string and list of locales'
+ return 1
+
+ str_template = arglist[1]
+ locales = arglist[2:]
+
+ results = []
+ for locale in locales:
+ # For Cocoa to find the locale at runtime, it needs to use '_' instead
+ # of '-' (http://crbug.com/20441). Also, 'en-US' should be represented
+ # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+ if options.dash_to_underscore:
+ if locale == 'en-US':
+ locale = 'en'
+ locale = locale.replace('-', '_')
+ results.append(str_template.replace('ZZLOCALE', locale))
+
+ # Quote each element so filename spaces don't mess up GYP's attempt to parse
+ # it into a list.
+ print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/deps/v8/build/args/OWNERS b/deps/v8/build/args/OWNERS
new file mode 100644
index 0000000000..d218b6bdbd
--- /dev/null
+++ b/deps/v8/build/args/OWNERS
@@ -0,0 +1 @@
+per-file headless.gn=file://headless/OWNERS
diff --git a/deps/v8/build/args/README.txt b/deps/v8/build/args/README.txt
new file mode 100644
index 0000000000..825bf64c69
--- /dev/null
+++ b/deps/v8/build/args/README.txt
@@ -0,0 +1,31 @@
+This directory is here to hold .gni files that contain sets of GN build
+arguments for given configurations.
+
+(Currently this directory is empty because we removed the only thing here, but
+this has come up several times so I'm confident we'll need this again. If this
+directory is still empty by 2017, feel free to delete it. --Brett)
+
+Some projects or bots may have build configurations with specific combinations
+of flags. Rather than making a new global flag for your specific project and
+adding it all over the build to each arg it should affect, you can add a .gni
+file here with the variables.
+
+For example, for project foo you may put in build/args/foo.gni:
+
+ target_os = "android"
+ use_pulseaudio = false
+ use_ozone = true
+ system_libdir = "foo"
+
+Users wanting to build this configuration would run:
+
+ $ gn args out/mybuild
+
+And add the following line to their args for that build directory:
+
+ import("//build/args/foo.gni")
+ # You can set any other args here like normal.
+ is_component_build = false
+
+This way everybody can agree on a set of flags for a project, and their builds
+stay in sync as the flags in foo.gni are modified.
diff --git a/deps/v8/build/args/fuchsia.gn b/deps/v8/build/args/fuchsia.gn
new file mode 100644
index 0000000000..ba10b884cc
--- /dev/null
+++ b/deps/v8/build/args/fuchsia.gn
@@ -0,0 +1,7 @@
+import("//build/args/headless.gn")
+
+target_os = "fuchsia"
+enable_basic_printing = false
+headless_fontconfig_utils = false
+toolkit_views = false
+enable_plugins = false
diff --git a/deps/v8/build/args/headless.gn b/deps/v8/build/args/headless.gn
new file mode 100644
index 0000000000..9a6bfd103c
--- /dev/null
+++ b/deps/v8/build/args/headless.gn
@@ -0,0 +1,43 @@
+# GN args template for the Headless Chrome library
+#
+# Add an import to args.gn in the out directory and run gn gen on that
+# directory to use it.
+# E.g. for out directory out/foo:
+# echo 'import("//build/args/headless.gn")' > out/foo/args.gn
+# gn gen out/foo
+#
+# Use gn args to add your own build preference args.
+
+use_ozone = true
+ozone_auto_platforms = false
+ozone_platform = "headless"
+ozone_platform_headless = true
+
+# Embed resource.pak into binary to simplify deployment.
+headless_use_embedded_resources = true
+
+# Expose headless bindings for freetype library bundled with Chromium.
+headless_fontconfig_utils = true
+
+# Remove a dependency on a system fontconfig library.
+use_bundled_fontconfig = true
+
+# In order to simplify deployment we build the ICU data file
+# into the binary.
+icu_use_data_file = false
+
+# Use embedded data instead of external files for headless in order
+# to simplify deployment.
+v8_use_external_startup_data = false
+
+enable_nacl = false
+enable_print_preview = false
+enable_remoting = false
+use_alsa = false
+use_cups = false
+use_dbus = false
+use_gio = false
+use_kerberos = false
+use_libpci = false
+use_pulseaudio = false
+use_udev = false
+rtc_use_pipewire = false
diff --git a/deps/v8/build/build-ctags.sh b/deps/v8/build/build-ctags.sh
new file mode 100755
index 0000000000..61e017e329
--- /dev/null
+++ b/deps/v8/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
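+#
+# Usage (directory names are illustrative):
+#   build/build-ctags.sh          # tag only the top-level tree
+#   build/build-ctags.sh v8 net   # also tag the v8 and net subdirectories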
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+ cat <<EOF
+ You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
+ Debian or a related flavor of Linux, you may want to try running
+ apt-get install exuberant-ctags.
+EOF
+ exit
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+ echo "Failed to create ctags for $1"
+ exit 1
+}
+
+ctags_cmd() {
+ echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+ local extraexcludes=""
+ if [[ a"$1" == "a--extra-excludes" ]]; then
+ extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+ shift
+ fi
+
+ cd "$CHROME_SRC_DIR/$1" || fail $1
+ # Redirect error messages so they aren't seen because they are almost always
+ # errors about components that you just happen to have not built (NaCl, for
+ # example).
+ $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+ mv -f .tmp_tags tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in "$@"; do
+  build_dir "$dir"
+done
diff --git a/deps/v8/build/build_config.h b/deps/v8/build/build_config.h
new file mode 100644
index 0000000000..4d1ba77f2b
--- /dev/null
+++ b/deps/v8/build/build_config.h
@@ -0,0 +1,207 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+// Operating System:
+// OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) /
+// OS_NACL (NACL_SFI or NACL_NONSFI) / OS_NACL_SFI / OS_NACL_NONSFI
+// OS_CHROMEOS is set by the build system
+// Compiler:
+// COMPILER_MSVC / COMPILER_GCC
+// Processor:
+// ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+// ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
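+//
+// Example (illustrative):
+//   #if defined(OS_POSIX) && !defined(OS_NACL)
+//   // POSIX-only code that should not be built for Native Client.
+//   #endif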
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
+// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
+// mode, while it does not in SFI build mode.
+#if defined(__native_client_nonsfi__)
+#define OS_NACL_NONSFI
+#else
+#define OS_NACL_SFI
+#endif
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals after testing ANDROID, as some Android
+// builds on Mac don't have this header available, and it's not needed unless
+// the target is really Mac/iOS.
+#include <TargetConditionals.h>
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__linux__)
+#define OS_LINUX 1
+// include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// we really are using glibc, not uClibc pretending to be glibc
+#define LIBC_GLIBC 1
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#elif defined(__Fuchsia__)
+#define OS_FUCHSIA 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#elif defined(__NetBSD__)
+#define OS_NETBSD 1
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#elif defined(_AIX)
+#define OS_AIX 1
+#elif defined(__asmjs__)
+#define OS_ASMJS
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+// NOTE: Adding a new port? Please follow
+// https://chromium.googlesource.com/chromium/src/+/master/docs/new_port_policy.md
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_NETBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_AIX) || defined(OS_ANDROID) || defined(OS_ASMJS) || \
+ defined(OS_FREEBSD) || defined(OS_LINUX) || defined(OS_MACOSX) || \
+ defined(OS_NACL) || defined(OS_NETBSD) || defined(OS_OPENBSD) || \
+ defined(OS_QNX) || defined(OS_SOLARIS)
+#define OS_POSIX 1
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
+ !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection. For more info on what's defined, see:
+// http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+// http://www.agner.org/optimize/calling_conventions.pdf
+// or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__s390x__)
+#define ARCH_CPU_S390_FAMILY 1
+#define ARCH_CPU_S390X 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__s390__)
+#define ARCH_CPU_S390_FAMILY 1
+#define ARCH_CPU_S390 1
+#define ARCH_CPU_31_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif (defined(__PPC64__) || defined(__PPC__)) && defined(__BIG_ENDIAN__)
+#define ARCH_CPU_PPC64_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#elif defined(__PPC64__)
+#define ARCH_CPU_PPC64_FAMILY 1
+#define ARCH_CPU_PPC64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__aarch64__) || defined(_M_ARM64)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__) || defined(__asmjs__)
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS64EL 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#endif
+#elif defined(__MIPSEB__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPS 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_BIG_ENDIAN 1
+#endif
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_FUCHSIA)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
+ (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \
+ (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base who manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif // BUILD_BUILD_CONFIG_H_
diff --git a/deps/v8/build/buildflag.h b/deps/v8/build/buildflag.h
new file mode 100644
index 0000000000..5776a754c4
--- /dev/null
+++ b/deps/v8/build/buildflag.h
@@ -0,0 +1,47 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_BUILDFLAG_H_
+#define BUILD_BUILDFLAG_H_
+
+// These macros un-mangle the names of the build flags in a way that looks
+// natural, and give errors if the flag is not defined. In the raw
+// preprocessor it's easy to make mistakes that interpret "you haven't done
+// the setup to know what the flag is" as "the flag is off". Normally you
+// would include the generated header rather than include this file directly.
+//
+// This is for use with generated headers. See build/buildflag_header.gni.
+
+// This two-macro dance concatenates two preprocessor args with ## through a
+// level of indirection, because using ## directly prevents macros in the
+// parameters from being expanded.
+#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b
+#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
+
+// Accessor for build flags.
+//
+// To test for a value, if the build file specifies:
+//
+// ENABLE_FOO=true
+//
+// Then you would check at build-time in source code with:
+//
+// #include "foo_flags.h" // The header the build file specified.
+//
+// #if BUILDFLAG(ENABLE_FOO)
+// ...
+// #endif
+//
+// There will be no #define called ENABLE_FOO, so if you accidentally test
+// whether that is defined, the check will always be negative. You can also
+// use the value in expressions:
+//
+// const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME);
+//
+// Because the flag is accessed as a function-style preprocessor macro, a
+// compile error results if the proper header defining the internal flag
+// value has not been included.
+#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
+
+#endif // BUILD_BUILDFLAG_H_
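
For context: BUILDFLAG(ENABLE_FOO) expands to (BUILDFLAG_INTERNAL_ENABLE_FOO()),
so the generated header must define one function-style macro per flag. A rough
sketch of what such a header plausibly looks like (illustrative only; the real
generator is build/write_buildflag_header.py, whose code is not shown here, and
the flag values below are made up):

    def make_buildflag_header(flags):
        # flags: mapping of flag name to its (already rewritten) value.
        lines = ['#include "build/buildflag.h"', '']
        for name, value in sorted(flags.items()):
            lines.append('#define BUILDFLAG_INTERNAL_%s() (%s)' % (name, value))
        return '\n'.join(lines) + '\n'

    print(make_buildflag_header({'ENABLE_FOO': 1,
                                 'SPAM_SERVER_NAME': '"spam.example.com"'}))

With that header included, "#if BUILDFLAG(ENABLE_FOO)" becomes
"#if (BUILDFLAG_INTERNAL_ENABLE_FOO())", i.e. "#if (1)"; without it, the inner
macro is undefined and compilation fails instead of silently evaluating to 0.
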
diff --git a/deps/v8/build/buildflag_header.gni b/deps/v8/build/buildflag_header.gni
new file mode 100644
index 0000000000..281c1646a9
--- /dev/null
+++ b/deps/v8/build/buildflag_header.gni
@@ -0,0 +1,141 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a header with preprocessor defines specified by the build file.
+#
+# The flags are converted to function-style defines with mangled names, and
+# code uses an accessor macro to access the values. This minimizes bugs where
+# code checks whether something is defined while the proper header isn't
+# included, in which case the answer is silently false or varies across the
+# code base.
+#
+# In the GN template, specify build flags as a list of strings that encode
+# key/value pairs like this:
+#
+# flags = [ "ENABLE_FOO=1", "ENABLE_BAR=$enable_bar" ]
+#
+# The GN values "true" and "false" will be mapped to 1 and 0 so boolean
+# #if flags can be expressed naturally. This means you can't directly make a
+# define whose C++ value is true or false for use in code. If you REALLY
+# need this, you can use the strings "(true)" and "(false)" to prevent the
+# rewriting.
+
+# To check the value of the flag in C code:
+#
+# #include "path/to/here/header_file.h"
+#
+# #if BUILDFLAG(ENABLE_FOO)
+# ...
+# #endif
+#
+# const char kSpamServerUrl[] = BUILDFLAG(SPAM_SERVER_URL);
+#
+# There will be no #define called ENABLE_FOO, so if you accidentally test for
+# that in an #ifdef it will always be negative.
+#
+#
+# Template parameters
+#
+# flags [required, list of strings]
+# Flag values as described above.
+#
+# header [required, string]
+# File name for generated header. By default, this will go in the
+# generated file directory for this target, and you would include it
+# with:
+# #include "<path_to_this_BUILD_file>/<header>"
+#
+# header_dir [optional, string]
+# Override the default location of the generated header. The string will
+# be treated as a subdirectory of the root_gen_dir. For example:
+# header_dir = "foo/bar"
+# Then you can include the header as:
+# #include "foo/bar/baz.h"
+#
+# deps, public_deps, testonly, visibility
+# Normal meaning.
+#
+#
+# Grit defines
+#
+# If one .grd file uses a flag, just add to the grit target:
+#
+# defines = [
+# "enable_doom_melon=$enable_doom_melon",
+# ]
+#
+# If multiple .grd files use it, you'll want to put the defines in a .gni file
+# so it can be shared. Generally this .gni file should include all grit defines
+# for a given module (for some definition of "module"). Then do:
+#
+# defines = ui_grit_defines
+#
+# If you forget to do this, the flag will be implicitly false in the .grd file
+# and those resources won't be compiled. You'll know because the resource
+# #define won't be generated and any code that uses it won't compile. If you
+# see a missing IDS_* string, this is probably the reason.
+#
+#
+# Example
+#
+# buildflag_header("foo_buildflags") {
+# header = "foo_buildflags.h"
+#
+# flags = [
+# # This uses the GN build flag enable_doom_melon as the definition.
+# "ENABLE_DOOM_MELON=$enable_doom_melon",
+#
+# # This force-enables the flag.
+# "ENABLE_SPACE_LASER=true",
+#
+# # This will expand to the quoted C string when used in source code.
+# "SPAM_SERVER_URL=\"http://www.example.com/\"",
+# ]
+# }
+template("buildflag_header") {
+ action(target_name) {
+ script = "//build/write_buildflag_header.py"
+
+ if (defined(invoker.header_dir)) {
+ header_file = "${invoker.header_dir}/${invoker.header}"
+ } else {
+ # Compute the path from the root to this file.
+ header_file = rebase_path(".", "//") + "/${invoker.header}"
+ }
+
+ outputs = [
+ "$root_gen_dir/$header_file",
+ ]
+
+ # Always write --flags to the file so it's not empty. Empty will confuse GN
+ # into thinking the response file isn't used.
+ response_file_contents = [ "--flags" ]
+ if (defined(invoker.flags)) {
+ response_file_contents += invoker.flags
+ }
+
+ args = [
+ "--output",
+ header_file, # Not rebased, Python script puts it inside gen-dir.
+ "--rulename",
+ get_label_info(":$target_name", "label_no_toolchain"),
+ "--gen-dir",
+ rebase_path(root_gen_dir, root_build_dir),
+ "--definitions",
+ "{{response_file_name}}",
+ ]
+
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+
+ public_deps = [
+ "//build:buildflag_header_h",
+ ]
+ }
+}
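
To make the true/false rewriting concrete, a hypothetical few-line Python
equivalent (not the actual logic of //build/write_buildflag_header.py):

    def rewrite_flag(assignment):
        # "NAME=true" -> ("NAME", "1"); "NAME=false" -> ("NAME", "0");
        # the "(true)"/"(false)" escape hatch passes through untouched.
        name, _, value = assignment.partition('=')
        return name, {'true': '1', 'false': '0'}.get(value, value)

    for flag in ['ENABLE_DOOM_MELON=true',
                 'ENABLE_SPACE_LASER=false',
                 'ENABLE_BAR=(true)']:
        print(rewrite_flag(flag))
    # ('ENABLE_DOOM_MELON', '1')
    # ('ENABLE_SPACE_LASER', '0')
    # ('ENABLE_BAR', '(true)')
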
diff --git a/deps/v8/build/check_gn_headers.py b/deps/v8/build/check_gn_headers.py
new file mode 100755
index 0000000000..2de11b8328
--- /dev/null
+++ b/deps/v8/build/check_gn_headers.py
@@ -0,0 +1,308 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Find header files missing in GN.
+
+This script gets all the header files from ninja_deps, which is from the true
+dependency generated by the compiler, and report if they don't exist in GN.
+"""
+
+import argparse
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+from multiprocessing import Process, Queue
+
+SRC_DIR = os.path.abspath(
+ os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir))
+DEPOT_TOOLS_DIR = os.path.join(SRC_DIR, 'third_party', 'depot_tools')
+
+
+def GetHeadersFromNinja(out_dir, skip_obj, q):
+ """Return all the header files from ninja_deps"""
+
+ def NinjaSource():
+ cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-t', 'deps']
+ # A negative bufsize means to use the system default, which usually
+ # means fully buffered.
+ popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1)
+ for line in iter(popen.stdout.readline, ''):
+ yield line.rstrip()
+
+ popen.stdout.close()
+ return_code = popen.wait()
+ if return_code:
+ raise subprocess.CalledProcessError(return_code, cmd)
+
+ ans, err = set(), None
+ try:
+ ans = ParseNinjaDepsOutput(NinjaSource(), out_dir, skip_obj)
+ except Exception as e:
+ err = str(e)
+ q.put((ans, err))
+
+
+def ParseNinjaDepsOutput(ninja_out, out_dir, skip_obj):
+ """Parse ninja output and get the header files"""
+ all_headers = {}
+
+ # Ninja always uses "/", even on Windows.
+ prefix = '../../'
+
+ is_valid = False
+ obj_file = ''
+ for line in ninja_out:
+ if line.startswith(' '):
+ if not is_valid:
+ continue
+ if line.endswith('.h') or line.endswith('.hh'):
+ f = line.strip()
+ if f.startswith(prefix):
+ f = f[6:] # Remove the '../../' prefix
+ # build/ only contains build-specific files like build_config.h
+ # and buildflag.h, and system header files, so they should be
+ # skipped.
+ if f.startswith(out_dir) or f.startswith('out'):
+ continue
+ if not f.startswith('build'):
+ all_headers.setdefault(f, [])
+ if not skip_obj:
+ all_headers[f].append(obj_file)
+ else:
+ is_valid = line.endswith('(VALID)')
+ obj_file = line.split(':')[0]
+
+ return all_headers
+
+
+def GetHeadersFromGN(out_dir, q):
+ """Return all the header files from GN"""
+
+ tmp = None
+ ans, err = set(), None
+ try:
+ # Argument |dir| is needed to make sure it's on the same drive on Windows.
+ # dir='' means dir='.', but doesn't introduce an unneeded prefix.
+ tmp = tempfile.mkdtemp(dir='')
+ shutil.copy2(os.path.join(out_dir, 'args.gn'),
+ os.path.join(tmp, 'args.gn'))
+ # Do "gn gen" in a temp dir to prevent dirtying |out_dir|.
+ gn_exe = 'gn.bat' if sys.platform == 'win32' else 'gn'
+ subprocess.check_call([
+ os.path.join(DEPOT_TOOLS_DIR, gn_exe), 'gen', tmp, '--ide=json', '-q'])
+ gn_json = json.load(open(os.path.join(tmp, 'project.json')))
+ ans = ParseGNProjectJSON(gn_json, out_dir, tmp)
+ except Exception as e:
+ err = str(e)
+ finally:
+ if tmp:
+ shutil.rmtree(tmp)
+ q.put((ans, err))
+
+
+def ParseGNProjectJSON(gn, out_dir, tmp_out):
+ """Parse GN output and get the header files"""
+ all_headers = set()
+
+ for _target, properties in gn['targets'].iteritems():
+ sources = properties.get('sources', [])
+ public = properties.get('public', [])
+ # Exclude '"public": "*"'.
+ if type(public) is list:
+ sources += public
+ for f in sources:
+ if f.endswith('.h') or f.endswith('.hh'):
+ if f.startswith('//'):
+ f = f[2:] # Strip the '//' prefix.
+ if f.startswith(tmp_out):
+ f = out_dir + f[len(tmp_out):]
+ all_headers.add(f)
+
+ return all_headers
+
+
+def GetDepsPrefixes(q):
+ """Return all the folders controlled by DEPS file"""
+ prefixes, err = set(), None
+ try:
+ gclient_exe = 'gclient.bat' if sys.platform == 'win32' else 'gclient'
+ gclient_out = subprocess.check_output([
+ os.path.join(DEPOT_TOOLS_DIR, gclient_exe),
+ 'recurse', '--no-progress', '-j1',
+ 'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]'],
+ universal_newlines=True)
+ for i in gclient_out.split('\n'):
+ if i.startswith('src/'):
+ i = i[4:]
+ prefixes.add(i)
+ except Exception as e:
+ err = str(e)
+ q.put((prefixes, err))
+
+
+def IsBuildClean(out_dir):
+ cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-n']
+ try:
+ out = subprocess.check_output(cmd)
+ return 'no work to do.' in out
+ except Exception as e:
+ print e
+ return False
+
+def ParseWhiteList(whitelist):
+ out = set()
+ for line in whitelist.split('\n'):
+ line = re.sub(r'#.*', '', line).strip()
+ if line:
+ out.add(line)
+ return out
+
+
+def FilterOutDepsedRepo(files, deps):
+ return {f for f in files if not any(f.startswith(d) for d in deps)}
+
+
+def GetNonExistingFiles(lst):
+ out = set()
+ for f in lst:
+ if not os.path.isfile(f):
+ out.add(f)
+ return out
+
+
+def main():
+
+ def DumpJson(data):
+ if args.json:
+ with open(args.json, 'w') as f:
+ json.dump(data, f)
+
+ def PrintError(msg):
+ DumpJson([])
+ parser.error(msg)
+
+ parser = argparse.ArgumentParser(description='''
+ NOTE: Use ninja to build all targets in OUT_DIR before running
+ this script.''')
+ parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release',
+ help='output directory of the build')
+ parser.add_argument('--json',
+ help='JSON output filename for missing headers')
+ parser.add_argument('--whitelist', help='file containing whitelist')
+ parser.add_argument('--skip-dirty-check', action='store_true',
+ help='skip checking whether the build is dirty')
+ parser.add_argument('--verbose', action='store_true',
+ help='print more diagnostic info')
+
+ args, _extras = parser.parse_known_args()
+
+ if not os.path.isdir(args.out_dir):
+ parser.error('OUT_DIR "%s" does not exist.' % args.out_dir)
+
+ if not args.skip_dirty_check and not IsBuildClean(args.out_dir):
+ dirty_msg = 'OUT_DIR looks dirty. You need to build all there.'
+ if args.json:
+      # Assume we're running on the bots and silently skip this step.
+      # This is possible because the "analyze" step can be wrong due to
+      # underspecified header files. See crbug.com/725877
+ print dirty_msg
+ DumpJson([])
+ return 0
+ else:
+ # Assume running interactively.
+ parser.error(dirty_msg)
+
+ d_q = Queue()
+ d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, True, d_q,))
+ d_p.start()
+
+ gn_q = Queue()
+ gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,))
+ gn_p.start()
+
+ deps_q = Queue()
+ deps_p = Process(target=GetDepsPrefixes, args=(deps_q,))
+ deps_p.start()
+
+ d, d_err = d_q.get()
+ gn, gn_err = gn_q.get()
+ missing = set(d.keys()) - gn
+ nonexisting = GetNonExistingFiles(gn)
+
+ deps, deps_err = deps_q.get()
+ missing = FilterOutDepsedRepo(missing, deps)
+ nonexisting = FilterOutDepsedRepo(nonexisting, deps)
+
+ d_p.join()
+ gn_p.join()
+ deps_p.join()
+
+ if d_err:
+ PrintError(d_err)
+ if gn_err:
+ PrintError(gn_err)
+ if deps_err:
+ PrintError(deps_err)
+ if len(GetNonExistingFiles(d)) > 0:
+ print 'Non-existing files in ninja deps:', GetNonExistingFiles(d)
+ PrintError('Found non-existing files in ninja deps. You should ' +
+ 'build all in OUT_DIR.')
+ if len(d) == 0:
+ PrintError('OUT_DIR looks empty. You should build all there.')
+ if any((('/gen/' in i) for i in nonexisting)):
+ PrintError('OUT_DIR looks wrong. You should build all there.')
+
+ if args.whitelist:
+ whitelist = ParseWhiteList(open(args.whitelist).read())
+ missing -= whitelist
+ nonexisting -= whitelist
+
+ missing = sorted(missing)
+ nonexisting = sorted(nonexisting)
+
+ DumpJson(sorted(missing + nonexisting))
+
+ if len(missing) == 0 and len(nonexisting) == 0:
+ return 0
+
+ if len(missing) > 0:
+ print '\nThe following files should be included in gn files:'
+ for i in missing:
+ print i
+
+ if len(nonexisting) > 0:
+ print '\nThe following non-existing files should be removed from gn files:'
+ for i in nonexisting:
+ print i
+
+ if args.verbose:
+ # Only get detailed obj dependency here since it is slower.
+ GetHeadersFromNinja(args.out_dir, False, d_q)
+ d, d_err = d_q.get()
+ print '\nDetailed dependency info:'
+ for f in missing:
+ print f
+ for cc in d[f]:
+ print ' ', cc
+
+ print '\nMissing headers sorted by number of affected object files:'
+ count = {k: len(v) for (k, v) in d.iteritems()}
+ for f in sorted(count, key=count.get, reverse=True):
+ if f in missing:
+ print count[f], f
+
+ if args.json:
+ # Assume running on the bots. Temporarily return 0 before
+ # https://crbug.com/937847 is fixed.
+ return 0
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main())
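
A typical local invocation, using the flags defined above (the output
directory and whitelist path are examples; build all targets there first, and
note the snippet assumes "python" resolves to Python 2, which the script
targets):

    import subprocess

    # Exit status 1 means missing or non-existing headers were reported.
    rc = subprocess.call([
        'python', 'build/check_gn_headers.py',
        '--out-dir', 'out/Release',
        '--whitelist', 'build/check_gn_headers_whitelist.txt',
        '--verbose',
    ])
    print('check_gn_headers returned', rc)
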
diff --git a/deps/v8/build/check_gn_headers_unittest.py b/deps/v8/build/check_gn_headers_unittest.py
new file mode 100755
index 0000000000..20c3b13897
--- /dev/null
+++ b/deps/v8/build/check_gn_headers_unittest.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import json
+import unittest
+import check_gn_headers
+
+
+ninja_input = r'''
+obj/a.o: #deps 1, deps mtime 123 (VALID)
+ ../../a.cc
+ ../../dir/path/b.h
+ ../../c.hh
+
+obj/b.o: #deps 1, deps mtime 123 (STALE)
+ ../../b.cc
+ ../../dir2/path/b.h
+ ../../c2.hh
+
+obj/c.o: #deps 1, deps mtime 123 (VALID)
+ ../../c.cc
+ ../../build/a.h
+ gen/b.h
+ ../../out/Release/gen/no.h
+ ../../dir3/path/b.h
+ ../../c3.hh
+'''
+
+
+gn_input = json.loads(r'''
+{
+ "others": [],
+ "targets": {
+ "//:All": {
+ },
+ "//:base": {
+ "public": [ "//base/p.h" ],
+ "sources": [ "//base/a.cc", "//base/a.h", "//base/b.hh" ],
+ "visibility": [ "*" ]
+ },
+ "//:star_public": {
+ "public": "*",
+ "sources": [ "//base/c.h", "//tmp/gen/a.h" ],
+ "visibility": [ "*" ]
+ }
+ }
+}
+''')
+
+
+whitelist = r'''
+ white-front.c
+a/b/c/white-end.c # comment
+ dir/white-both.c #more comment
+
+# empty line above
+a/b/c
+'''
+
+
+class CheckGnHeadersTest(unittest.TestCase):
+ def testNinja(self):
+ headers = check_gn_headers.ParseNinjaDepsOutput(
+ ninja_input.split('\n'), 'out/Release', False)
+ expected = {
+ 'dir/path/b.h': ['obj/a.o'],
+ 'c.hh': ['obj/a.o'],
+ 'dir3/path/b.h': ['obj/c.o'],
+ 'c3.hh': ['obj/c.o'],
+ }
+ self.assertEquals(headers, expected)
+
+ def testGn(self):
+ headers = check_gn_headers.ParseGNProjectJSON(gn_input,
+ 'out/Release', 'tmp')
+ expected = set([
+ 'base/a.h',
+ 'base/b.hh',
+ 'base/c.h',
+ 'base/p.h',
+ 'out/Release/gen/a.h',
+ ])
+ self.assertEquals(headers, expected)
+
+ def testWhitelist(self):
+ output = check_gn_headers.ParseWhiteList(whitelist)
+ expected = set([
+ 'white-front.c',
+ 'a/b/c/white-end.c',
+ 'dir/white-both.c',
+ 'a/b/c',
+ ])
+ self.assertEquals(output, expected)
+
+
+if __name__ == '__main__':
+ logging.getLogger().setLevel(logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/check_gn_headers_whitelist.txt b/deps/v8/build/check_gn_headers_whitelist.txt
new file mode 100644
index 0000000000..40a73d0308
--- /dev/null
+++ b/deps/v8/build/check_gn_headers_whitelist.txt
@@ -0,0 +1,357 @@
+# Do not add files to this whitelist unless you are adding a new OS or
+# changing the GN arguments on bots.
+
+ash/accelerators/accelerator_controller_delegate.h
+ash/accelerators/accelerator_controller_delegate_aura.h
+ash/accelerators/accelerator_table.h
+ash/ash_export.h
+ash/frame/frame_header.h
+ash/metrics/task_switch_metrics_recorder.h
+ash/metrics/task_switch_source.h
+ash/metrics/user_metrics_action.h
+ash/metrics/user_metrics_recorder.h
+ash/public/cpp/ash_public_export.h
+ash/public/cpp/ash_switches.h
+ash/public/cpp/config.h
+ash/public/cpp/shelf_types.h
+ash/session/session_observer.h
+ash/shell.h
+ash/system/devicetype_utils.h
+ash/wm/system_modal_container_event_filter_delegate.h
+cc/base/ring_buffer.h
+cc/cc_export.h
+cc/input/browser_controls_state.h
+cc/input/event_listener_properties.h
+cc/input/scrollbar.h
+cc/input/scroller_size_metrics.h
+cc/layers/performance_properties.h
+cc/layers/scrollbar_theme_painter.h
+cc/output/bsp_compare_result.h
+cc/resources/release_callback_impl.h
+cc/resources/return_callback.h
+cc/surfaces/surface_observer.h
+chrome/browser/android/android_theme_resources.h
+chrome/browser/android/resource_id.h
+chrome/browser/chromeos/certificate_provider/certificate_info.h
+chrome/browser/chromeos/certificate_provider/certificate_provider.h
+chrome/browser/chromeos/certificate_provider/certificate_provider_service.h
+chrome/browser/chromeos/certificate_provider/certificate_provider_service_factory.h
+chrome/browser/chromeos/certificate_provider/certificate_requests.h
+chrome/browser/chromeos/certificate_provider/pin_dialog_manager.h
+chrome/browser/chromeos/certificate_provider/sign_requests.h
+chrome/browser/chromeos/certificate_provider/thread_safe_certificate_map.h
+chrome/browser/chromeos/login/easy_unlock/easy_unlock_service_observer.h
+chrome/browser/chromeos/login/signin/oauth2_login_manager.h
+chrome/browser/chromeos/login/signin/oauth2_login_verifier.h
+chrome/browser/chromeos/login/signin/oauth2_token_fetcher.h
+chrome/browser/chromeos/profiles/profile_helper.h
+chrome/browser/chromeos/settings/cros_settings.h
+chrome/browser/chromeos/ui/request_pin_view.h
+chrome/browser/component_updater/component_installer_errors.h
+chrome/browser/download/download_file_icon_extractor.h
+chrome/browser/extensions/api/networking_cast_private/chrome_networking_cast_private_delegate.h
+chrome/browser/extensions/api/omnibox/omnibox_api_testbase.h
+chrome/browser/mac/bluetooth_utility.h
+chrome/browser/media/router/mojo/media_route_provider_util_win.h
+chrome/browser/media/webrtc/desktop_media_list_ash.h
+chrome/browser/media/webrtc/desktop_media_list_observer.h
+chrome/browser/media/webrtc/rtp_dump_type.h
+chrome/browser/media_galleries/media_file_system_context.h
+chrome/browser/notifications/displayed_notifications_dispatch_callback.h
+chrome/browser/permissions/permission_queue_controller.h
+chrome/browser/prefs/active_profile_pref_service.h
+chrome/browser/ui/android/content_settings/subresource_filter_infobar_delegate.h
+chrome/browser/ui/app_icon_loader_delegate.h
+chrome/browser/ui/app_list/app_list_syncable_service_factory.h
+chrome/browser/ui/ash/ash_util.h
+chrome/browser/ui/ash/multi_user/multi_user_util.h
+chrome/browser/ui/network_profile_bubble.h
+chrome/browser/ui/views/frame/browser_frame_header_ash.h
+chrome/browser/ui/webui/large_icon_source.h
+chrome/common/mac/app_shim_launch.h
+chrome/common/mac/app_shim_param_traits.h
+chrome/install_static/chromium_install_modes.h
+chrome/install_static/install_constants.h
+chrome/install_static/install_details.h
+chrome/install_static/install_modes.h
+chrome/install_static/install_util.h
+chrome/install_static/test/scoped_install_details.h
+chrome/installer/util/browser_distribution.h
+chrome/installer/util/google_update_constants.h
+chrome/installer/util/google_update_settings.h
+chrome/installer/util/util_constants.h
+components/browser_watcher/features.h
+components/browser_watcher/stability_paths.h
+components/cast_certificate/cast_crl_root_ca_cert_der-inc.h
+components/cdm/browser/cdm_message_filter_android.h
+components/contextual_search/browser/contextual_search_js_api_handler.h
+components/cryptauth/connection_finder.h
+components/cryptauth/connection_observer.h
+components/data_reduction_proxy/core/browser/data_use_group.h
+components/data_reduction_proxy/core/browser/data_use_group_provider.h
+components/data_use_measurement/core/url_request_classifier.h
+components/device_event_log/device_event_log_export.h
+components/dom_distiller/core/font_family_list.h
+components/dom_distiller/core/theme_list.h
+components/login/login_export.h
+components/nacl/browser/nacl_browser_delegate.h
+components/nacl/renderer/ppb_nacl_private.h
+components/omnibox/browser/autocomplete_i18n.h
+components/omnibox/browser/autocomplete_provider_client.h
+components/omnibox/browser/autocomplete_provider_listener.h
+components/password_manager/core/browser/keychain_migration_status_mac.h
+components/policy/core/browser/configuration_policy_handler_parameters.h
+components/policy/proto/policy_proto_export.h
+components/rlz/rlz_tracker_delegate.h
+components/session_manager/session_manager_types.h
+components/sessions/core/sessions_export.h
+components/sync/engine/connection_status.h
+components/sync/engine/net/network_time_update_callback.h
+components/translate/core/browser/translate_infobar_delegate.h
+components/user_manager/user.h
+components/user_manager/user_image/user_image.h
+components/user_manager/user_manager.h
+components/viz/display_compositor/display_provider.h
+components/viz/viz_export.h
+components/wifi/wifi_export.h
+components/wifi/wifi_service.h
+content/browser/background_fetch/background_fetch_constants.h
+content/browser/service_worker/service_worker_response_type.h
+content/common/mac/attributed_string_coder.h
+content/public/browser/context_factory.h
+content/public/browser/media_observer.h
+content/public/common/gpu_stream_constants.h
+content/renderer/external_popup_menu.h
+content/shell/android/shell_descriptors.h
+device/media_transfer_protocol/media_transfer_protocol_manager.h
+extensions/browser/api/clipboard/clipboard_api.h
+extensions/browser/api/networking_config/networking_config_service_factory.h
+extensions/browser/api/webcam_private/webcam.h
+extensions/browser/api/webcam_private/webcam_private_api.h
+extensions/browser/entry_info.h
+extensions/browser/extension_event_histogram_value.h
+extensions/browser/extension_function_histogram_value.h
+google_apis/gcm/base/encryptor.h
+google_apis/gcm/base/gcm_export.h
+gpu/GLES2/gl2chromium.h
+gpu/GLES2/gl2chromium_autogen.h
+gpu/GLES2/gl2extchromium.h
+gpu/command_buffer/client/context_support.h
+gpu/command_buffer/client/gles2_implementation_unittest_autogen.h
+gpu/command_buffer/client/gles2_interface_autogen.h
+gpu/command_buffer/client/gles2_interface_stub_autogen.h
+gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h
+gpu/command_buffer/client/gpu_control_client.h
+gpu/command_buffer/client/ref_counted.h
+gpu/command_buffer/client/shared_memory_limits.h
+gpu/command_buffer/common/command_buffer_shared.h
+gpu/command_buffer/common/gles2_cmd_utils_autogen.h
+gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h
+gpu/command_buffer/common/gpu_memory_allocation.h
+gpu/command_buffer/service/gl_stream_texture_image.h
+gpu/command_buffer/service/gles2_cmd_decoder_unittest_extensions_autogen.h
+gpu/command_buffer/service/memory_tracking.h
+gpu/config/gpu_lists_version.h
+gpu/gles2_conform_support/gtf/gtf_stubs.h
+gpu/gpu_export.h
+headless/lib/headless_macros.h
+headless/public/headless_tab_socket.h
+ipc/ipc_channel_proxy_unittest_messages.h
+ipc/ipc_message_null_macros.h
+ipc/param_traits_size_macros.h
+media/audio/audio_logging.h
+media/audio/sounds/test_data.h
+media/base/routing_token_callback.h
+media/base/video_renderer_sink.h
+media/cast/common/mod_util.h
+media/cast/net/rtcp/rtcp_session.h
+media/filters/ffmpeg_aac_bitstream_converter.h
+media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h
+media/filters/h264_to_annex_b_bitstream_converter.h
+media/formats/mp4/avc.h
+media/formats/mp4/bitstream_converter.h
+media/formats/mp4/fourccs.h
+media/formats/mp4/rcheck.h
+media/formats/mpeg/adts_stream_parser.h
+media/formats/mpeg/mpeg1_audio_stream_parser.h
+media/formats/mpeg/mpeg_audio_stream_parser_base.h
+media/gpu/media_gpu_export.h
+mojo/core/broker_messages.h
+mojo/core/system_impl_export.h
+mojo/public/cpp/bindings/strong_associated_binding_set.h
+mojo/public/cpp/bindings/tests/mojo_test_blink_export.h
+mojo/public/cpp/test_support/test_support.h
+net/base/winsock_init.h
+net/cert/cert_type.h
+net/cert/cert_verify_proc_android.h
+net/cert/scoped_nss_types.h
+net/dns/notify_watcher_mac.h
+net/http/http_status_code_list.h
+net/http/transport_security_state_static.h
+net/quic/core/session_notifier_interface.h
+ppapi/cpp/pass_ref.h
+ppapi/lib/gl/include/GLES2/gl2.h
+ppapi/lib/gl/include/GLES2/gl2ext.h
+ppapi/lib/gl/include/GLES2/gl2platform.h
+ppapi/lib/gl/include/KHR/khrplatform.h
+ppapi/nacl_irt/irt_manifest.h
+ppapi/nacl_irt/public/irt_ppapi.h
+ppapi/native_client/src/shared/ppapi_proxy/ppruntime.h
+ppapi/native_client/src/untrusted/pnacl_irt_shim/irt_shim_ppapi.h
+ppapi/native_client/src/untrusted/pnacl_irt_shim/pnacl_shim.h
+ppapi/native_client/src/untrusted/pnacl_irt_shim/shim_ppapi.h
+ppapi/proxy/dispatch_reply_message.h
+ppapi/proxy/plugin_proxy_delegate.h
+ppapi/proxy/plugin_resource_callback.h
+ppapi/proxy/ppapi_proxy_export.h
+ppapi/proxy/resource_message_filter.h
+ppapi/proxy/video_decoder_constants.h
+ppapi/shared_impl/api_id.h
+ppapi/shared_impl/dir_contents.h
+ppapi/shared_impl/ppapi_shared_export.h
+ppapi/shared_impl/singleton_resource_id.h
+remoting/base/chromoting_event_log_writer.h
+remoting/base/logging.h
+remoting/client/display/gl_renderer_delegate.h
+remoting/client/display/gl_texture_ids.h
+remoting/codec/webrtc_video_encoder.h
+remoting/host/linux/x11_keyboard.h
+remoting/host/worker_process_ipc_delegate.h
+remoting/protocol/audio_source.h
+remoting/protocol/audio_stream.h
+remoting/protocol/cursor_shape_stub.h
+remoting/protocol/message_channel_factory.h
+remoting/protocol/test_event_matchers.h
+remoting/protocol/video_feedback_stub.h
+remoting/protocol/video_stream.h
+sandbox/linux/system_headers/capability.h
+skia/ext/convolver_mips_dspr2.h
+skia/ext/skia_commit_hash.h
+testing/gmock_mutant.h
+third_party/blink/renderer/bindings/modules/v8/serialization/WebCryptoSubTags.h
+third_party/blink/renderer/core/animation/CSSInterpolationEnvironment.h
+third_party/blink/renderer/core/animation/SVGInterpolationEnvironment.h
+third_party/blink/renderer/core/css/resolver/StyleBuilder.h
+third_party/blink/renderer/core/css/threaded/MultiThreadedTestUtil.h
+third_party/blink/renderer/core/css/zoomAdjustedPixelValue.h
+third_party/blink/renderer/core/dom/ArrayBufferViewHelpers.h
+third_party/blink/renderer/core/editing/finder/FindOptions.h
+third_party/blink/renderer/core/paint/FindPaintOffsetAndVisualRectNeedingUpdate.h
+third_party/blink/renderer/core/style/ShapeValue.h
+third_party/blink/renderer/core/style/TransformOrigin.h
+third_party/blink/renderer/platform/EncryptedMediaRequest.h
+third_party/blink/renderer/platform/fonts/FontSelector.h
+third_party/blink/renderer/platform/fonts/Glyph.h
+third_party/blink/renderer/platform/graphics/cpu/arm/WebGLImageConversionNEON.h
+third_party/blink/renderer/platform/graphics/cpu/mips/WebGLImageConversionMSA.h
+third_party/blink/renderer/platform/graphics/paint/PaintImage.h
+third_party/blink/renderer/platform/scheduler/base/task_queue.h
+third_party/blink/renderer/platform/text/TabSize.h
+third_party/blink/renderer/platform/text/TextDirection.h
+third_party/blink/renderer/platform/transforms/TransformOperation.h
+third_party/blink/public/platform/WebSourceLocation.h
+third_party/blink/public/platform/WebTouchInfo.h
+third_party/blink/public/platform/modules/media_capabilities/WebMediaCapabilitiesInfo.h
+third_party/cacheinvalidation/src/google/cacheinvalidation/impl/build_constants.h
+third_party/expat/files/lib/ascii.h
+third_party/expat/files/lib/asciitab.h
+third_party/expat/files/lib/expat_config.h
+third_party/expat/files/lib/expat_external.h
+third_party/expat/files/lib/iasciitab.h
+third_party/expat/files/lib/internal.h
+third_party/expat/files/lib/latin1tab.h
+third_party/expat/files/lib/nametab.h
+third_party/expat/files/lib/utf8tab.h
+third_party/expat/files/lib/xmlrole.h
+third_party/expat/files/lib/xmltok.h
+third_party/expat/files/lib/xmltok_impl.h
+third_party/harfbuzz-ng/src/hb-ot-cbdt-table.hh
+third_party/harfbuzz-ng/src/hb-ot-cmap-table.hh
+third_party/harfbuzz-ng/src/hb-ot-glyf-table.hh
+third_party/harfbuzz-ng/src/hb-ot-layout-jstf-table.hh
+third_party/harfbuzz-ng/src/hb-ot-os2-table.hh
+third_party/hunspell/src/hunspell/hunvisapi.h
+third_party/khronos/EGL/egl.h
+third_party/khronos/EGL/eglext.h
+third_party/khronos/EGL/eglplatform.h
+third_party/khronos/GLES2/gl2.h
+third_party/khronos/GLES2/gl2ext.h
+third_party/khronos/GLES2/gl2platform.h
+third_party/khronos/GLES3/gl3.h
+third_party/khronos/GLES3/gl3platform.h
+third_party/khronos/KHR/khrplatform.h
+third_party/leveldatabase/chromium_logger.h
+third_party/libaddressinput/chromium/addressinput_util.h
+third_party/libphonenumber/phonenumber_api.h
+third_party/libudev/libudev0.h
+third_party/libudev/libudev1.h
+third_party/libvpx/source/config/linux/x64/vp8_rtcd.h
+third_party/libvpx/source/config/linux/x64/vp9_rtcd.h
+third_party/libvpx/source/config/linux/x64/vpx_config.h
+third_party/libvpx/source/config/linux/x64/vpx_dsp_rtcd.h
+third_party/libvpx/source/config/linux/x64/vpx_scale_rtcd.h
+third_party/libvpx/source/config/nacl/vp8_rtcd.h
+third_party/libvpx/source/config/nacl/vp9_rtcd.h
+third_party/libvpx/source/config/nacl/vpx_config.h
+third_party/libvpx/source/config/nacl/vpx_dsp_rtcd.h
+third_party/libvpx/source/config/nacl/vpx_scale_rtcd.h
+third_party/libvpx/source/config/vpx_version.h
+third_party/libxslt/src/libxslt/xsltwin32config.h
+third_party/opus/src/src/opus_private.h
+third_party/opus/src/tests/test_opus_common.h
+third_party/protobuf/src/google/protobuf/compiler/csharp/csharp_names.h
+third_party/protobuf/src/google/protobuf/compiler/javanano/javanano_params.h
+third_party/qcms/src/halffloat.h
+third_party/qcms/src/tests/qcms_test_util.h
+third_party/qcms/src/tests/timing.h
+third_party/snappy/linux/config.h
+third_party/speech-dispatcher/libspeechd.h
+third_party/sqlite/sqlite3.h
+third_party/tcmalloc/chromium/src/addressmap-inl.h
+third_party/tcmalloc/chromium/src/base/basictypes.h
+third_party/tcmalloc/chromium/src/base/dynamic_annotations.h
+third_party/tcmalloc/chromium/src/base/googleinit.h
+third_party/tcmalloc/chromium/src/base/linux_syscall_support.h
+third_party/tcmalloc/chromium/src/base/spinlock_linux-inl.h
+third_party/tcmalloc/chromium/src/base/stl_allocator.h
+third_party/tcmalloc/chromium/src/base/thread_annotations.h
+third_party/tcmalloc/chromium/src/base/thread_lister.h
+third_party/tcmalloc/chromium/src/gperftools/malloc_extension_c.h
+third_party/tcmalloc/chromium/src/gperftools/malloc_hook_c.h
+third_party/tcmalloc/chromium/src/gperftools/tcmalloc.h
+third_party/tcmalloc/chromium/src/heap-profile-stats.h
+third_party/tcmalloc/chromium/src/libc_override.h
+third_party/tcmalloc/chromium/src/malloc_hook_mmap_linux.h
+third_party/tcmalloc/chromium/src/packed-cache-inl.h
+third_party/tcmalloc/chromium/src/page_heap_allocator.h
+third_party/tcmalloc/chromium/src/pagemap.h
+third_party/tcmalloc/chromium/src/stacktrace_config.h
+third_party/tcmalloc/chromium/src/stacktrace_x86-inl.h
+third_party/tcmalloc/chromium/src/system-alloc.h
+third_party/tcmalloc/chromium/src/tcmalloc_guard.h
+third_party/wayland/include/config.h
+third_party/wayland/include/src/wayland-version.h
+third_party/woff2/src/port.h
+third_party/yasm/source/config/linux/config.h
+third_party/yasm/source/config/linux/libyasm-stdint.h
+third_party/zlib/contrib/minizip/crypt.h
+tools/gn/ordered_set.h
+tools/ipc_fuzzer/message_lib/all_message_null_macros.h
+ui/app_list/app_list_export.h
+ui/app_list/app_list_item.h
+ui/app_list/app_list_switches.h
+ui/base/clipboard/clipboard_test_template.h
+ui/events/keycodes/keyboard_codes_posix.h
+ui/gfx/overlay_transform.h
+ui/gfx/scoped_ns_graphics_context_save_gstate_mac.h
+ui/gfx/swap_result.h
+ui/gfx/sys_color_change_listener.h
+ui/gl/GL/glextchromium.h
+ui/gl/gl_bindings_api_autogen_egl.h
+ui/gl/gl_bindings_api_autogen_gl.h
+ui/gl/gl_bindings_api_autogen_glx.h
+ui/gl/gpu_preference.h
+ui/gl/gpu_switching_observer.h
+ui/gl/progress_reporter.h
+ui/ozone/ozone_base_export.h
+ui/ozone/public/ozone_switches.h
diff --git a/deps/v8/build/check_return_value.py b/deps/v8/build/check_return_value.py
new file mode 100755
index 0000000000..c659d1e967
--- /dev/null
+++ b/deps/v8/build/check_return_value.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
+
+import os
+import subprocess
+import sys
+
+devnull = open(os.devnull, 'wb')
+if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+ print 1
+else:
+ print 0
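
For example, wrapping the standard POSIX true/false commands (exit status 0
maps to "1", everything else to "0"; assumes "python" resolves to Python 2,
which this script targets):

    import subprocess

    # The wrapper itself always exits 0; the result is on its stdout.
    for cmd in ('true', 'false'):
        out = subprocess.check_output(
            ['python', 'build/check_return_value.py', cmd])
        print(cmd, '->', out.strip())
    # true -> 1
    # false -> 0
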
diff --git a/deps/v8/build/chromeos/PRESUBMIT.py b/deps/v8/build/chromeos/PRESUBMIT.py
new file mode 100644
index 0000000000..5700f542bd
--- /dev/null
+++ b/deps/v8/build/chromeos/PRESUBMIT.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for build/chromeos/.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ return input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ pylintrc='pylintrc')
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/deps/v8/build/chromeos/create_test_runner_script.py b/deps/v8/build/chromeos/create_test_runner_script.py
new file mode 100755
index 0000000000..0cbe38bab7
--- /dev/null
+++ b/deps/v8/build/chromeos/create_test_runner_script.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a script that runs a CrOS VM test by delegating to
+build/chromeos/test_runner.py.
+"""
+
+import argparse
+import os
+import sys
+
+
+SCRIPT_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/chromeos/create_test_runner_script.py
+
+import os
+import sys
+
+def main():
+ script_directory = os.path.dirname(__file__)
+ def ResolvePath(path):
+ return os.path.abspath(os.path.join(script_directory, path))
+
+ vm_test_script = os.path.abspath(
+ os.path.join(script_directory, '{vm_test_script}'))
+
+ vm_args = {vm_test_args}
+ path_args = {vm_test_path_args}
+ for arg, path in path_args:
+ vm_args.extend([arg, ResolvePath(path)])
+
+ os.execv(vm_test_script,
+ [vm_test_script] + vm_args + sys.argv[1:])
+
+if __name__ == '__main__':
+ sys.exit(main())
+"""
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--script-output-path')
+ parser.add_argument('--output-directory')
+ parser.add_argument('--test-exe')
+ parser.add_argument('--runtime-deps-path')
+ parser.add_argument('--cros-cache')
+ parser.add_argument('--board')
+ parser.add_argument('--use-vm', action='store_true')
+ parser.add_argument('--deploy-chrome', action='store_true')
+ parser.add_argument('--suite-name')
+ parser.add_argument('--tast-attr-expr')
+ parser.add_argument('--tast-tests', action='append')
+ args = parser.parse_args(args)
+
+ def RelativizePathToScript(path):
+ return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+ run_test_path = RelativizePathToScript(
+ os.path.join(os.path.dirname(__file__), 'test_runner.py'))
+
+ vm_test_args = [
+ '--board', args.board,
+ '-v',
+ ]
+ if args.use_vm:
+ vm_test_args += ['--use-vm']
+
+ if args.test_exe:
+ vm_test_args.extend([
+ 'vm-test',
+ '--test-exe',
+ args.test_exe,
+ ])
+ elif args.tast_attr_expr or args.tast_tests:
+ vm_test_args.extend([
+ 'tast',
+ '--suite-name',
+ args.suite_name,
+ ])
+ if args.tast_attr_expr:
+ vm_test_args.extend([
+ '--attr-expr',
+ args.tast_attr_expr,
+ ])
+ else:
+ for t in args.tast_tests:
+ vm_test_args.extend(['-t', t])
+ else:
+ vm_test_args.append('host-cmd')
+ if args.deploy_chrome:
+ vm_test_args.append('--deploy-chrome')
+
+ vm_test_path_args = [
+ ('--cros-cache', RelativizePathToScript(args.cros_cache)),
+ ]
+ if args.runtime_deps_path:
+ vm_test_path_args.append(
+ ('--runtime-deps-path', RelativizePathToScript(args.runtime_deps_path)))
+ if args.output_directory:
+ vm_test_path_args.append(
+ ('--path-to-outdir', RelativizePathToScript(args.output_directory)))
+
+ with open(args.script_output_path, 'w') as script:
+ script.write(SCRIPT_TEMPLATE.format(
+ vm_test_script=run_test_path,
+ vm_test_args=str(vm_test_args),
+ vm_test_path_args=str(vm_test_path_args)))
+
+ os.chmod(args.script_output_path, 0750)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
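
For illustration, a hypothetical way to generate a wrapper by calling main()
directly (run under Python 2, which this script targets; every path below is
made up, and the directories must already exist):

    import create_test_runner_script

    create_test_runner_script.main([
        '--script-output-path', 'out/Release/bin/run_base_unittests',
        '--output-directory', 'out/Release',
        '--test-exe', 'base_unittests',
        '--runtime-deps-path', 'out/Release/base_unittests.runtime_deps',
        '--cros-cache', 'build/cros_cache',
        '--board', 'amd64-generic',
        '--use-vm',
    ])
    # The generated wrapper execs build/chromeos/test_runner.py with these
    # arguments baked in, forwarding anything extra from its own command line.
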
diff --git a/deps/v8/build/chromeos/pylintrc b/deps/v8/build/chromeos/pylintrc
new file mode 100644
index 0000000000..2a721bf270
--- /dev/null
+++ b/deps/v8/build/chromeos/pylintrc
@@ -0,0 +1,15 @@
+[FORMAT]
+
+max-line-length=80
+
+[MESSAGES CONTROL]
+
+disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
+
+[REPORTS]
+
+reports=no
+
+[VARIABLES]
+
+dummy-variables-rgx=^_.*$|dummy
diff --git a/deps/v8/build/chromeos/test_runner.py b/deps/v8/build/chromeos/test_runner.py
new file mode 100755
index 0000000000..be3d7b6bac
--- /dev/null
+++ b/deps/v8/build/chromeos/test_runner.py
@@ -0,0 +1,765 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import logging
+import os
+import pipes
+import re
+import signal
+import socket
+import sys
+import tempfile
+
+import psutil # pylint: disable=import-error
+
+CHROMIUM_SRC_PATH = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..'))
+
+# Use the android test-runner's gtest results support library for generating
+# output json ourselves.
+sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'android'))
+from pylib.base import base_test_result # pylint: disable=import-error
+from pylib.results import json_results # pylint: disable=import-error
+
+# Use luci-py's subprocess42.py
+sys.path.insert(
+ 0, os.path.join(CHROMIUM_SRC_PATH, 'tools', 'swarming_client', 'utils'))
+import subprocess42 # pylint: disable=import-error
+
+DEFAULT_CROS_CACHE = os.path.abspath(os.path.join(
+ CHROMIUM_SRC_PATH, 'build', 'cros_cache'))
+CHROMITE_PATH = os.path.abspath(os.path.join(
+ CHROMIUM_SRC_PATH, 'third_party', 'chromite'))
+CROS_RUN_TEST_PATH = os.path.abspath(os.path.join(
+ CHROMITE_PATH, 'bin', 'cros_run_test'))
+
+# GN target that corresponds to the cros browser sanity test.
+SANITY_TEST_TARGET = 'cros_vm_sanity_test'
+
+# This is a special hostname that resolves to a different DUT in the lab
+# depending on which lab machine you're on.
+LAB_DUT_HOSTNAME = 'variable_chromeos_device_hostname'
+
+
+class TestFormatError(Exception):
+ pass
+
+
+class RemoteTest(object):
+
+ # This is a basic shell script that can be appended to in order to invoke the
+ # test on the device.
+ BASIC_SHELL_SCRIPT = [
+ '#!/bin/sh',
+
+      # /home is mounted with "noexec" on the device, but some of our tools
+      # and tests use the home dir as a workspace (e.g. vpython downloads
+      # python binaries to ~/.vpython-root). /tmp doesn't have this
+      # restriction, so change the location of the home dir for the
+      # duration of the test.
+ 'export HOME=/tmp',
+ ]
+
+ def __init__(self, args, unknown_args):
+ self._additional_args = unknown_args
+ self._path_to_outdir = args.path_to_outdir
+ self._test_launcher_summary_output = args.test_launcher_summary_output
+ self._logs_dir = args.logs_dir
+ self._use_vm = args.use_vm
+
+ self._retries = 0
+ self._timeout = None
+
+ # The location on disk of a shell script that can be optionally used to
+ # invoke the test on the device. If it's not set, we assume self._test_cmd
+ # contains the test invocation.
+ self._on_device_script = None
+
+ self._test_cmd = [
+ CROS_RUN_TEST_PATH,
+ '--board', args.board,
+ '--cache-dir', args.cros_cache,
+ ]
+ if args.use_vm:
+ self._test_cmd += [
+ '--start',
+        # Don't persist any filesystem changes after the VM shuts down.
+ '--copy-on-write',
+ '--device', 'localhost'
+ ]
+ else:
+ self._test_cmd += [
+ '--device', args.device if args.device else LAB_DUT_HOSTNAME
+ ]
+ if args.logs_dir:
+ self._test_cmd += [
+ '--results-src', '/var/log/',
+ '--results-dest-dir', args.logs_dir,
+ ]
+
+ # This environment variable is set for tests that have been instrumented
+ # for code coverage. Its incoming value is expected to be a location
+ # inside a subdirectory of result_dir above. This is converted to an
+ # absolute path that the vm is able to write to, and passed in the
+ # --results-src flag to cros_run_vm_test for copying out of the vm before
+ # its termination.
+ self._llvm_profile_var = None
+ if os.environ.get('LLVM_PROFILE_FILE'):
+ _, llvm_profile_file = os.path.split(os.environ['LLVM_PROFILE_FILE'])
+ self._llvm_profile_var = '/tmp/profraw/%s' % llvm_profile_file
+
+      # This should make the VM test runner copy the profiling data out.
+ self._test_cmd += [
+ '--results-src', '/tmp/profraw'
+ ]
+
+ self._test_env = setup_env()
+
+ @property
+ def suite_name(self):
+ raise NotImplementedError('Child classes need to define suite name.')
+
+ @property
+ def test_cmd(self):
+ return self._test_cmd
+
+ def write_test_script_to_disk(self, script_contents):
+ # Since we're using an on_device_script to invoke the test, we'll need to
+ # set cwd.
+ self._test_cmd += [
+ '--remote-cmd',
+ '--cwd', os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
+ ]
+ logging.info('Running the following command on the device:')
+ logging.info('\n' + '\n'.join(script_contents))
+ fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir)
+ os.fchmod(fd, 0755)
+ with os.fdopen(fd, 'wb') as f:
+ f.write('\n'.join(script_contents) + '\n')
+ return tmp_path
+
+ def run_test(self):
+ # Traps SIGTERM and kills all child processes of cros_run_test when it's
+ # caught. This will allow us to capture logs from the device if a test hangs
+ # and gets timeout-killed by swarming. See also:
+ # https://chromium.googlesource.com/infra/luci/luci-py/+/master/appengine/swarming/doc/Bot.md#graceful-termination_aka-the-sigterm-and-sigkill-dance
+ test_proc = None
+ def _kill_child_procs(trapped_signal, _):
+ logging.warning(
+ 'Received signal %d. Killing child processes of test.',
+ trapped_signal)
+ if not test_proc or not test_proc.pid:
+ # This shouldn't happen?
+ logging.error('Test process not running.')
+ return
+ for child in psutil.Process(test_proc.pid).children():
+ logging.warning('Killing process %s', child)
+ child.kill()
+
+ signal.signal(signal.SIGTERM, _kill_child_procs)
+
+ for i in xrange(self._retries+1):
+ logging.info('########################################')
+ logging.info('Test attempt #%d', i)
+ logging.info('########################################')
+ test_proc = subprocess42.Popen(
+ self._test_cmd, stdout=sys.stdout, stderr=sys.stderr,
+ env=self._test_env)
+ try:
+ test_proc.wait(timeout=self._timeout)
+ except subprocess42.TimeoutExpired:
+ logging.error('Test timed out. Sending SIGTERM.')
+ # SIGTERM the proc and wait 10s for it to close.
+ test_proc.terminate()
+ try:
+ test_proc.wait(timeout=10)
+ except subprocess42.TimeoutExpired:
+ # If it hasn't closed in 10s, SIGKILL it.
+ logging.error('Test did not exit in time. Sending SIGKILL.')
+ test_proc.kill()
+ test_proc.wait()
+      logging.info('Test exited with %d.', test_proc.returncode)
+ if test_proc.returncode == 0:
+ break
+
+ self.post_run(test_proc.returncode)
+ return test_proc.returncode
+
+ def post_run(self, return_code):
+ if self._on_device_script:
+ os.remove(self._on_device_script)
+ # Create a simple json results file for a test run. The results will contain
+ # only one test (suite_name), and will either be a PASS or FAIL depending on
+ # return_code.
+ if self._test_launcher_summary_output:
+ result = (base_test_result.ResultType.FAIL if return_code else
+ base_test_result.ResultType.PASS)
+ suite_result = base_test_result.BaseTestResult(self.suite_name, result)
+ run_results = base_test_result.TestRunResults()
+ run_results.AddResult(suite_result)
+ with open(self._test_launcher_summary_output, 'w') as f:
+ json.dump(json_results.GenerateResultsDict([run_results]), f)
+
+
+class TastTest(RemoteTest):
+
+ def __init__(self, args, unknown_args):
+ super(TastTest, self).__init__(args, unknown_args)
+
+ self._suite_name = args.suite_name
+ self._tests = args.tests
+ self._conditional = args.conditional
+ self._use_host_tast = args.use_host_tast_bin
+
+ @property
+ def suite_name(self):
+ return self._suite_name
+
+ def build_test_command(self):
+ if '--gtest_filter=%s' % self.suite_name in self._additional_args:
+ logging.info(
+ 'GTest filtering not supported for tast tests. The '
+ '--gtest_filter arg will be ignored.')
+ self._additional_args.remove('--gtest_filter=%s' % self.suite_name)
+ if any(arg.startswith('--gtest_repeat') for arg in self._additional_args):
+ logging.info(
+ '--gtest_repeat not supported for tast tests. The arg will be '
+ 'ignored.')
+ self._additional_args = [
+ arg for arg in self._additional_args if not arg.startswith(
+ '--gtest_repeat')]
+
+ if self._additional_args:
+ logging.error(
+ 'Tast tests should not have additional args. These will be '
+ 'ignored: %s', self._additional_args)
+
+ # VMs don't have the disk space for an unstripped version of Chrome, so only
+ # strip when running on VMs.
+ if not self._use_vm:
+ self._test_cmd.append('--nostrip')
+
+ self._test_cmd += [
+ '--deploy',
+ '--mount',
+ '--build-dir', os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
+ ]
+
+ # Coverage tests require some special pre-test setup, so use an
+ # on_device_script in that case. For all other tests, use cros_run_test's
+ # built-in '--tast' option. This gives us much better results reporting.
+ # TODO(bpastene): s/True/self._llvm_profile_var/ once we parse Tast results.
+ if not self._use_host_tast:
+ # Build the shell script that will be used on the device to invoke the
+ # test.
+ device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
+ if self._llvm_profile_var:
+ device_test_script_contents += [
+ 'echo "LLVM_PROFILE_FILE=%s" >> /etc/chrome_dev.conf' % (
+ self._llvm_profile_var)
+ ]
+
+ local_test_runner_cmd = ['local_test_runner', '-waituntilready']
+ if self._use_vm:
+ # If we're running tests in VMs, tell the test runner to skip tests that
+ # aren't compatible.
+ local_test_runner_cmd.append('-extrauseflags=tast_vm')
+ if self._conditional:
+ local_test_runner_cmd.append(pipes.quote(self._conditional))
+ else:
+ local_test_runner_cmd.extend(self._tests)
+ device_test_script_contents.append(' '.join(local_test_runner_cmd))
+
+ self._on_device_script = self.write_test_script_to_disk(
+ device_test_script_contents)
+
+ self._test_cmd += [
+ '--files', os.path.relpath(self._on_device_script),
+ '--',
+ './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
+ ]
+ else:
+ self._test_cmd += [
+ # Since we're not in a chroot, the tast bin won't automatically handle
+ # ssh auth. So point it to the ssh keys in chromite.
+ '--private-key',
+ os.path.join(CHROMITE_PATH, 'ssh_keys', 'testing_rsa'),
+ ]
+ # Capture tast's results in the logs dir as well.
+ if self._logs_dir:
+ self._test_cmd += [
+ '--results-dir', self._logs_dir,
+ ]
+ if self._conditional:
+ # Don't use pipes.quote() here. Something funky happens with the arg
+ # as it gets passed down from cros_run_test to tast. (Tast picks up the
+ # escaping single quotes and complains that the conditional "must be
+ # within parentheses".)
+ self._test_cmd.append('--tast=%s' % self._conditional)
+ else:
+ self._test_cmd.append('--tast')
+ self._test_cmd.extend(self._tests)
+
+
+class GTestTest(RemoteTest):
+
+ _FILE_BLACKLIST = [
+ re.compile(r'.*build/chromeos.*'),
+ re.compile(r'.*build/cros_cache.*'),
+ re.compile(r'.*third_party/chromite.*'),
+ ]
+
+ def __init__(self, args, unknown_args):
+ super(GTestTest, self).__init__(args, unknown_args)
+
+ self._test_exe = args.test_exe
+ self._runtime_deps_path = args.runtime_deps_path
+ self._vpython_dir = args.vpython_dir
+
+ self._test_launcher_shard_index = args.test_launcher_shard_index
+ self._test_launcher_total_shards = args.test_launcher_total_shards
+
+ self._on_device_script = None
+
+ @property
+ def suite_name(self):
+ return self._test_exe
+
+ def build_test_command(self):
+ # To keep things easy for us, ensure both types of output locations are
+ # the same.
+ if self._test_launcher_summary_output and self._logs_dir:
+ json_out_dir = os.path.dirname(self._test_launcher_summary_output) or '.'
+ if os.path.abspath(json_out_dir) != os.path.abspath(self._logs_dir):
+ raise TestFormatError(
+ '--test-launcher-summary-output and --logs-dir must point to '
+ 'the same directory.')
+
+ if self._test_launcher_summary_output:
+ result_dir, result_file = os.path.split(
+ self._test_launcher_summary_output)
+      # If args.test_launcher_summary_output is a file in cwd, result_dir will
+      # be an empty string; replace it with '.' in that case so cros_run_test
+      # can handle it correctly.
+ if not result_dir:
+ result_dir = '.'
+ device_result_file = '/tmp/%s' % result_file
+ self._test_cmd += [
+ '--results-src', device_result_file,
+ '--results-dest-dir', result_dir,
+ ]
+
+ # Build the shell script that will be used on the device to invoke the test.
+ device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
+ if self._llvm_profile_var:
+ device_test_script_contents += [
+ 'export LLVM_PROFILE_FILE=%s'% self._llvm_profile_var,
+ ]
+
+ if self._vpython_dir:
+ vpython_spec_path = os.path.relpath(
+ os.path.join(CHROMIUM_SRC_PATH, '.vpython'),
+ self._path_to_outdir)
+ # Initialize the vpython cache. This can take 10-20s, and some tests
+ # can't afford to wait that long on the first invocation.
+ device_test_script_contents.extend([
+ 'export PATH=$PATH:$PWD/%s' % (self._vpython_dir),
+ 'vpython -vpython-spec %s -vpython-tool install' % (
+ vpython_spec_path),
+ ])
+
+ # Load vivid before running capture_unittests
+ # TODO(crbug.com/904730): Once we start loading vivid in init service,
+ # we can remove this code.
+ if self._test_exe == 'capture_unittests':
+ device_test_script_contents.append(
+ 'echo "test0000" | sudo -S modprobe vivid n_devs=1 node_types=0x1')
+
+ test_invocation = (
+ './%s --test-launcher-shard-index=%d '
+ '--test-launcher-total-shards=%d' % (
+ self._test_exe, self._test_launcher_shard_index,
+ self._test_launcher_total_shards)
+ )
+ if self._test_launcher_summary_output:
+ test_invocation += ' --test-launcher-summary-output=%s' % (
+ device_result_file)
+ if self._additional_args:
+ test_invocation += ' %s' % ' '.join(self._additional_args)
+
+ if self._test_exe == 'interactive_ui_tests':
+ # interactive_ui_tests needs some special setup. See crbug.com/946685#c4
+ # TODO(bpastene): Put all this behind a flag if more suites need it.
+ device_test_script_contents += [
+ 'stop ui',
+ ]
+ # The UI service on the device owns the chronos user session, so shutting
+ # it down as chronos kills the entire execution of the test. So we'll have
+ # to run as root up until the test invocation.
+ test_invocation = 'su chronos -c -- "%s"' % test_invocation
+ # And we'll need to chown everything since cros_run_test's "--as-chronos"
+ # option normally does that for us.
+ device_test_script_contents.append('chown -R chronos: ../..')
+ else:
+ self._test_cmd += [
+ # Some tests fail as root, so run as the less privileged user
+ # 'chronos'.
+ '--as-chronos',
+ ]
+
+ device_test_script_contents.append(test_invocation)
+
+ self._on_device_script = self.write_test_script_to_disk(
+ device_test_script_contents)
+
+ runtime_files = [os.path.relpath(self._on_device_script)]
+ runtime_files += self._read_runtime_files()
+ if self._vpython_dir:
+ # --vpython-dir is relative to the out dir, but --files expects paths
+ # relative to src dir, so fix the path up a bit.
+ runtime_files.append(
+ os.path.relpath(
+ os.path.abspath(os.path.join(self._path_to_outdir,
+ self._vpython_dir)),
+ CHROMIUM_SRC_PATH))
+ # TODO(bpastene): Add the vpython spec to the test's runtime deps instead
+ # of handling it here.
+ runtime_files.append('.vpython')
+
+ for f in runtime_files:
+ self._test_cmd.extend(['--files', f])
+
+ self._test_cmd += [
+ '--',
+ './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
+ ]
+
+ def _read_runtime_files(self):
+ if not self._runtime_deps_path:
+ return []
+
+ abs_runtime_deps_path = os.path.abspath(
+ os.path.join(self._path_to_outdir, self._runtime_deps_path))
+ with open(abs_runtime_deps_path) as runtime_deps_file:
+ files = [l.strip() for l in runtime_deps_file if l]
+ rel_file_paths = []
+ for f in files:
+ rel_file_path = os.path.relpath(
+ os.path.abspath(os.path.join(self._path_to_outdir, f)))
+ if not any(regex.match(rel_file_path) for regex in self._FILE_BLACKLIST):
+ rel_file_paths.append(rel_file_path)
+ return rel_file_paths
+
+ def post_run(self, _):
+ if self._on_device_script:
+ os.remove(self._on_device_script)
+
+
+class BrowserSanityTest(RemoteTest):
+
+ def __init__(self, args, unknown_args):
+ super(BrowserSanityTest, self).__init__(args, unknown_args)
+
+ # 10 min should be enough time for the sanity test to pass.
+ self._retries = 1
+ self._timeout = 600
+
+ @property
+ def suite_name(self):
+ return SANITY_TEST_TARGET
+
+ def build_test_command(self):
+ if '--gtest_filter=%s' % SANITY_TEST_TARGET in self._additional_args:
+ logging.info(
+ 'GTest filtering not supported for the sanity test. The '
+ '--gtest_filter arg will be ignored.')
+ self._additional_args.remove('--gtest_filter=%s' % SANITY_TEST_TARGET)
+ if any(arg.startswith('--gtest_repeat') for arg in self._additional_args):
+ logging.info(
+ '--gtest_repeat not supported for sanity test. The arg will be '
+ 'ignored.')
+ self._additional_args = [
+ arg for arg in self._additional_args if not arg.startswith(
+ '--gtest_repeat')]
+
+ if self._additional_args:
+ raise TestFormatError(
+ 'Sanity test should not have additional args: %s' % (
+ self._additional_args))
+
+ # VMs don't have the disk space for an unstripped version of Chrome
+ # instrumented for code coverage, so only strip in that case.
+ if not self._use_vm or not os.environ.get('LLVM_PROFILE_FILE'):
+ self._test_cmd.append('--nostrip')
+
+ device_test_script_contents = self.BASIC_SHELL_SCRIPT[:]
+ if self._llvm_profile_var:
+ device_test_script_contents += [
+ 'echo "LLVM_PROFILE_FILE=%s" >> /etc/chrome_dev.conf' % (
+ self._llvm_profile_var)
+ ]
+
+ # vm_sanity.py is the sanity test, which is baked into the device image.
+ device_test_script_contents.append('/usr/local/autotest/bin/vm_sanity.py')
+
+ self._on_device_script = self.write_test_script_to_disk(
+ device_test_script_contents)
+
+ self._test_cmd += [
+ '--files', os.path.relpath(self._on_device_script),
+ # The sanity test smoke-checks the system browser, so deploy our
+ # locally-built chrome to the device before testing.
+ '--deploy',
+ '--mount',
+ '--build-dir', os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
+ '--',
+ './' + os.path.relpath(self._on_device_script, self._path_to_outdir)
+ ]
+
+
+def device_test(args, unknown_args):
+ # cros_run_test has trouble with relative paths that go up directories,
+ # so cd to src/, which should be the root of all data deps.
+ os.chdir(CHROMIUM_SRC_PATH)
+
+ # pylint: disable=redefined-variable-type
+ # TODO: Remove the above when depot_tools' pylint is updated to include the
+ # fix to https://github.com/PyCQA/pylint/issues/710.
+ if args.test_type == 'tast':
+ test = TastTest(args, unknown_args)
+ elif args.test_exe == SANITY_TEST_TARGET:
+ test = BrowserSanityTest(args, unknown_args)
+ else:
+ test = GTestTest(args, unknown_args)
+
+ test.build_test_command()
+ logging.info('Running the following command on the device:')
+ logging.info(' '.join(test.test_cmd))
+
+ return test.run_test()
+
+
+def host_cmd(args, unknown_args):
+ if not args.cmd:
+ raise TestFormatError('Must specify command to run on the host.')
+ elif unknown_args:
+ raise TestFormatError(
+ 'Args "%s" unsupported. Is your host command correctly formatted?' % (
+ ' '.join(unknown_args)))
+ elif args.deploy_chrome and not args.path_to_outdir:
+ raise TestFormatError(
+ '--path-to-outdir must be specified if --deploy-chrome is passed.')
+
+ cros_run_test_cmd = [
+ CROS_RUN_TEST_PATH,
+ '--board', args.board,
+ '--cache-dir', args.cros_cache,
+ ]
+ if args.use_vm:
+ cros_run_test_cmd += [
+ '--start',
+ # Don't persist any filesystem changes after the VM shuts down.
+ '--copy-on-write',
+ '--device', 'localhost',
+ ]
+ else:
+ cros_run_test_cmd += [
+ '--device', args.device if args.device else LAB_DUT_HOSTNAME
+ ]
+ if args.verbose:
+ cros_run_test_cmd.append('--debug')
+
+ test_env = setup_env()
+ if args.deploy_chrome:
+ cros_run_test_cmd += [
+ '--deploy',
+ '--mount',
+ '--build-dir', os.path.abspath(args.path_to_outdir),
+ ]
+
+ cros_run_test_cmd += [
+ '--host-cmd',
+ '--',
+ ] + args.cmd
+
+ logging.info('Running the following command:')
+ logging.info(' '.join(cros_run_test_cmd))
+
+ return subprocess42.call(
+ cros_run_test_cmd, stdout=sys.stdout, stderr=sys.stderr, env=test_env)
+
+
+def setup_env():
+ """Returns a copy of the current env with some needed vars added."""
+ env = os.environ.copy()
+ # Some chromite scripts expect chromite/bin to be on PATH.
+ env['PATH'] = env['PATH'] + ':' + os.path.join(CHROMITE_PATH, 'bin')
+ # deploy_chrome needs a set of GN args used to build chrome to determine if
+ # certain libraries need to be pushed to the device. It looks for the args via
+ # an env var. To trigger the default deploying behavior, give it a dummy set
+ # of args.
+ # TODO(crbug.com/823996): Make the GN-dependent deps controllable via cmd
+ # line args.
+ if not env.get('GN_ARGS'):
+ env['GN_ARGS'] = 'enable_nacl = true'
+ if not env.get('USE'):
+ env['USE'] = 'highdpi'
+ return env
+
+
+def add_common_args(parser):
+ parser.add_argument(
+ '--cros-cache', type=str, default=DEFAULT_CROS_CACHE,
+ help='Path to cros cache.')
+ parser.add_argument(
+ '--path-to-outdir', type=str, required=True,
+ help='Path to output directory, all of whose contents will be '
+ 'deployed to the device.')
+ parser.add_argument(
+ '--runtime-deps-path', type=str,
+ help='Runtime data dependency file from GN.')
+ parser.add_argument(
+ '--vpython-dir', type=str,
+ help='Location on host of a directory containing a vpython binary to '
+ 'deploy to the device before the test starts. The location of this '
+ 'dir will be added onto PATH in the device. WARNING: The arch of '
+ 'the device might not match the arch of the host, so avoid using '
+ '"${platform}" when downloading vpython via CIPD.')
+ # TODO(bpastene): Switch all uses of "--vm-logs-dir" to "--logs-dir".
+ parser.add_argument(
+ '--vm-logs-dir', '--logs-dir', type=str, dest='logs_dir',
+ help='Will copy everything under /var/log/ from the device after the '
+ 'test into the specified dir.')
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--verbose', '-v', action='store_true')
+ # Required args.
+ parser.add_argument(
+ '--board', type=str, required=True, help='Type of CrOS device.')
+ vm_or_device_group = parser.add_mutually_exclusive_group()
+ vm_or_device_group.add_argument(
+ '--use-vm', action='store_true',
+ help='Will run the test in the VM instead of a device.')
+ vm_or_device_group.add_argument(
+ '--device', type=str,
+ help='Hostname (or IP) of device to run the test on. This arg is not '
+ 'required if --use-vm is set.')
+ subparsers = parser.add_subparsers(dest='test_type')
+ # Host-side test args.
+ host_cmd_parser = subparsers.add_parser(
+ 'host-cmd',
+ help='Runs a host-side test. Pass the host-side command to run after '
+ '"--". If --use-vm is passed, hostname and port for the device '
+ 'will be 127.0.0.1:9222.')
+ host_cmd_parser.set_defaults(func=host_cmd)
+ host_cmd_parser.add_argument(
+ '--cros-cache', type=str, default=DEFAULT_CROS_CACHE,
+ help='Path to cros cache.')
+ host_cmd_parser.add_argument(
+ '--path-to-outdir', type=os.path.realpath,
+ help='Path to output directory, all of whose contents will be deployed '
+ 'to the device.')
+ host_cmd_parser.add_argument(
+ '--deploy-chrome', action='store_true',
+ help='Will deploy a locally built Chrome binary to the device before '
+ 'running the host-cmd.')
+ host_cmd_parser.add_argument('cmd', nargs=argparse.REMAINDER)
+ # GTest args.
+ # TODO(bpastene): Rename 'vm-test' arg to 'gtest'.
+ gtest_parser = subparsers.add_parser(
+ 'vm-test',
+ help='Runs a device-side gtest.')
+ gtest_parser.set_defaults(func=device_test)
+ gtest_parser.add_argument(
+ '--test-exe', type=str, required=True,
+ help='Path to test executable to run inside the device. If the value is '
+ '%s, the sanity test that ships with the device image runs instead. '
+ 'This test smoke-checks the system browser (eg: loads a simple '
+ 'webpage, executes some javascript), so a fully-built Chrome binary '
+ 'that can be deployed to the device is expected to be available in '
+ 'the out-dir.' % SANITY_TEST_TARGET)
+
+ # GTest args. Some are passed down to the test binary in the device. Others
+ # are parsed here since they might need tweaking or special handling.
+ gtest_parser.add_argument(
+ '--test-launcher-summary-output', type=str,
+ help='When set, will pass the same option down to the test and retrieve '
+ 'its result file at the specified location.')
+ # Shard args are parsed here since we might also specify them via env vars.
+ gtest_parser.add_argument(
+ '--test-launcher-shard-index',
+ type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
+ help='Index of the external shard to run.')
+ gtest_parser.add_argument(
+ '--test-launcher-total-shards',
+ type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+ help='Total number of external shards.')
+
+ # Tast test args.
+ # pylint: disable=line-too-long
+ tast_test_parser = subparsers.add_parser(
+ 'tast',
+ help='Runs a device-side set of Tast tests. For more details, see: '
+ 'https://chromium.googlesource.com/chromiumos/platform/tast/+/master/docs/running_tests.md')
+ tast_test_parser.set_defaults(func=device_test)
+ tast_test_parser.add_argument(
+ '--suite-name', type=str, required=True,
+ help='Name to apply to the set of Tast tests to run. This has no effect '
+ 'on what is executed, but is used mainly for test results reporting '
+ 'and tracking (eg: flakiness dashboard).')
+ tast_test_parser.add_argument(
+ '--test-launcher-summary-output', type=str,
+ help='Generates a simple GTest-style JSON result file for the test run.')
+ # TODO(bpastene): Change all uses of "--conditional" to use "--attr-expr".
+ tast_test_parser.add_argument(
+ '--conditional', '--attr-expr', type=str, dest='conditional',
+ help='A boolean expression whose matching tests will run '
+ '(eg: ("dep:chrome" || "dep:chrome_login")).')
+ tast_test_parser.add_argument(
+ '--test', '-t', action='append', dest='tests',
+ help='A Tast test to run in the device (eg: "ui.ChromeLogin").')
+ tast_test_parser.add_argument(
+ '--use-host-tast-bin', action='store_true',
+ help='Use the host-side Tast bin to run the tests instead of the '
+ 'DUT-side local_test_runner. TODO(bpastene): Make this default.')
+
+ add_common_args(gtest_parser)
+ add_common_args(tast_test_parser)
+ args, unknown_args = parser.parse_known_args()
+
+ logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
+
+ if not args.use_vm and not args.device:
+ # If we're not running on a VM, but haven't specified a hostname, assume
+ # we're on a lab bot and are trying to run a test on a lab DUT. See if the
+ # magic lab DUT hostname resolves to anything. (It will in the lab and will
+ # not on dev machines.)
+ try:
+ socket.getaddrinfo(LAB_DUT_HOSTNAME, None)
+ except socket.gaierror:
+ logging.error(
+ 'The default DUT hostname of %s is unreachable.', LAB_DUT_HOSTNAME)
+ return 1
+ if args.use_vm:
+ if not os.path.exists('/dev/kvm'):
+ logging.error('/dev/kvm is missing. Is KVM installed on this machine?')
+ return 1
+ elif not os.access('/dev/kvm', os.W_OK):
+ logging.error(
+ '/dev/kvm is not writable as current user. Perhaps you should be '
+ 'root?')
+ return 1
+
+ args.cros_cache = os.path.abspath(args.cros_cache)
+ return args.func(args, unknown_args)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/ciopfs.sha1 b/deps/v8/build/ciopfs.sha1
new file mode 100644
index 0000000000..c1855a347e
--- /dev/null
+++ b/deps/v8/build/ciopfs.sha1
@@ -0,0 +1 @@
+5454b3c4f1c9992047e7ae9d6d14d5b49b1b12f3 \ No newline at end of file
diff --git a/deps/v8/build/cipd/clobber_cipd_root.py b/deps/v8/build/cipd/clobber_cipd_root.py
new file mode 100755
index 0000000000..5d36c72239
--- /dev/null
+++ b/deps/v8/build/cipd/clobber_cipd_root.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Clobbers a CIPD root."""
+
+import argparse
+import os
+import shutil
+import sys
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Clobbers the CIPD root in the given directory.')
+
+ parser.add_argument(
+ '--root',
+ required=True,
+ help='Root directory for dependency.')
+ args = parser.parse_args()
+
+ cipd_root_dir = os.path.join(args.root, '.cipd')
+ if os.path.exists(cipd_root_dir):
+ shutil.rmtree(cipd_root_dir)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/clobber.py b/deps/v8/build/clobber.py
new file mode 100755
index 0000000000..18791c28f1
--- /dev/null
+++ b/deps/v8/build/clobber.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script provides methods for clobbering build directories."""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+def extract_gn_build_commands(build_ninja_file):
+ """Extracts from a build.ninja the commands to run GN.
+
+ The commands to run GN are the gn rule and build.ninja build step at the
+ top of the build.ninja file. We want to keep these when deleting GN builds
+ since we want to preserve the command-line flags to GN.
+
+ On error, returns the empty string."""
+ result = ""
+ with open(build_ninja_file, 'r') as f:
+ # Read until the third blank line. The first thing GN writes to the file
+ # is "ninja_required_version = x.y.z", then the "rule gn" and the third
+ # is the section for "build build.ninja", separated by blank lines.
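+ # As an illustration (paths and flag values vary per build), the prefix
+ # being preserved typically looks like:
+ #
+ #   ninja_required_version = 1.7.2
+ #
+ #   rule gn
+ #     command = ../../buildtools/gn --root=../.. -q gen .
+ #     description = Regenerating ninja files
+ #
+ #   build build.ninja: gn
+ #     generator = 1
+ #     depfile = build.ninja.d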
+ num_blank_lines = 0
+ while num_blank_lines < 3:
+ line = f.readline()
+ if len(line) == 0:
+ return '' # Unexpected EOF.
+ result += line
+ if line[0] == '\n':
+ num_blank_lines = num_blank_lines + 1
+ return result
+
+
+def delete_dir(build_dir):
+ if os.path.islink(build_dir):
+ return
+ # For unknown reasons (anti-virus?) rmtree of Chromium build directories
+ # often fails on Windows.
+ if sys.platform.startswith('win'):
+ subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
+ else:
+ shutil.rmtree(build_dir)
+
+
+def delete_build_dir(build_dir):
+ # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+ build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+ if not os.path.exists(build_ninja_d_file):
+ delete_dir(build_dir)
+ return
+
+ # GN builds aren't automatically regenerated when you sync. To avoid
+ # messing with the GN workflow, erase everything but the args file, and
+ # write a dummy build.ninja file that will automatically rerun GN the next
+ # time Ninja is run.
+ build_ninja_file = os.path.join(build_dir, 'build.ninja')
+ build_commands = extract_gn_build_commands(build_ninja_file)
+
+ try:
+ gn_args_file = os.path.join(build_dir, 'args.gn')
+ with open(gn_args_file, 'r') as f:
+ args_contents = f.read()
+ except IOError:
+ args_contents = ''
+
+ e = None
+ try:
+ # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
+ # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
+ # the exception and rethrow it later.
+ delete_dir(build_dir)
+ os.mkdir(build_dir)
+ except Exception as e:
+ pass
+
+ # Put back the args file (if any).
+ if args_contents != '':
+ with open(gn_args_file, 'w') as f:
+ f.write(args_contents)
+
+ # Write the build.ninja file sufficiently to regenerate itself.
+ with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+ if build_commands != '':
+ f.write(build_commands)
+ else:
+ # Couldn't parse the build.ninja file, write a default thing.
+ f.write('''rule gn
+command = gn -q gen //out/%s/
+description = Regenerating ninja files
+
+build build.ninja: gn
+generator = 1
+depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+ # Write a .d file for the build which references a nonexistent file. This
+ # will make Ninja always mark the build as dirty.
+ with open(build_ninja_d_file, 'w') as f:
+ f.write('build.ninja: nonexistant_file.gn\n')
+
+ if e:
+ # Rethrow the exception we caught earlier.
+ raise e
+
+def clobber(out_dir):
+ """Clobber contents of build directory.
+
+ Don't delete the directory itself: some checkouts have the build directory
+ mounted."""
+ for f in os.listdir(out_dir):
+ path = os.path.join(out_dir, f)
+ if os.path.isfile(path):
+ os.unlink(path)
+ elif os.path.isdir(path):
+ delete_build_dir(path)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('out_dir', help='The output directory to clobber')
+ args = parser.parse_args()
+ clobber(args.out_dir)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/common.croc b/deps/v8/build/common.croc
new file mode 100644
index 0000000000..fde7a8b298
--- /dev/null
+++ b/deps/v8/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+# croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+ # List of root directories, applied in order
+ 'roots' : [
+ # Sub-paths we specifically care about and want to call out
+ {
+ 'root' : '_/src',
+ 'altname' : 'CHROMIUM',
+ },
+ ],
+
+ # List of rules, applied in order
+ # Note that any 'include':0 rules here will be overridden by the 'include':1
+ # rules in the platform-specific configs.
+ 'rules' : [
+ # Don't scan for executable lines in uninstrumented C++ header files
+ {
+ 'regexp' : '.*\\.(h|hpp)$',
+ 'add_if_missing' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '',
+ 'group' : 'source',
+ },
+ {
+ 'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+ 'group' : 'test',
+ },
+
+ # Languages
+ {
+ 'regexp' : '.*\\.(c|h)$',
+ 'language' : 'C',
+ },
+ {
+ 'regexp' : '.*\\.(cc|cpp|hpp)$',
+ 'language' : 'C++',
+ },
+
+ # Files/paths to include. Specify these before the excludes, since rules
+ # are in order.
+ {
+ 'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+ 'include' : 1,
+ },
+ # Don't include subversion or mercurial SCM dirs
+ {
+ 'regexp' : '.*/(\\.svn|\\.hg)/',
+ 'include' : 0,
+ },
+ # Don't include output dirs
+ {
+ 'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
+ 'include' : 0,
+ },
+ # Don't include third-party source
+ {
+ 'regexp' : '.*/third_party/',
+ 'include' : 0,
+ },
+ # We don't run the V8 test suite, so we don't care about V8 coverage.
+ {
+ 'regexp' : '.*/v8/',
+ 'include' : 0,
+ },
+ ],
+
+ # Paths to add source from
+ 'add_files' : [
+ 'CHROMIUM'
+ ],
+
+ # Statistics to print
+ 'print_stats' : [
+ {
+ 'stat' : 'files_executable',
+ 'format' : '*RESULT FilesKnown: files_executable= %d files',
+ },
+ {
+ 'stat' : 'files_instrumented',
+ 'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+ },
+ {
+ 'stat' : '100.0 * files_instrumented / files_executable',
+ 'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
+ },
+ {
+ 'stat' : 'lines_executable',
+ 'format' : '*RESULT LinesKnown: lines_known= %d lines',
+ },
+ {
+ 'stat' : 'lines_instrumented',
+ 'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+ },
+ {
+ 'stat' : 'lines_covered',
+ 'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+ 'group' : 'source',
+ },
+ {
+ 'stat' : 'lines_covered',
+ 'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+ 'group' : 'test',
+ },
+ {
+ 'stat' : '100.0 * lines_covered / lines_executable',
+ 'format' : '*RESULT PercentCovered: percent_covered= %g percent',
+ },
+ {
+ 'stat' : '100.0 * lines_covered / lines_executable',
+ 'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
+ 'group' : 'source',
+ },
+ {
+ 'stat' : '100.0 * lines_covered / lines_executable',
+ 'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
+ 'group' : 'test',
+ },
+ ],
+}
diff --git a/deps/v8/build/compiled_action.gni b/deps/v8/build/compiled_action.gni
new file mode 100644
index 0000000000..7e25a0b6fc
--- /dev/null
+++ b/deps/v8/build/compiled_action.gni
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach but instead of running a Python script, it will compile a
+# given tool in the host toolchain and run that (either once or over the list
+# of inputs, depending on the variant).
+#
+# Parameters
+#
+# tool (required)
+# [label] Label of the tool to run. This should be an executable, and
+# this label should not include a toolchain (anything in parens). The
+# host compile of this tool will be used.
+#
+# outputs (required)
+# [list of files] Like the outputs of action (if using "compiled_action",
+# this would be just the list of outputs), or action_foreach (if using
+# "compiled_action_foreach", this would contain source expansions mapping
+# input to output files).
+#
+# args (required)
+# [list of strings] Same meaning as action/action_foreach.
+#
+# inputs (optional)
+# Files the binary takes as input. The step will be re-run whenever any
+# of these change. If inputs is empty, the step will run only when the
+# binary itself changes.
+#
+# depfile
+# deps
+# visibility (all optional)
+# Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+# compiled_action("run_my_tool") {
+# tool = "//tools/something:mytool"
+# outputs = [
+# "$target_gen_dir/mysource.cc",
+# "$target_gen_dir/mysource.h",
+# ]
+#
+# # The tool takes this input.
+# inputs = [ "my_input_file.idl" ]
+#
+# # In this case, the tool takes as arguments the input file and the output
+# # build dir (both relative to the "cd" that the script will be run in)
+# # and will produce the output files listed above.
+# args = [
+# rebase_path("my_input_file.idl", root_build_dir),
+# "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+# ]
+# }
+#
+# You would typically declare your tool like this:
+# if (host_toolchain == current_toolchain) {
+# executable("mytool") {
+# ...
+# }
+# }
+# The if statement around the executable is optional. That says "I only care
+# about this target in the host toolchain". Usually this is what you want, and
+# saves unnecessarily compiling your tool for the target platform. But if you
+# need a target build of your tool as well, just leave off the if statement.
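+#
+# For compiled_action_foreach, outputs and args use GN source expansions, as
+# in this sketch (tool label and file names are hypothetical):
+#
+#   compiled_action_foreach("run_my_tool_on_idls") {
+#     tool = "//tools/something:mytool"
+#     sources = [ "a.idl", "b.idl" ]
+#     outputs = [ "$target_gen_dir/{{source_name_part}}.cc" ]
+#     args = [
+#       "{{source}}",
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }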
+
+if (host_os == "win") {
+ _host_executable_suffix = ".exe"
+} else {
+ _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+ assert(defined(invoker.tool), "tool must be defined for $target_name")
+ assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+ assert(defined(invoker.args), "args must be defined for $target_name")
+
+ assert(!defined(invoker.sources),
+ "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "depfile",
+ "inputs",
+ "outputs",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ script = "//build/gn_run_binary.py"
+
+ # Construct the host toolchain version of the tool.
+ host_tool = invoker.tool + "($host_toolchain)"
+
+ # Get the path to the executable. Currently, this assumes that the tool
+ # does not specify output_name so that the target name is the name to use.
+ # If that's not the case, we'll need another argument to the script to
+ # specify this, since we can't know what the output name is (it might be in
+ # another file not processed yet).
+ host_executable =
+ get_label_info(host_tool, "root_out_dir") + "/" +
+ get_label_info(host_tool, "name") + _host_executable_suffix
+
+ deps += [ host_tool ]
+
+ # The script takes as arguments the binary to run, and then the arguments
+ # to pass it.
+ args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+ }
+}
+
+template("compiled_action_foreach") {
+ assert(defined(invoker.sources), "sources must be defined for $target_name")
+ assert(defined(invoker.tool), "tool must be defined for $target_name")
+ assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+ assert(defined(invoker.args), "args must be defined for $target_name")
+
+ action_foreach(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "depfile",
+ "inputs",
+ "outputs",
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ script = "//build/gn_run_binary.py"
+
+ # Construct the host toolchain version of the tool.
+ host_tool = invoker.tool + "($host_toolchain)"
+
+ # Get the path to the executable. Currently, this assumes that the tool
+ # does not specify output_name so that the target name is the name to use.
+ # If that's not the case, we'll need another argument to the script to
+ # specify this, since we can't know what the output name is (it might be in
+ # another file not processed yet).
+ host_executable =
+ get_label_info(host_tool, "root_out_dir") + "/" +
+ get_label_info(host_tool, "name") + _host_executable_suffix
+
+ deps += [ host_tool ]
+
+ # The script takes as arguments the binary to run, and then the arguments
+ # to pass it.
+ args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+ }
+}
diff --git a/deps/v8/build/compute_build_timestamp.py b/deps/v8/build/compute_build_timestamp.py
new file mode 100755
index 0000000000..1c4ca9cd9b
--- /dev/null
+++ b/deps/v8/build/compute_build_timestamp.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Returns a timestamp that approximates the build date.
+
+build_type impacts the timestamp generated, both relative to the date of the
+most recent commit:
+- default: the build date is set to the most recent first Sunday of a month at
+ 5:00am. The reason is that it is a time when invalidating the build cache
+ shouldn't have major repercussions (due to lower load).
+- official: the build date is set to the current date at 5:00am, or the day
+ before if the current time is before 5:00am.
+Either way, it is guaranteed to be in the past and always in UTC.
+"""
+
+# The requirements for the timestamp:
+# (1) for the purposes of continuous integration, a longer duration
+# between cache invalidations is better; >=1mo is preferable.
+# (2) for security purposes, timebombs would ideally be as close to
+# the actual time of the build as possible. It must be in the past.
+# (3) HSTS certificate pinning is valid for 70 days. To make CI builds enforce
+# HSTS pinning, <=1mo is preferable.
+#
+# On Windows, the timestamp is also written in the PE/COFF file header of
+# executables and dlls. That timestamp and the executable's file size are
+# the only two pieces of information that identify a given executable on
+# the symbol server, so rarely changing timestamps can cause conflicts there
+# as well. We only upload symbols for official builds to the symbol server.
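+#
+# Example invocation (the printed value is illustrative):
+#   $ python compute_build_timestamp.py default
+#   1549170000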
+
+from __future__ import print_function
+
+import argparse
+import calendar
+import datetime
+import doctest
+import os
+import sys
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def GetFirstSundayOfMonth(year, month):
+ """Returns the first sunday of the given month of the given year.
+
+ >>> GetFirstSundayOfMonth(2016, 2)
+ 7
+ >>> GetFirstSundayOfMonth(2016, 3)
+ 6
+ >>> GetFirstSundayOfMonth(2000, 1)
+ 2
+ """
+ weeks = calendar.Calendar().monthdays2calendar(year, month)
+ # Return the first day in the first week that is a Sunday.
+ return [date_day[0] for date_day in weeks[0] if date_day[1] == 6][0]
+
+
+def GetBuildDate(build_type, utc_now):
+ """Gets the approximate build date given the specific build type.
+
+ >>> GetBuildDate('default', datetime.datetime(2016, 2, 6, 1, 2, 3))
+ datetime.datetime(2016, 1, 3, 1, 2, 3)
+ >>> GetBuildDate('default', datetime.datetime(2016, 2, 7, 5))
+ datetime.datetime(2016, 2, 7, 5, 0)
+ >>> GetBuildDate('default', datetime.datetime(2016, 2, 8, 5))
+ datetime.datetime(2016, 2, 7, 5, 0)
+ >>> GetBuildDate('official', datetime.datetime(2016, 2, 8, 5))
+ datetime.datetime(2016, 2, 8, 5, 0)
+ """
+ day = utc_now.day
+ month = utc_now.month
+ year = utc_now.year
+ if build_type != 'official':
+ first_sunday = GetFirstSundayOfMonth(year, month)
+ # If our build is after the first Sunday, we've already refreshed our build
+ # cache on a quiet day, so just use that day.
+ # Otherwise, take the first Sunday of the previous month.
+ if day >= first_sunday:
+ day = first_sunday
+ else:
+ month -= 1
+ if month == 0:
+ month = 12
+ year -= 1
+ day = GetFirstSundayOfMonth(year, month)
+ return datetime.datetime(
+ year, month, day, utc_now.hour, utc_now.minute, utc_now.second)
+
+
+def main():
+ if doctest.testmod()[0]:
+ return 1
+ argument_parser = argparse.ArgumentParser()
+ argument_parser.add_argument(
+ 'build_type', help='The type of build', choices=('official', 'default'))
+ args = argument_parser.parse_args()
+
+ # The mtime of the revision in build/util/LASTCHANGE is stored in a file
+ # next to it. Read it, to get a deterministic time close to "now".
+ # That date is then modified as described at the top of the file so that
+ # it changes less frequently than with every commit.
+ # This intentionally always uses build/util/LASTCHANGE's commit time even if
+ # use_dummy_lastchange is set.
+ lastchange_file = os.path.join(THIS_DIR, 'util', 'LASTCHANGE.committime')
+ last_commit_timestamp = int(open(lastchange_file).read())
+ now = datetime.datetime.utcfromtimestamp(last_commit_timestamp)
+
+ if now.hour < 5:
+ # The time is locked at 5:00 am in UTC to cause the build cache
+ # invalidation to not happen exactly at midnight. Use the same calculation
+ # as the day before.
+ # See //base/build_time.cc.
+ now = now - datetime.timedelta(days=1)
+ now = datetime.datetime(now.year, now.month, now.day, 5, 0, 0)
+ build_date = GetBuildDate(args.build_type, now)
+ print(int(calendar.timegm(build_date.utctimetuple())))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/config/BUILD.gn b/deps/v8/build/config/BUILD.gn
new file mode 100644
index 0000000000..6a6b8f8a45
--- /dev/null
+++ b/deps/v8/build/config/BUILD.gn
@@ -0,0 +1,437 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/allocator.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/features.gni")
+
+# Subprojects need to override arguments in {mac,ios}_sdk_overrides.gni in their
+# .gn config, but those arguments are only used on macOS. Including
+# mac_sdk_overrides.gni ensures that this doesn't trigger an unused argument
+# warning.
+import("//build/config/ios/ios_sdk_overrides.gni")
+import("//build/config/mac/mac_sdk_overrides.gni")
+
+import("//build/config/pch.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/ui.gni")
+import("//build/toolchain/goma.gni")
+
+declare_args() {
+ # When set (the default) enables C++ iterator debugging in debug builds.
+ # Iterator debugging is always off in release builds (technically, this flag
+ # affects the "debug" config, which is always available but applied by
+ # default only in debug builds).
+ #
+ # Iterator debugging is generally useful for catching bugs. But it can
+ # introduce extra locking to check the state of an iterator against the state
+ # of the current object. For iterator- and thread-heavy code, this can
+ # significantly slow execution; a slowdown of two orders of magnitude has
+ # been seen (crbug.com/903553), and iterator debugging also slows builds by
+ # making generation of snapshot_blob.bin take ~40-60 s longer. Therefore this
+ # defaults to off.
+ enable_iterator_debugging = false
+}
+
+# ==============================================
+# PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+# ==============================================
+#
+# Legacy feature defines applied to all targets.
+#
+# These are applied to every single compile in the build and most of them are
+# only relevant to a few files. This bloats command lines and causes
+# unnecessary recompiles when flags are flipped.
+#
+# To pass defines to source code from the build, use the buildflag system which
+# will write headers containing the defines you need. This isolates the define
+# and means its definition can participate in the build graph, only recompiling
+# things when it actually changes.
+#
+# See //build/buildflag_header.gni for instructions on generating headers.
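+#
+# As a sketch (the target and flag names here are hypothetical; see
+# buildflag_header.gni for the authoritative documentation):
+#
+#   buildflag_header("foo_buildflags") {
+#     header = "foo_buildflags.h"
+#     flags = [ "ENABLE_FOO=$enable_foo" ]
+#   }
+#
+# C++ code then includes the generated header and tests the flag with
+# BUILDFLAG(ENABLE_FOO) instead of relying on a global define.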
+#
+# This will also allow you to scope your build flag to a BUILD.gn file (or a
+# .gni file if you need it from more than one place) rather than making global
+# flags. See //build/config/BUILDCONFIG.gn for advice on where to define
+# build flags.
+config("feature_flags") {
+ defines = []
+ if (dcheck_always_on) {
+ defines += [ "DCHECK_ALWAYS_ON=1" ]
+ if (dcheck_is_configurable) {
+ defines += [ "DCHECK_IS_CONFIGURABLE" ]
+ }
+ }
+ if (use_udev) {
+ # TODO(brettw) should probably be "=1".
+ defines += [ "USE_UDEV" ]
+ }
+ if (use_aura) {
+ defines += [ "USE_AURA=1" ]
+ }
+ if (use_glib) {
+ defines += [ "USE_GLIB=1" ]
+ }
+ if (use_nss_certs) {
+ defines += [ "USE_NSS_CERTS=1" ]
+ }
+ if (use_ozone && !is_android) {
+ # Note that some Chrome OS builds unconditionally set |use_ozone| to true,
+ # but they also build some targets with the Android toolchain. This ensures
+ # that Android targets still build with USE_OZONE=0 in such cases.
+ #
+ # TODO(crbug.com/837032): Maybe this can be cleaned up if we can avoid
+ # setting use_ozone globally.
+ defines += [ "USE_OZONE=1" ]
+ }
+ if (use_x11) {
+ defines += [ "USE_X11=1" ]
+ }
+ if (use_allocator != "tcmalloc") {
+ defines += [ "NO_TCMALLOC" ]
+ }
+ if (is_asan || is_hwasan || is_lsan || is_tsan || is_msan) {
+ defines += [
+ "MEMORY_TOOL_REPLACES_ALLOCATOR",
+ "MEMORY_SANITIZER_INITIAL_SIZE",
+ ]
+ }
+ if (is_asan) {
+ defines += [ "ADDRESS_SANITIZER" ]
+ }
+ if (is_lsan) {
+ defines += [ "LEAK_SANITIZER" ]
+ }
+ if (is_tsan) {
+ defines += [
+ "THREAD_SANITIZER",
+ "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
+ "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
+ ]
+ }
+ if (is_msan) {
+ defines += [ "MEMORY_SANITIZER" ]
+ }
+ if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
+ defines += [ "UNDEFINED_SANITIZER" ]
+ }
+ if (safe_browsing_mode == 1) {
+ defines += [ "FULL_SAFE_BROWSING" ]
+ defines += [ "SAFE_BROWSING_CSD" ]
+ defines += [ "SAFE_BROWSING_DB_LOCAL" ]
+ } else if (safe_browsing_mode == 2) {
+ defines += [ "SAFE_BROWSING_DB_REMOTE" ]
+ } else if (safe_browsing_mode == 3) {
+ defines += [ "SAFE_BROWSING_DB_LOCAL" ]
+ }
+ if (is_official_build) {
+ defines += [ "OFFICIAL_BUILD" ]
+ }
+ if (is_chrome_branded) {
+ defines += [ "GOOGLE_CHROME_BUILD" ]
+ } else {
+ defines += [ "CHROMIUM_BUILD" ]
+ }
+
+ # ==============================================
+ # PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+ # ==============================================
+ #
+ # See the comment at the top.
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+ defines = [
+ "_DEBUG",
+ "DYNAMIC_ANNOTATIONS_ENABLED=1",
+ "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+ ]
+
+ if (is_nacl) {
+ defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
+ }
+
+ if (is_win) {
+ if (!enable_iterator_debugging) {
+ # Iterator debugging is enabled by default by the compiler on debug
+ # builds, and we have to tell it to turn it off.
+ defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+ }
+ } else if (is_linux && current_cpu == "x64" && enable_iterator_debugging) {
+ # Enable libstdc++ debugging facilities to help catch problems early, see
+ # http://crbug.com/65151 .
+ # TODO(phajdan.jr): Should we enable this for all of POSIX?
+ defines += [ "_GLIBCXX_DEBUG=1" ]
+ }
+}
+
+config("release") {
+ defines = [ "NDEBUG" ]
+
+ # Sanitizers.
+ if (is_tsan) {
+ defines += [
+ "DYNAMIC_ANNOTATIONS_ENABLED=1",
+ "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+ ]
+ } else {
+ defines += [ "NVALGRIND" ]
+ if (!is_nacl) {
+ # NaCl always enables dynamic annotations. Currently this value is set to
+ # 1 for all .nexes.
+ defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
+ }
+ }
+
+ if (is_ios) {
+ # Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This
+ # follows XCode's default behavior for Release builds.
+ defines += [ "NS_BLOCK_ASSERTIONS=1" ]
+ }
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+ if (is_win) {
+ # TODO(brettw) this list of defaults should probably be smaller, and
+ # instead the targets that use the less common ones (e.g. wininet or
+ # winspool) should include those explicitly.
+ libs = [
+ "advapi32.lib",
+ "comdlg32.lib",
+ "dbghelp.lib",
+ "dnsapi.lib",
+ "gdi32.lib",
+ "msimg32.lib",
+ "odbc32.lib",
+ "odbccp32.lib",
+ "oleaut32.lib",
+ "shell32.lib",
+ "shlwapi.lib",
+ "user32.lib",
+ "usp10.lib",
+ "uuid.lib",
+ "version.lib",
+ "wininet.lib",
+ "winmm.lib",
+ "winspool.lib",
+ "ws2_32.lib",
+
+ # Please don't add more stuff here. We should actually be making this
+ # list smaller, since all common things should be covered. If you need
+ # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+ # target that needs it.
+ ]
+ if (current_os == "winuwp") {
+ # These libraries are needed for Windows UWP (i.e. store apps).
+ libs += [
+ "dloadhelper.lib",
+ "WindowsApp.lib",
+ ]
+ } else {
+ # These libraries are not compatible with Windows UWP (i.e. store apps.)
+ libs += [
+ "delayimp.lib",
+ "kernel32.lib",
+ "ole32.lib",
+ ]
+ }
+ } else if (is_android) {
+ libs = [
+ "dl",
+ "m",
+ ]
+ } else if (is_mac) {
+ # Targets should choose to explicitly link frameworks they require. Since
+ # linking can have run-time side effects, nothing should be listed here.
+ libs = []
+ } else if (is_ios) {
+ # The libraries listed here will be specified for both the target and the
+ # host. Only the common ones should be listed here.
+ libs = [
+ "CoreFoundation.framework",
+ "CoreGraphics.framework",
+ "CoreText.framework",
+ "Foundation.framework",
+ ]
+ } else if (is_linux) {
+ libs = [
+ "dl",
+ "pthread",
+ "rt",
+ ]
+ }
+}
+
+group("common_deps") {
+ visibility = [
+ ":executable_deps",
+ ":loadable_module_deps",
+ ":shared_library_deps",
+ ]
+
+ # WARNING: This group is a dependency of **every executable and shared
+ # library**. Please be careful adding new dependencies here.
+ public_deps = []
+
+ if (using_sanitizer) {
+ public_deps += [ "//build/config/sanitizers:deps" ]
+ }
+
+ if (use_custom_libcxx) {
+ public_deps += [ "//buildtools/third_party/libc++" ]
+ }
+
+ if (use_afl) {
+ public_deps += [ "//third_party/afl" ]
+ }
+
+ if (is_android && use_order_profiling) {
+ public_deps += [ "//base/android/orderfile:orderfile_instrumentation" ]
+ }
+
+ if (is_win && generate_order_files && !is_nacl) {
+ public_deps += [ "//tools/cygprofile_win" ]
+ }
+
+ if (is_fuchsia) {
+ public_deps += [ "//third_party/fuchsia-sdk:runtime_library" ]
+ }
+}
+
+# Only the executable template in BUILDCONFIG.gn should reference this.
+group("executable_deps") {
+ public_deps = [
+ ":common_deps",
+ ]
+ if (export_libcxxabi_from_executables) {
+ public_deps += [ "//buildtools/third_party/libc++abi" ]
+ }
+}
+
+# Only the loadable_module template in BUILDCONFIG.gn should reference this.
+group("loadable_module_deps") {
+ public_deps = [
+ ":common_deps",
+ ]
+}
+
+# Only the shared_library template in BUILDCONFIG.gn should reference this.
+group("shared_library_deps") {
+ public_deps = [
+ ":common_deps",
+ ]
+}
+
+# Executable configs -----------------------------------------------------------
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+ _windows_linker_configs = [
+ "//build/config/win:sdk_link",
+ "//build/config/win:common_linker_setup",
+ ]
+}
+
+# This config defines the configs applied to all executables.
+config("executable_config") {
+ configs = []
+
+ if (is_win) {
+ configs += _windows_linker_configs
+
+ # Currently only turn on linker CFI for executables.
+ configs += [ "//build/config/win:cfi_linker" ]
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:mac_dynamic_flags" ]
+ } else if (is_ios) {
+ configs += [
+ "//build/config/ios:ios_dynamic_flags",
+ "//build/config/ios:ios_executable_flags",
+ ]
+ } else if (is_linux || is_android || current_os == "aix") {
+ configs += [ "//build/config/gcc:executable_config" ]
+ if (is_chromecast) {
+ configs += [ "//build/config/chromecast:executable_config" ]
+ } else if (is_fuchsia) {
+ configs += [ "//build/config/fuchsia:executable_config" ]
+ }
+ }
+
+ # If we're using the prebuilt instrumented libraries with the sanitizers, we
+ # need to add ldflags to every binary to make sure they are picked up.
+ if (prebuilt_instrumented_libraries_available) {
+ configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+ }
+ if (use_locally_built_instrumented_libraries) {
+ configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+ }
+ configs += [ "//build/config/sanitizers:link_executable" ]
+}
+
+# Shared library configs -------------------------------------------------------
+
+# This config defines the configs applied to all shared libraries.
+config("shared_library_config") {
+ configs = []
+
+ if (is_win) {
+ configs += _windows_linker_configs
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:mac_dynamic_flags" ]
+ } else if (is_ios) {
+ configs += [ "//build/config/ios:ios_dynamic_flags" ]
+ } else if (is_chromecast) {
+ configs += [ "//build/config/chromecast:shared_library_config" ]
+ } else if (is_linux || current_os == "aix") {
+ configs += [ "//build/config/gcc:shared_library_config" ]
+ }
+
+ # If we're using the prebuilt instrumented libraries with the sanitizers, we
+ # need to add ldflags to every binary to make sure they are picked up.
+ if (prebuilt_instrumented_libraries_available) {
+ configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+ }
+ if (use_locally_built_instrumented_libraries) {
+ configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+ }
+ configs += [ "//build/config/sanitizers:link_shared_library" ]
+}
+
+# Add this config to your target to enable precompiled headers.
+#
+# Precompiled headers are done on a per-target basis. If you have just a couple
+# of files, the time it takes to precompile (~2 seconds) can actually be longer
+# than the time saved. On a Z620, a 100 file target compiles about 2 seconds
+# faster with precompiled headers, with greater savings for larger targets.
+#
+# Recommend precompiled headers for targets with more than 50 .cc files.
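+#
+# For example (target name is hypothetical):
+#
+#   static_library("big_target") {
+#     sources = [ ... ]
+#     configs += [ "//build/config:precompiled_headers" ]
+#   }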
+config("precompiled_headers") {
+ if (enable_precompiled_headers) {
+ if (is_win) {
+ # This is a string rather than a file GN knows about. It has to match
+ # exactly what's in the /FI flag below, and what might appear in the
+ # source code in quotes for an #include directive.
+ precompiled_header = "build/precompile.h"
+
+ # This is a file that GN will compile with the above header. It will be
+ # implicitly added to the sources (potentially multiple times, with one
+ # variant for each language used in the target).
+ precompiled_source = "//build/precompile.cc"
+
+ # Force include the header.
+ cflags = [ "/FI$precompiled_header" ]
+ } else if (is_mac) {
+ precompiled_source = "//build/precompile.h"
+ }
+ }
+}
diff --git a/deps/v8/build/config/BUILDCONFIG.gn b/deps/v8/build/config/BUILDCONFIG.gn
new file mode 100644
index 0000000000..9fea836a46
--- /dev/null
+++ b/deps/v8/build/config/BUILDCONFIG.gn
@@ -0,0 +1,622 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# WHAT IS THIS FILE?
+# =============================================================================
+#
+# This is the master GN build configuration. This file is loaded after the
+# build args (args.gn) for the build directory and after the toplevel ".gn"
+# file (which points to this file as the build configuration).
+#
+# This file will be executed and the resulting context will be used to execute
+# every other file in the build. So variables declared here (that don't start
+# with an underscore) will be implicitly global.
+
+# =============================================================================
+# PLATFORM SELECTION
+# =============================================================================
+#
+# There are two main things to set: "os" and "cpu". The "toolchain" is the name
+# of the GN thing that encodes combinations of these things.
+#
+# Users typically only set the variables "target_os" and "target_cpu" in "gn
+# args", the rest are set up by our build and internal to GN.
+#
+# There are three different types of each of these things: The "host"
+# represents the computer doing the compile and never changes. The "target"
+# represents the main thing we're trying to build. The "current" represents
+# which configuration is currently being defined, which can be either the
+# host, the target, or something completely different (like nacl). GN will
+# run the same build file multiple times for the different required
+# configurations in the same build.
+#
+# This gives the following variables:
+# - host_os, host_cpu, host_toolchain
+# - target_os, target_cpu, default_toolchain
+# - current_os, current_cpu, current_toolchain.
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+# if (current_toolchain == host_toolchain) { ...
+
+if (target_os == "") {
+ target_os = host_os
+}
+
+if (target_cpu == "") {
+ if (target_os == "android") {
+ # If we're building for Android, we should assume that we want to
+ # build for ARM by default, not the host_cpu (which is likely x64).
+ # This allows us to not have to specify both target_os and target_cpu
+ # on the command line.
+ target_cpu = "arm"
+ } else {
+ target_cpu = host_cpu
+ }
+}
+
+if (current_cpu == "") {
+ current_cpu = target_cpu
+}
+if (current_os == "") {
+ current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file.
+#
+# - If your feature is a single target, say //components/foo, you can put
+# a declare_args() block in //components/foo/BUILD.gn and use it there.
+# Nobody else in the build needs to see the flag.
+#
+# - Defines based on build variables should be implemented via the generated
+# build flag header system. See //build/buildflag_header.gni. You can put
+# the buildflag_header target in the same file as the build flag itself. You
+# should almost never set "defines" directly.
+#
+# - If your flag toggles a target on and off or toggles between different
+# versions of similar things, write a "group" target that forwards to the
+# right target (or no target) depending on the value of the build flag. This
+# group can be in the same BUILD.gn file as the build flag, and targets can
+# depend unconditionally on the group rather than duplicating flag checks
+# across many targets (see the sketch below the list).
+#
+# - If a semi-random set of build files REALLY needs to know about a define and
+# the above pattern for isolating the build logic in a forwarding group
+# doesn't work, you can put the argument in a .gni file. This should be put
+# in the lowest level of the build that knows about this feature (which should
+# almost always be outside of the //build directory!).
+#
+# Other flag advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+# to some complex thing in the default case (like the location of the
+# compiler which would be computed by a script), use a default value of -1 or
+# the empty string. Outside of the declare_args block, conditionally expand
+# the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+# your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+# These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+# even if the value is overridden, which is wasteful. See first bullet.
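+#
+# A sketch of the forwarding-group pattern mentioned above (the target and
+# flag names are hypothetical):
+#
+#   group("maybe_foo") {
+#     if (enable_foo) {
+#       public_deps = [ "//components/foo" ]
+#     }
+#   }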
+
+declare_args() {
+ # Set to enable the official build level of optimization. This has nothing
+ # to do with branding, but enables an additional level of optimization above
+ # release (!is_debug). This might be better expressed as a tri-state
+ # (debug, release, official) but for historical reasons there are two
+ # separate flags.
+ is_official_build = false
+
+ # Whether we're a traditional desktop unix.
+ is_desktop_linux = current_os == "linux"
+
+ # Set to true when compiling with the Clang compiler.
+ is_clang = current_os != "linux" ||
+ (current_cpu != "s390x" && current_cpu != "s390" &&
+ current_cpu != "ppc64" && current_cpu != "ppc" &&
+ current_cpu != "mips" && current_cpu != "mips64")
+
+ # Allows the path to a custom target toolchain to be injected as a single
+ # argument, and set as the default toolchain.
+ custom_toolchain = ""
+
+ # This should not normally be set as a build argument. It's here so that
+ # every toolchain can pass through the "global" value via toolchain_args().
+ host_toolchain = ""
+
+ # DON'T ADD MORE FLAGS HERE. Read the comment above.
+}
+
+declare_args() {
+ # Debug build. Enabling official builds automatically sets is_debug to false.
+ is_debug = !is_official_build
+}
+
+declare_args() {
+ # Component build. Setting to true compiles targets declared as "components"
+ # as shared libraries loaded dynamically. This speeds up development time.
+ # When false, components will be linked statically.
+ #
+ # For more information see
+ # https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
+ is_component_build = is_debug && current_os != "ios"
+}
+
+assert(!(is_debug && is_official_build), "Can't do official debug builds")
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+#
+# We do this before anything else to make sure we complain about any
+# unsupported os/cpu combinations as early as possible.
+
+if (host_toolchain == "") {
+ # This should only happen in the top-level context.
+ # In a specific toolchain context, the toolchain_args()
+ # block should have propagated a value down.
+ # TODO(dpranke): Add some sort of assert here that verifies that
+ # no toolchain omitted host_toolchain from its toolchain_args().
+
+ if (host_os == "linux") {
+ if (target_os != "linux") {
+ host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+ } else if (is_clang) {
+ host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+ } else {
+ host_toolchain = "//build/toolchain/linux:$host_cpu"
+ }
+ } else if (host_os == "mac") {
+ host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+ } else if (host_os == "win") {
+ # On Windows always use the target CPU for host builds for x86/x64. On the
+ # configurations we support this will always work and it saves build steps.
+ # Windows ARM64 targets require an x64 host for cross build.
+ if (target_cpu == "x86" || target_cpu == "x64") {
+ if (is_clang) {
+ host_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
+ } else {
+ host_toolchain = "//build/toolchain/win:$target_cpu"
+ }
+ } else if (is_clang) {
+ host_toolchain = "//build/toolchain/win:win_clang_$host_cpu"
+ } else {
+ host_toolchain = "//build/toolchain/win:$host_cpu"
+ }
+ } else if (host_os == "aix") {
+ host_toolchain = "//build/toolchain/aix:$host_cpu"
+ } else {
+ assert(false, "Unsupported host_os: $host_os")
+ }
+}
+
+_default_toolchain = ""
+
+if (target_os == "android") {
+ assert(host_os == "linux" || host_os == "mac",
+ "Android builds are only supported on Linux and Mac hosts.")
+ _default_toolchain = "//build/toolchain/android:android_clang_$target_cpu"
+} else if (target_os == "chromeos" || target_os == "linux") {
+ # See comments in build/toolchain/cros/BUILD.gn about board compiles.
+ if (is_clang) {
+ _default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
+ } else {
+ _default_toolchain = "//build/toolchain/linux:$target_cpu"
+ }
+} else if (target_os == "fuchsia") {
+ _default_toolchain = "//build/toolchain/fuchsia:$target_cpu"
+} else if (target_os == "ios") {
+ _default_toolchain = "//build/toolchain/mac:ios_clang_$target_cpu"
+} else if (target_os == "mac") {
+ assert(host_os == "mac", "Mac cross-compiles are unsupported.")
+ _default_toolchain = host_toolchain
+} else if (target_os == "win") {
+ # On Windows, we use the same toolchain for host and target by default.
+ # Beware, win cross builds have some caveats, see docs/win_cross.md
+ if (is_clang) {
+ _default_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
+ } else {
+ _default_toolchain = "//build/toolchain/win:$target_cpu"
+ }
+} else if (target_os == "winuwp") {
+ # WinUWP is only targeted for Windows Store applications, and only
+ # x86, x64, arm and arm64 are supported target CPUs.
+ assert(target_cpu == "x86" || target_cpu == "x64" || target_cpu == "arm" ||
+ target_cpu == "arm64")
+ _default_toolchain = "//build/toolchain/win:uwp_$target_cpu"
+} else if (target_os == "aix") {
+ _default_toolchain = "//build/toolchain/aix:$target_cpu"
+} else {
+ assert(false, "Unsupported target_os: $target_os")
+}
+
+# If a custom toolchain has been set in the args, set it as default. Otherwise,
+# set the default toolchain for the platform (if any).
+if (custom_toolchain != "") {
+ set_default_toolchain(custom_toolchain)
+} else if (_default_toolchain != "") {
+ set_default_toolchain(_default_toolchain)
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+# except Windows).
+# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
+# generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+is_android = current_os == "android"
+is_chromeos = current_os == "chromeos"
+is_fuchsia = current_os == "fuchsia"
+is_ios = current_os == "ios"
+is_linux = current_os == "chromeos" || current_os == "linux"
+is_mac = current_os == "mac"
+is_nacl = current_os == "nacl"
+is_win = current_os == "win" || current_os == "winuwp"
+
+is_posix = !is_win && !is_fuchsia
+
+# =============================================================================
+# SOURCES FILTERS
+# =============================================================================
+#
+# These patterns filter out platform-specific files when assigning to the
+# sources variable. The magic variable |sources_assignment_filter| is applied
+# to each assignment or appending to the sources variable and matches are
+# automatically removed.
+#
+# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
+# boundary = end of string or slash) are supported, and the entire string
+# must match the pattern (so you need "*.cc" to match all .cc files, for
+# example).
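+#
+# As an illustration, the pattern "*\bwin/*" matches "foo/win/bar.cc" (the
+# "win" path component starts at a boundary) but not "darwin/bar.cc".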
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+sources_assignment_filter = []
+
+if (!is_win) {
+ sources_assignment_filter += [
+ "*_win.cc",
+ "*_win.h",
+ "*_win_unittest.cc",
+ "*\bwin/*",
+ "*.def",
+ "*.rc",
+ ]
+}
+if (!is_mac) {
+ sources_assignment_filter += [
+ "*_mac.h",
+ "*_mac.cc",
+ "*_mac.mm",
+ "*_mac_unittest.h",
+ "*_mac_unittest.cc",
+ "*_mac_unittest.mm",
+ "*\bmac/*",
+ "*_cocoa.h",
+ "*_cocoa.cc",
+ "*_cocoa.mm",
+ "*_cocoa_unittest.h",
+ "*_cocoa_unittest.cc",
+ "*_cocoa_unittest.mm",
+ "*\bcocoa/*",
+ ]
+}
+if (!is_ios) {
+ sources_assignment_filter += [
+ "*_ios.h",
+ "*_ios.cc",
+ "*_ios.mm",
+ "*_ios_unittest.h",
+ "*_ios_unittest.cc",
+ "*_ios_unittest.mm",
+ "*\bios/*",
+ ]
+}
+if (!is_mac && !is_ios) {
+ sources_assignment_filter += [ "*.mm" ]
+}
+if (!is_linux) {
+ sources_assignment_filter += [
+ "*_linux.h",
+ "*_linux.cc",
+ "*_linux_unittest.h",
+ "*_linux_unittest.cc",
+ "*\blinux/*",
+ ]
+}
+if (!is_android) {
+ sources_assignment_filter += [
+ "*_android.h",
+ "*_android.cc",
+ "*_android_unittest.h",
+ "*_android_unittest.cc",
+ "*\bandroid/*",
+ ]
+}
+if (!is_chromeos) {
+ sources_assignment_filter += [
+ "*_chromeos.h",
+ "*_chromeos.cc",
+ "*_chromeos_unittest.h",
+ "*_chromeos_unittest.cc",
+ "*\bchromeos/*",
+ ]
+}
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+
+# Actually save this list.
+#
+# These patterns are executed for every file in the source tree on every run.
+# Therefore, adding more patterns slows down the build for everybody. We should
+# only add automatic patterns for configurations affecting hundreds of files
+# across many projects in the tree.
+#
+# Therefore, we only add rules to this list corresponding to platforms on the
+# Chromium waterfall. This is not for non-officially-supported platforms
+# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
+# write a conditional in the target to remove the file(s) from the list when
+# your platform/toolkit/feature doesn't apply.
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+#
+# WHAT GOES HERE?
+#
+# Other than the main compiler and linker configs, the only reason for a config
+# to be in this list is if some targets need to explicitly override that config
+# by removing it. This is how targets opt-out of flags. If you don't have that
+# requirement and just need to add a config everywhere, reference it as a
+# sub-config of an existing one, most commonly the main "compiler" one.
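+#
+# For example, a hypothetical third-party target could swap out the default
+# chromium_code config (assuming the usual no_chromium_code counterpart):
+#
+#   source_set("my_third_party_lib") {  # hypothetical name
+#     configs -= [ "//build/config/compiler:chromium_code" ]
+#     configs += [ "//build/config/compiler:no_chromium_code" ]
+#   }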
+
+# Holds all configs used for running the compiler.
+default_compiler_configs = [
+ "//build/config:feature_flags",
+ "//build/config/compiler:afdo",
+ "//build/config/compiler:afdo_optimize_size",
+ "//build/config/compiler:assembler_debug_dir",
+ "//build/config/compiler:compiler",
+ "//build/config/compiler:compiler_arm_fpu",
+ "//build/config/compiler:compiler_arm_thumb",
+ "//build/config/compiler:chromium_code",
+ "//build/config/compiler:default_include_dirs",
+ "//build/config/compiler:default_optimization",
+ "//build/config/compiler:default_stack_frames",
+ "//build/config/compiler:default_symbols",
+ "//build/config/compiler:export_dynamic",
+ "//build/config/compiler:no_exceptions",
+ "//build/config/compiler:no_rtti",
+ "//build/config/compiler:runtime_library",
+ "//build/config/compiler:thin_archive",
+ "//build/config/coverage:default_coverage",
+ "//build/config/sanitizers:default_sanitizer_flags",
+]
+
+if (is_win) {
+ default_compiler_configs += [
+ "//build/config/win:default_crt",
+ "//build/config/win:lean_and_mean",
+ "//build/config/win:nominmax",
+ "//build/config/win:unicode",
+ "//build/config/win:winver",
+ ]
+}
+
+if (is_posix) {
+ if (current_os != "aix") {
+ default_compiler_configs +=
+ [ "//build/config/gcc:symbol_visibility_hidden" ]
+ }
+}
+
+if (is_fuchsia) {
+ default_compiler_configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
+}
+
+if (is_android) {
+ default_compiler_configs +=
+ [ "//build/config/android:default_orderfile_instrumentation" ]
+}
+
+if (is_win) {
+ default_compiler_configs +=
+ [ "//build/config/win:default_cygprofile_instrumentation" ]
+}
+
+if (is_clang && !is_nacl) {
+ default_compiler_configs += [
+ "//build/config/clang:find_bad_constructs",
+ "//build/config/clang:extra_warnings",
+ ]
+}
+
+# Debug/release-related defines.
+if (is_debug) {
+ default_compiler_configs += [ "//build/config:debug" ]
+} else {
+ default_compiler_configs += [ "//build/config:release" ]
+}
+
+# Static libraries and source sets use only the compiler ones.
+set_defaults("static_library") {
+ configs = default_compiler_configs
+}
+set_defaults("source_set") {
+ configs = default_compiler_configs
+}
+
+# Compute the set of configs common to all linked targets (shared libraries,
+# loadable modules, executables) to avoid duplication below.
+if (is_win) {
+ # Many targets remove these configs, so they are not contained within
+ # //build/config:executable_config for easy removal.
+ _linker_configs = [
+ "//build/config/win:default_incremental_linking",
+
+ # Default to console-mode apps. Most of our targets are tests and such
+ # that shouldn't use the Windows subsystem.
+ "//build/config/win:console",
+ ]
+} else if (is_mac) {
+ _linker_configs = [ "//build/config/mac:strip_all" ]
+} else {
+ _linker_configs = []
+}
+
+# Executable defaults.
+default_executable_configs = default_compiler_configs + [
+ "//build/config:default_libs",
+ "//build/config:executable_config",
+ ] + _linker_configs
+set_defaults("executable") {
+ configs = default_executable_configs
+}
+
+# Shared library and loadable module defaults (also for components in component
+# mode).
+default_shared_library_configs = default_compiler_configs + [
+ "//build/config:default_libs",
+ "//build/config:shared_library_config",
+ ] + _linker_configs
+if (is_android) {
+ # Strip native JNI exports from shared libraries by default. Binaries that
+ # want this can remove this config.
+ default_shared_library_configs +=
+ [ "//build/config/android:hide_all_but_jni_onload" ]
+}
+set_defaults("shared_library") {
+ configs = default_shared_library_configs
+}
+set_defaults("loadable_module") {
+ configs = default_shared_library_configs
+
+ # loadable_modules are generally used by other libs, not just via JNI.
+ if (is_android) {
+ configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+ }
+}
+
+# Sets default dependencies for executable and shared_library targets.
+#
+# Variables
+# no_default_deps: If true, no standard dependencies will be added.
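+#
+# A minimal usage sketch (hypothetical target and file names):
+#
+#   executable("standalone_tool") {
+#     sources = [ "tool.cc" ]
+#     no_default_deps = true  # Skips //build/config:executable_deps.
+#   }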
+foreach(_target_type,
+ [
+ "executable",
+ "loadable_module",
+ "shared_library",
+ ]) {
+ template(_target_type) {
+ target(_target_type, target_name) {
+ forward_variables_from(invoker, "*", [ "no_default_deps" ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!defined(invoker.no_default_deps) || !invoker.no_default_deps) {
+ deps += [ "//build/config:${_target_type}_deps" ]
+ }
+ }
+ }
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# Defines a component, which equates to a shared_library when
+# is_component_build == true and a static_library otherwise.
+#
+# Use static libraries for the static build rather than source sets because
+# many of our test binaries link many large dependencies but often don't
+# use large portions of them. The static libraries are much more efficient to
+# link in this situation since only the necessary object files are linked.
+#
+# The invoker can override the type of the target in the non-component-build
+# case by setting static_component_type to either "source_set" or
+# "static_library". If unset, the default will be used.
+template("component") {
+ if (is_component_build) {
+ _component_mode = "shared_library"
+ } else if (defined(invoker.static_component_type)) {
+ assert(invoker.static_component_type == "static_library" ||
+ invoker.static_component_type == "source_set")
+ _component_mode = invoker.static_component_type
+ } else if (!defined(invoker.sources)) {
+ # When there are no sources defined, use a source set to avoid creating
+ # an empty static library (which generally doesn't work).
+ _component_mode = "source_set"
+ } else {
+ _component_mode = "static_library"
+ }
+ target(_component_mode, target_name) {
+ # Explicitly forward visibility, implicitly forward everything else.
+ # Forwarding "*" doesn't recurse into nested scopes (to avoid copying all
+ # globals into each template invocation), so won't pick up file-scoped
+ # variables. Normally this isn't too bad, but visibility is commonly
+ # defined at the file scope. Explicitly forwarding visibility and then
+ # excluding it from the "*" set works around this problem.
+ # See http://crbug.com/594610
+ forward_variables_from(invoker, [ "visibility" ])
+ forward_variables_from(invoker, "*", [ "visibility" ])
+ }
+}
+
+# Component defaults
+set_defaults("component") {
+ if (is_component_build) {
+ configs = default_shared_library_configs
+ if (is_android) {
+ configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+ }
+ } else {
+ configs = default_compiler_configs
+ }
+}
diff --git a/deps/v8/build/config/OWNERS b/deps/v8/build/config/OWNERS
new file mode 100644
index 0000000000..082e53e018
--- /dev/null
+++ b/deps/v8/build/config/OWNERS
@@ -0,0 +1,4 @@
+dpranke@chromium.org
+scottmg@chromium.org
+
+per-file *jumbo*=bratell@opera.com
diff --git a/deps/v8/build/config/aix/BUILD.gn b/deps/v8/build/config/aix/BUILD.gn
new file mode 100644
index 0000000000..e3f21c39c5
--- /dev/null
+++ b/deps/v8/build/config/aix/BUILD.gn
@@ -0,0 +1,50 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+
+config("compiler") {
+ # These flags are shared between the C compiler and linker.
+ defines = [
+ "_LINUX_SOURCE_COMPAT=1",
+ "__STDC_FORMAT_MACROS",
+ "_ALL_SOURCE=1",
+ ]
+
+ cflags = [
+ "-Wall",
+ "-Wno-unused-parameter",
+ "-pthread",
+ "-Wmissing-field-initializers",
+ "-Wno-uninitialized",
+ "-mcpu=power5+",
+ "-mfprnd",
+ "-mno-popcntb",
+ "-maix64",
+ "-fdata-sections",
+ "-ffunction-sections",
+ "-O3",
+
+ # "-Werror"
+ # We need to find a way to fix the TOC warnings if we want to enable this.
+ ]
+
+ cflags_cc = [
+ "-std=gnu++11",
+ "-fno-rtti",
+ "-fno-exceptions",
+ "-Wno-narrowing",
+ "-Wno-non-virtual-dtor",
+ ]
+
+ ldflags = [
+ "-pthread",
+ "-maix64",
+ "-Wl,-bbigtoc",
+ ]
+}
diff --git a/deps/v8/build/config/allocator.gni b/deps/v8/build/config/allocator.gni
new file mode 100644
index 0000000000..52dcc5f101
--- /dev/null
+++ b/deps/v8/build/config/allocator.gni
@@ -0,0 +1,58 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# Temporarily disable tcmalloc on arm64 linux to get rid of compilation errors.
+if (is_android || is_mac || is_ios || is_asan || is_lsan || is_tsan ||
+ is_msan || is_win || is_fuchsia || (is_linux && target_cpu == "arm64")) {
+ _default_allocator = "none"
+} else {
+ _default_allocator = "tcmalloc"
+}
+
+# The debug CRT on Windows has some debug features that are incompatible with
+# the shim. NaCl in particular does seem to link some binaries statically
+# against the debug CRT with "is_nacl=false".
+if ((is_linux || is_android || is_mac ||
+ (is_win && !is_component_build && !is_debug)) && !is_asan && !is_hwasan &&
+ !is_lsan && !is_tsan && !is_msan) {
+ _default_use_allocator_shim = true
+} else {
+ _default_use_allocator_shim = false
+}
+
+declare_args() {
+ # Memory allocator to use. Set to "none" to use default allocator.
+ use_allocator = _default_allocator
+
+ # Causes all the allocations to be routed via allocator_shim.cc.
+ use_allocator_shim = _default_use_allocator_shim
+
+ # Partition alloc is included by default except iOS.
+ use_partition_alloc = !is_ios
+
+ # Use the new tcmalloc. It's relevant only when use_allocator == "tcmalloc".
+ use_new_tcmalloc = false
+}
+
+if (is_nacl) {
+ # Turn off the build flag for NaCl builds to minimize confusion, as NaCl
+ # doesn't support the heap shim.
+ use_allocator_shim = false
+}
+
+assert(use_allocator == "none" || use_allocator == "tcmalloc")
+
+assert(!is_win || use_allocator == "none", "Tcmalloc doesn't work on Windows.")
+assert(!is_mac || use_allocator == "none", "Tcmalloc doesn't work on macOS.")
+
+assert(
+ !use_allocator_shim || is_linux || is_android || is_win || is_mac,
+ "use_allocator_shim is supported only on Linux, Android, Windows and macOS targets")
+
+if (is_win && use_allocator_shim) {
+ assert(!is_component_build,
+ "The allocator shim doesn't work for the component build on Windows.")
+}
diff --git a/deps/v8/build/config/android/BUILD.gn b/deps/v8/build/config/android/BUILD.gn
new file mode 100644
index 0000000000..b69d42b700
--- /dev/null
+++ b/deps/v8/build/config/android/BUILD.gn
@@ -0,0 +1,175 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+assert(is_android)
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Android-only.
+config("compiler") {
+ cflags = [
+ "-ffunction-sections",
+ "-fno-short-enums",
+ ]
+ defines = [
+ "ANDROID",
+
+ # The NDK has these things, but doesn't define the constants to say that it
+ # does. Define them here instead.
+ "HAVE_SYS_UIO_H",
+
+ # Forces full rebuilds on NDK rolls. To rebuild everything when NDK version
+ # stays the same, increment the suffix number.
+ "ANDROID_NDK_VERSION_ROLL=${android_ndk_version}_1",
+ ]
+
+ if (current_cpu == "mips64el") {
+ cflags += [
+ # Have to force IAS for mips64.
+ "-fintegrated-as",
+ ]
+ }
+
+ ldflags = [
+ # Don't allow visible symbols from libgcc or libc++ to be
+ # re-exported.
+ "-Wl,--exclude-libs=libgcc.a",
+
+ # Don't allow visible symbols from libraries that contain
+ # assembly code with symbols that aren't hidden properly.
+ # http://crbug.com/448386
+ "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+ ]
+
+ # $compile_api_level corresponds to the API level used for the sysroot path
+ # calculation in //build/config/android/config.gni
+ if (current_cpu == "arm") {
+ abi_target = "arm-linux-androideabi"
+ compile_api_level = android32_ndk_api_level
+ } else if (current_cpu == "x86") {
+ abi_target = "i686-linux-android"
+ compile_api_level = android32_ndk_api_level
+ } else if (current_cpu == "arm64") {
+ abi_target = "aarch64-linux-android"
+ compile_api_level = android64_ndk_api_level
+ } else if (current_cpu == "x64") {
+ # Placeholder for x64 support, not tested.
+ # TODO: Enable clang support for Android x64. http://crbug.com/539781
+ abi_target = "x86_64-linux-android"
+ compile_api_level = android64_ndk_api_level
+ } else if (current_cpu == "mipsel") {
+ abi_target = "mipsel-linux-android"
+ compile_api_level = android32_ndk_api_level
+ } else if (current_cpu == "mips64el") {
+ # Placeholder for mips64 support, not tested.
+ abi_target = "mips64el-linux-android"
+ compile_api_level = android64_ndk_api_level
+ } else {
+ assert(false, "Architecture not supported")
+ }
+ cflags += [
+ "--target=$abi_target",
+ "-isystem" +
+ rebase_path("$android_ndk_root/sysroot/usr/include/$abi_target",
+ root_build_dir),
+ "-D__ANDROID_API__=$compile_api_level",
+ ]
+ ldflags += [ "--target=$abi_target" ]
+
+ # TODO(crbug.com/771171): Remove this define once code that uses it has been
+ # updated to no longer need it. This is leftover from older Android NDK
+ # versions.
+ if (compile_api_level < 20) {
+ cflags += [ "-DHAVE_PTHREAD_COND_TIMEDWAIT_MONOTONIC=1" ]
+ }
+
+ # Assign any flags set for the C compiler to asmflags so that they are sent
+ # to the assembler.
+ asmflags = cflags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Android-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ # NOTE: The libc++ header include paths below are specified in cflags_cc
+ # rather than include_dirs because they need to come after include_dirs.
+ # Think of them like system headers, but don't use '-isystem' because the
+ # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+ # strange errors. The include ordering here is important; change with
+ # caution.
+ cflags_cc = [ "-isystem" +
+ rebase_path("$android_ndk_root/sources/android/support/include",
+ root_build_dir) ]
+
+ defines = [
+ "__GNU_SOURCE=1", # Necessary for clone().
+ "CHROMIUM_CXX_TWEAK_INLINES", # Saves binary size.
+ ]
+ ldflags = [ "-nostdlib" ]
+ lib_dirs = [ android_libcpp_lib_dir ]
+
+ libs = []
+ libs += [ "android_support" ]
+
+ # arm builds of libc++ starting in NDK r12 depend on unwind.
+ if (current_cpu == "arm") {
+ libs += [ "unwind" ]
+ }
+
+ # Manually link the libgcc.a that the cross compiler uses. This is
+ # absolute because the linker will look inside the sysroot if it's not.
+ libs += [
+ rebase_path(android_libgcc_file),
+ "c",
+ ]
+
+ if (current_cpu == "arm" && arm_version == 6) {
+ libs += [ "atomic" ]
+ }
+
+ if (current_cpu == "mipsel") {
+ libs += [ "atomic" ]
+ }
+
+ # TODO(jdduke) Re-enable on mips after resolving linking
+ # issues with libc++ (crbug.com/456380).
+ if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+ ldflags += [ "-Wl,--warn-shared-textrel" ]
+ }
+}
+
+config("hide_all_but_jni_onload") {
+ ldflags = [ "-Wl,--version-script=" + rebase_path(
+ "//build/android/android_only_explicit_jni_exports.lst",
+ root_build_dir) ]
+}
+
+config("hide_all_but_jni") {
+ ldflags = [ "-Wl,--version-script=" +
+ rebase_path("//build/android/android_only_jni_exports.lst",
+ root_build_dir) ]
+}
+
+config("lld_pack_relocations") {
+ ldflags = [ "-Wl,--pack-dyn-relocs=android" ]
+}
+
+# Used for instrumented build to generate the orderfile.
+config("default_orderfile_instrumentation") {
+ if (use_order_profiling) {
+ cflags = [ "-finstrument-function-entry-bare" ]
+ if (use_thin_lto) {
+ # TODO(pcc): This should not be necessary. Remove once
+ # https://reviews.llvm.org/D50017 lands and gets rolled in.
+ ldflags = [ "-Wl,-u,__cyg_profile_func_enter_bare" ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/android/OWNERS b/deps/v8/build/config/android/OWNERS
new file mode 100644
index 0000000000..74dca6f718
--- /dev/null
+++ b/deps/v8/build/config/android/OWNERS
@@ -0,0 +1,6 @@
+agrieve@chromium.org
+estevenson@chromium.org
+digit@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/config/android/abi.gni b/deps/v8/build/config/android/abi.gni
new file mode 100644
index 0000000000..79e98b8a8c
--- /dev/null
+++ b/deps/v8/build/config/android/abi.gni
@@ -0,0 +1,80 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Logic separated out from config.gni so that it can be used by compiler.gni
+# without introducing a circular dependency.
+
+# NOTE: Because Chrome OS builds may depend on targets built with the Android
+# toolchain, this GNI file may be read and processed from within Chrome OS
+# toolchains. Checking |is_android| here would therefore be too restrictive.
+assert(is_android || is_chromeos)
+
+declare_args() {
+ # Adds instrumentation to each function. Writes a file with the order in
+ # which functions are called at startup.
+ use_order_profiling = false
+
+ # Only effective if use_order_profiling = true. When this is true,
+ # instrumentation switches from startup profiling after a delay, and
+ # then waits for a devtools memory dump request to dump all
+ # profiling information. When false, the same delay is used to switch from
+ # startup, and then after a second delay all profiling information is dumped.
+ # See base::android::orderfile::StartDelayedDump for more information.
+ devtools_instrumentation_dumping = false
+
+ # Builds a secondary ABI for APKs, supporting a 32-bit arch as the
+ # secondary ABI in 64-bit Monochrome and WebView.
+ build_apk_secondary_abi = true
+}
+
+assert(!devtools_instrumentation_dumping || use_order_profiling,
+ "devtools_instrumentation_dumping requires use_order_profiling")
+
+if (current_cpu == "x86") {
+ android_app_abi = "x86"
+} else if (current_cpu == "arm") {
+ import("//build/config/arm.gni")
+ if (arm_version < 7) {
+ android_app_abi = "armeabi"
+ } else {
+ android_app_abi = "armeabi-v7a"
+ }
+} else if (current_cpu == "mipsel") {
+ android_app_abi = "mips"
+} else if (current_cpu == "x64") {
+ android_app_abi = "x86_64"
+} else if (current_cpu == "arm64") {
+ android_app_abi = "arm64-v8a"
+} else if (current_cpu == "mips64el") {
+ android_app_abi = "mips64"
+} else {
+ assert(false, "Unknown Android ABI: " + current_cpu)
+}
+
+if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") {
+ android_64bit_target_cpu = true
+} else if (target_cpu == "arm" || target_cpu == "x86" ||
+ target_cpu == "mipsel") {
+ android_64bit_target_cpu = false
+} else {
+ assert(false, "Unknown target CPU: $target_cpu")
+}
+
+# Intentionally do not define android_app_secondary_abi_cpu and
+# android_app_secondary_abi for 32-bit target_cpu, since they are not used.
+if (target_cpu == "arm64") {
+ android_secondary_abi_cpu = "arm"
+ android_app_secondary_abi = "armeabi-v7a"
+} else if (target_cpu == "x64") {
+ android_secondary_abi_cpu = "x86"
+ android_app_secondary_abi = "x86"
+} else if (target_cpu == "mips64el") {
+ android_secondary_abi_cpu = "mipsel"
+ android_app_secondary_abi = "mips"
+}
+
+if (defined(android_secondary_abi_cpu)) {
+ android_secondary_abi_toolchain =
+ "//build/toolchain/android:android_clang_${android_secondary_abi_cpu}"
+}
diff --git a/deps/v8/build/config/android/config.gni b/deps/v8/build/config/android/config.gni
new file mode 100644
index 0000000000..a4e230c763
--- /dev/null
+++ b/deps/v8/build/config/android/config.gni
@@ -0,0 +1,380 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+# NOTE: Because Chrome OS builds may depend on targets built with the Android
+# toolchain, this GNI file may be read and processed from within Chrome OS
+# toolchains. Checking |is_android| here would therefore be too restrictive.
+if (is_android || is_chromeos) {
+ import("//build/config/chromecast_build.gni")
+ import("//build_overrides/build.gni")
+ import("abi.gni")
+
+ if (build_with_chromium) {
+ # Some non-chromium projects (e.g. WebRTC) use our build configs
+ # heavily but don't write gclient args files.
+
+ import("//build/config/gclient_args.gni")
+ if (defined(checkout_android_native_support)) {
+ n = "$0x0A" # Newline
+ assert(checkout_android_native_support,
+ "Missing native Android toolchain support. |target_os| in your " +
+ ".gclient configuration file (in the parent directory of " +
+ "src) must include \"android\" and/or \"chromeos\". For " +
+ "example:${n}${n}solutions = [${n}...${n}]${n}" +
+ "target_os=[\"chromeos\"]${n}${n}" +
+ "After adding |target_os| please re-run \"gclient sync\".${n}")
+ }
+ }
+
+ has_chrome_android_internal =
+ exec_script("//build/dir_exists.py",
+ [ rebase_path("//clank", root_build_dir) ],
+ "string") == "True"
+
+ # We are using a separate declare_args block for only this argument so that
+ # we can decide if we have to pull in definitions from the internal config
+ # early.
+ declare_args() {
+ # Enables using the internal Chrome for Android repository. The default
+ # value depends on whether the repository is available, and if it's not but
+ # this argument is manually set to True, the generation will fail.
+ # The main purpose of this argument is to avoid having to maintain 2
+ # repositories to support both public only and internal builds.
+ enable_chrome_android_internal = has_chrome_android_internal
+
+ # Android API level for 32-bit platforms.
+ android32_ndk_api_level = 16
+
+ # Android API level for 64-bit platforms.
+ android64_ndk_api_level = 21
+ }
+
+ if (enable_chrome_android_internal) {
+ import("//clank/config.gni")
+ } else {
+ import("//build/config/android/sdk.gni")
+ declare_args() {
+ # Android SDK release. Currently, only "o_mr1" is publicly supported.
+ android_sdk_release = default_android_sdk_release
+ }
+ }
+
+ if (!defined(extra_chrome_shared_library_configs)) {
+ extra_chrome_shared_library_configs = []
+ }
+ if (!defined(extra_chrome_shared_library_deps)) {
+ extra_chrome_shared_library_deps = []
+ }
+
+ if (!defined(default_android_ndk_root)) {
+ default_android_ndk_root = "//third_party/android_ndk"
+ default_android_ndk_version = "r16"
+ default_android_ndk_major_version = 16
+ } else {
+ assert(defined(default_android_ndk_version))
+ assert(defined(default_android_ndk_major_version))
+ }
+
+ if (android_sdk_release == "o_mr1") {
+ default_android_sdk_root = "//third_party/android_sdk/public"
+ default_android_sdk_version = 27
+ default_android_sdk_build_tools_version = "27.0.3"
+ default_android_sdk_tools_version_suffix = "-26.0.0-dev"
+ public_android_sdk = true
+ }
+
+ if (android_sdk_release == "p") {
+ default_android_sdk_root = "//third_party/android_sdk/public"
+ default_android_sdk_version = 28
+ default_android_sdk_build_tools_version = "27.0.3"
+ default_android_sdk_tools_version_suffix = "-26.0.0-dev"
+ public_android_sdk = true
+ }
+
+ if (!defined(default_lint_android_sdk_root)) {
+ # Purposefully repeated so that downstream can change
+ # default_android_sdk_root without changing lint version.
+ default_lint_android_sdk_root = "//third_party/android_sdk/public"
+ default_lint_android_sdk_version = 26
+ }
+
+ if (!defined(default_extras_android_sdk_root)) {
+ # Purposefully repeated so that downstream can change
+ # default_android_sdk_root without changing where we load the SDK extras
+ # from. (Google Play services, etc.)
+ default_extras_android_sdk_root = "//third_party/android_sdk/public"
+ }
+
+ if (!defined(default_android_keystore_path)) {
+ default_android_keystore_path = "//build/android/chromium-debug.keystore"
+ default_android_keystore_name = "chromiumdebugkey"
+ default_android_keystore_password = "chromium"
+ }
+
+ # google_play_services_package contains the path where individual client
+ # targets (e.g. google_play_services_base_java) are located.
+ if (!defined(google_play_services_package)) {
+ if (is_chromecast && chromecast_branding != "public") {
+ google_play_services_package = "//chromecast/internal/android/prebuilt/google-play-services-first-party"
+ } else {
+ google_play_services_package = "//third_party/android_deps"
+ }
+ }
+
+ if (!defined(system_webview_apk_target)) {
+ system_webview_apk_target = "//android_webview:system_webview_apk"
+ }
+
+ webview_public_framework_dep =
+ "//third_party/android_sdk:public_framework_system_java"
+ if (!defined(webview_framework_dep)) {
+ webview_framework_dep = webview_public_framework_dep
+ }
+
+ assert(defined(default_android_sdk_root),
+ "SDK release " + android_sdk_release + " not recognized.")
+
+ declare_args() {
+ android_ndk_root = default_android_ndk_root
+ android_ndk_version = default_android_ndk_version
+ android_ndk_major_version = default_android_ndk_major_version
+
+ android_sdk_root = default_android_sdk_root
+ android_sdk_version = default_android_sdk_version
+ android_sdk_build_tools_version = default_android_sdk_build_tools_version
+ android_sdk_tools_version_suffix = default_android_sdk_tools_version_suffix
+
+ lint_android_sdk_root = default_lint_android_sdk_root
+ lint_android_sdk_version = default_lint_android_sdk_version
+
+ # Libc++ library directory. Override to use a custom libc++ binary.
+ android_libcpp_lib_dir = ""
+
+ # Android versionCode for android_apk()s that don't explicitly set one.
+ android_default_version_code = "1"
+
+ # Android versionName for android_apk()s that don't explicitly set one.
+ android_default_version_name = "Developer Build"
+
+ # Forced Android versionCode
+ android_override_version_code = ""
+
+ # Forced Android versionName
+ android_override_version_name = ""
+
+ # The path to the keystore to use for signing builds.
+ android_keystore_path = default_android_keystore_path
+
+ # The name of the keystore to use for signing builds.
+ android_keystore_name = default_android_keystore_name
+
+ # The password for the keystore to use for signing builds.
+ android_keystore_password = default_android_keystore_password
+
+ # Java debug on Android. Having this on enables multidexing, and turning it
+ # off will enable proguard.
+ is_java_debug = is_debug
+
+ # Report Java assert failures on Android. Turning this on reports Java
+ # assert failures without crashing.
+ report_java_assert = false
+
+ # Mark APKs as android:debuggable="true".
+ debuggable_apks = !is_official_build
+
+ # Set to false to disable the Errorprone compiler
+ use_errorprone_java_compiler = true
+
+ # Enables EMMA Java code coverage. Instruments classes during build to
+ # produce .ec files at runtime.
+ emma_coverage = false
+
+ # EMMA filter string consisting of a list of inclusion/exclusion patterns
+ # separated with whitespace and/or comma. Only has effect if
+ # emma_coverage==true
+ emma_filter = ""
+
+ # Disables process isolation when building _incremental targets.
+ # Required for Android M+ due to SELinux policies (stronger sandboxing).
+ disable_incremental_isolated_processes = false
+
+ # Build incremental targets whenever possible.
+ # Ex. with this arg set to true, the chrome_public_apk target results in
+ # chrome_public_apk_incremental being built.
+ incremental_apk_by_default = false
+
+ # When true, updates all android_aar_prebuilt() .info files during gn gen.
+ # Refer to android_aar_prebuilt() for more details.
+ update_android_aar_prebuilts = false
+
+ # Turns off android lint. Useful for prototyping or for faster local builds.
+ disable_android_lint = false
+
+ # Location of aapt2 binary used for app bundles. For now, a more recent version
+ # than the one distributed with the Android SDK is required.
+ android_sdk_tools_bundle_aapt2 =
+ "//third_party/android_build_tools/aapt2/aapt2"
+
+ # Use R8 for Java optimization rather than ProGuard for all targets. R8 is
+ # already used as the default for public targets. This will eventually be
+ # the default. https://crbug.com/908988
+ use_r8 = false
+
+ # Checks that proguard flags have not changed (!is_java_debug only).
+ check_android_configuration = false
+
+ # Enable the chrome build for devices without touchscreens.
+ notouch_build = false
+
+ # Move Trichrome assets to the shared library APK. This will be removed
+ # once P builds are no longer supported. https://crbug.com/943637
+ trichrome_shared_assets = android_sdk_release == "q"
+ }
+
+ if (notouch_build && defined(extra_keymappings)) {
+ keycode_conversion_data_android_path = extra_keymappings
+ }
+
+ assert(!(check_android_configuration && is_java_debug),
+ "check_android_configuration only works when proguard is enabled")
+
+ # We need a second declare_args block to make sure we are using the overridden
+ # value of the arguments set above.
+ declare_args() {
+ if (defined(default_android_sdk_platform_version)) {
+ android_sdk_platform_version = default_android_sdk_platform_version
+ } else {
+ android_sdk_platform_version = android_sdk_version
+ }
+
+ # Speed up dexing using dx --incremental.
+ enable_incremental_dx = is_java_debug
+
+ # Use hashed symbol names to reduce JNI symbol overhead.
+ use_hashed_jni_names = !is_java_debug
+ }
+
+ # This should not be used for release builds since dx --incremental is known
+ # to not produce byte-for-byte identical output.
+ assert(!(enable_incremental_dx && !is_java_debug))
+
+ # Path to where selected build variables are written to.
+ android_build_vars = "$root_build_dir/build_vars.txt"
+
+ # Host stuff -----------------------------------------------------------------
+
+ # Defines the name the Android build gives to the current host CPU
+ # architecture, which is different than the names GN uses.
+ if (host_cpu == "x64") {
+ android_host_arch = "x86_64"
+ } else if (host_cpu == "x86") {
+ android_host_arch = "x86"
+ } else {
+ assert(false, "Need Android toolchain support for your build CPU arch.")
+ }
+
+ # Defines the name the Android build gives to the current host operating
+ # system, which is different than the name GN uses.
+ if (host_os == "linux") {
+ android_host_os = "linux"
+ } else if (host_os == "mac") {
+ android_host_os = "darwin"
+ } else {
+ assert(false, "Need Android toolchain support for your build OS.")
+ }
+
+ # Directories and files ------------------------------------------------------
+ #
+ # We define many of the dir strings here for each output architecture (rather
+ # than just the current one) since these are needed by the Android toolchain
+ # file to define toolchains for all possible targets in one pass.
+
+ android_sdk =
+ "${android_sdk_root}/platforms/android-${android_sdk_platform_version}"
+ android_sdk_tools = "${android_sdk_root}/tools"
+ android_sdk_build_tools =
+ "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+ # Path to the SDK's android.jar
+ android_sdk_jar = "$android_sdk/android.jar"
+
+ # Subdirectories inside android_ndk_root that contain the sysroot for the
+ # associated platform.
+ x86_android_sysroot_subdir =
+ "platforms/android-${android32_ndk_api_level}/arch-x86"
+ arm_android_sysroot_subdir =
+ "platforms/android-${android32_ndk_api_level}/arch-arm"
+ mips_android_sysroot_subdir =
+ "platforms/android-${android32_ndk_api_level}/arch-mips"
+ x86_64_android_sysroot_subdir =
+ "platforms/android-${android64_ndk_api_level}/arch-x86_64"
+ arm64_android_sysroot_subdir =
+ "platforms/android-${android64_ndk_api_level}/arch-arm64"
+ mips64_android_sysroot_subdir =
+ "platforms/android-${android64_ndk_api_level}/arch-mips64"
+
+ # Toolchain root directory for each build. The actual binaries are inside
+ # a "bin" directory inside of these.
+ _android_toolchain_version = "4.9"
+ _android_toolchain_detailed_version = "4.9.x"
+ x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+
+ # Location of libgcc. This is only needed for the current GN toolchain, so we
+ # only need to define the current one, rather than one for every platform
+ # like the toolchain roots.
+ if (current_cpu == "x86") {
+ android_prebuilt_arch = "android-x86"
+ _binary_prefix = "i686-linux-android"
+ android_toolchain_root = "$x86_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+ } else if (current_cpu == "arm") {
+ android_prebuilt_arch = "android-arm"
+ _binary_prefix = "arm-linux-androideabi"
+ android_toolchain_root = "$arm_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_detailed_version}/libgcc.a"
+ } else if (current_cpu == "mipsel") {
+ android_prebuilt_arch = "android-mips"
+ _binary_prefix = "mipsel-linux-android"
+ android_toolchain_root = "$mips_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+ } else if (current_cpu == "x64") {
+ android_prebuilt_arch = "android-x86_64"
+ _binary_prefix = "x86_64-linux-android"
+ android_toolchain_root = "$x86_64_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+ } else if (current_cpu == "arm64") {
+ android_prebuilt_arch = "android-arm64"
+ _binary_prefix = "aarch64-linux-android"
+ android_toolchain_root = "$arm64_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+ } else if (current_cpu == "mips64el") {
+ android_prebuilt_arch = "android-mips64"
+ _binary_prefix = "mips64el-linux-android"
+ android_toolchain_root = "$mips64_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+ } else {
+ assert(false, "Need android libgcc support for your target arch.")
+ }
+
+ android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
+ android_readelf = "${android_tool_prefix}readelf"
+ android_objcopy = "${android_tool_prefix}objcopy"
+ android_gdbserver =
+ "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
+ # Toolchain stuff ------------------------------------------------------------
+
+ android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
+
+ if (android_libcpp_lib_dir == "") {
+ android_libcpp_lib_dir = "${android_libcpp_root}/libs/${android_app_abi}"
+ }
+}
diff --git a/deps/v8/build/config/android/extract_unwind_tables.gni b/deps/v8/build/config/android/extract_unwind_tables.gni
new file mode 100644
index 0000000000..2cf9887747
--- /dev/null
+++ b/deps/v8/build/config/android/extract_unwind_tables.gni
@@ -0,0 +1,56 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
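+# Extracts the 32-bit unwind tables from an unstripped shared library and
+# packages them as an android_assets() target. A usage sketch (hypothetical
+# names); library_target names the shared library to extract from:
+#
+#   unwind_table_asset("libmonochrome_unwind_assets") {
+#     library_target = "monochrome"
+#     deps = [ ":libmonochrome" ]
+#   }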
+template("unwind_table_asset") {
+ # Note: This file name is used in multiple monochrome build scripts.
+ _asset_path = "${target_gen_dir}/${target_name}/unwind_cfi_32"
+ _unwind_action = "${target_name}__extract"
+
+ action(_unwind_action) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+
+ _root_dir = "$root_out_dir"
+ if (build_apk_secondary_abi && defined(android_secondary_abi_cpu)) {
+ _root_dir = get_label_info(":foo($android_secondary_abi_toolchain)",
+ "root_out_dir")
+ }
+
+ script = "//build/android/gyp/extract_unwind_tables.py"
+ outputs = [
+ _asset_path,
+ ]
+ inputs = [
+ "${_root_dir}/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension",
+ ]
+
+ args = [
+ "--input_path",
+ rebase_path(
+ "${_root_dir}/lib.unstripped/$shlib_prefix${invoker.library_target}$shlib_extension",
+ root_build_dir),
+ "--output_path",
+ rebase_path(_asset_path, root_build_dir),
+ "--dump_syms_path",
+ rebase_path("$root_out_dir/dump_syms", root_build_dir),
+ ]
+ deps = invoker.deps
+ deps += [ "//third_party/breakpad:dump_syms" ]
+ }
+ android_assets(target_name) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ sources = [
+ _asset_path,
+ ]
+ disable_compression = true
+ deps = [
+ ":$_unwind_action",
+ ]
+ }
+}
diff --git a/deps/v8/build/config/android/internal_rules.gni b/deps/v8/build/config/android/internal_rules.gni
new file mode 100644
index 0000000000..264514adaf
--- /dev/null
+++ b/deps/v8/build/config/android/internal_rules.gni
@@ -0,0 +1,3816 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not add any imports to non-//build directories here.
+# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+import("//build/config/android/config.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/python.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/util/generate_wrapper.gni")
+import("//build_overrides/build.gni")
+assert(is_android)
+
+# These identify targets that have .build_config files (except for android_apk,
+# java_binary, resource_rewriter, android_app_bundle since we never need to
+# depend on these).
+_java_target_whitelist = [
+ "*:*_java",
+ "*:*_javalib",
+ "*:*_java_*", # e.g. java_test_support
+ "*:java",
+ "*:junit",
+ "*:junit_*",
+ "*:*_junit_*",
+ "*:*javatests",
+ "*:*_assets",
+ "*android*:assets",
+ "*:*_apk_*resources",
+ "*android*:resources",
+ "*:*_resources",
+ "*:*_grd",
+ "*:*locale_paks",
+ "*_bundle_module",
+
+ # TODO(agrieve): Rename targets below to match above patterns.
+ "*android_webview/glue:glue",
+]
+
+# Targets that match the whitelist but are not actually java targets.
+_java_target_blacklist = [ "*:*_unpack_aar" ]
+
+_default_proguard_jar_path = "//third_party/proguard/lib/proguard.jar"
+_r8_path = "//third_party/r8/lib/r8.jar"
+
+_dexdump_path = "$android_sdk_build_tools/dexdump"
+_dexlayout_path = "//third_party/android_build_tools/art/dexlayout"
+_profman_path = "//third_party/android_build_tools/art/profman"
+_art_lib_file_names = [
+ "libartbase.so",
+ "libart-compiler.so",
+ "libart-dexlayout.so",
+ "libart-disassembler.so",
+ "libart-gtest.so",
+ "libart.so",
+ "libbacktrace.so",
+ "libbase.so",
+ "libcrypto-host.so",
+ "libc++.so",
+ "libcutils.so",
+ "libdexfile.so",
+ "libexpat-host.so",
+ "libicui18n-host.so",
+ "libicuuc-host.so",
+ "libjavacore.so",
+ "libjavacrypto.so",
+ "liblog.so",
+ "liblz4.so",
+ "liblzma.so",
+ "libnativebridge.so",
+ "libnativehelper.so",
+ "libnativeloader.so",
+ "libopenjdkjvm.so",
+ "libopenjdkjvmti.so",
+ "libopenjdk.so",
+ "libprofile.so",
+ "libsigchain.so",
+ "libssl-host.so",
+ "libunwindstack.so",
+ "libvixl-arm64.so",
+ "libvixl-arm.so",
+ "libvixld-arm64.so",
+ "libvixld-arm.so",
+ "libz-host.so",
+ "libziparchive.so",
+ "slicer.so",
+]
+_default_art_libs = []
+foreach(lib, _art_lib_file_names) {
+ _default_art_libs += [ "//third_party/android_build_tools/art/lib/$lib" ]
+}
+
+# Put the bug number in the target name so that false-positives have a hint in
+# the error message about why non-existent dependencies are there.
+build_config_target_suffix = "__build_config_crbug_908819"
+
+# Write the target's .build_config file. This is a json file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax to add a value in that dictionary to
+# an action's or action_foreach's args:
+# --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
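+#
+# For example, a hypothetical arg such as
+#   --python-arg=@FileArg($rebased_build_config_path:deps_info:jar_path)
+# would be replaced at runtime by the jar_path value nested under deps_info
+# in the build_config (key names shown for illustration).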
+template("write_build_config") {
+ _type = invoker.type
+
+ # We don't need to enforce the naming scheme for these targets since we
+ # never consider them in dependency chains.
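+ # For the remaining types, (ab)use the sources assignment filter to
+ # validate the naming scheme: with the whitelist installed as the filter,
+ # assigning the label removes it when it matches, so a non-empty |sources|
+ # below means the name does not look like a java target. Such names are
+ # tolerated only if the blacklist matches them; otherwise the assert fires.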
+ if (_type != "android_apk" && _type != "java_binary" &&
+ _type != "resource_rewriter" && _type != "dist_jar" &&
+ _type != "java_annotation_processor" && _type != "dist_aar" &&
+ _type != "android_app_bundle") {
+ set_sources_assignment_filter(_java_target_whitelist)
+ _parent_invoker = invoker.invoker
+ _target_label =
+ get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain")
+ sources = [
+ _target_label,
+ ]
+ if (sources != []) {
+ set_sources_assignment_filter(_java_target_blacklist)
+ sources = []
+ sources = [
+ _target_label,
+ ]
+ if (sources != []) {
+ assert(false, "Invalid java target name: $_target_label")
+ }
+ }
+ sources = []
+ }
+
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (defined(invoker.android_manifest_dep)) {
+ deps += [ invoker.android_manifest_dep ]
+ }
+
+ script = "//build/android/gyp/write_build_config.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ inputs = []
+ outputs = [
+ invoker.build_config,
+ ]
+
+ _deps_configs = []
+ if (defined(invoker.possible_config_deps)) {
+ foreach(_possible_dep, invoker.possible_config_deps) {
+ set_sources_assignment_filter(_java_target_whitelist)
+ _target_label = get_label_info(_possible_dep, "label_no_toolchain")
+ sources = [
+ _target_label,
+ ]
+ if (sources == []) {
+ set_sources_assignment_filter(_java_target_blacklist)
+ sources = []
+ sources = [
+ _target_label,
+ ]
+ if (sources != []) {
+ # Put the bug number in the target name so that false-positives
+ # have a hint in the error message about non-existent dependencies.
+ deps += [ "$_target_label$build_config_target_suffix" ]
+ _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir")
+ _dep_name = get_label_info(_possible_dep, "name")
+ _deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+ }
+ }
+ sources = []
+ }
+ }
+ _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir)
+
+ args = [
+ "--type=$_type",
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--deps-configs=$_rebased_deps_configs",
+ "--build-config",
+ rebase_path(invoker.build_config, root_build_dir),
+ ]
+
+ if (defined(invoker.jar_path)) {
+ args += [
+ "--jar-path",
+ rebase_path(invoker.jar_path, root_build_dir),
+ ]
+ }
+ if (defined(invoker.unprocessed_jar_path)) {
+ args += [
+ "--unprocessed-jar-path",
+ rebase_path(invoker.unprocessed_jar_path, root_build_dir),
+ ]
+ }
+ if (defined(invoker.ijar_path)) {
+ args += [
+ "--interface-jar-path",
+ rebase_path(invoker.ijar_path, root_build_dir),
+ ]
+ }
+ if (defined(invoker.java_resources_jar)) {
+ args += [
+ "--java-resources-jar-path",
+ rebase_path(invoker.java_resources_jar, root_build_dir),
+ ]
+ }
+ if (defined(invoker.annotation_processor_deps)) {
+ _processor_configs = []
+ foreach(_processor_dep, invoker.annotation_processor_deps) {
+ _target_label = get_label_info(_processor_dep, "label_no_toolchain")
+ _dep_gen_dir = get_label_info(_processor_dep, "target_gen_dir")
+ _dep_name = get_label_info(_processor_dep, "name")
+ deps += [ "$_target_label$build_config_target_suffix" ]
+ _processor_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+ }
+ _rebased_processor_configs =
+ rebase_path(_processor_configs, root_build_dir)
+ args += [ "--annotation-processor-configs=$_rebased_processor_configs" ]
+ }
+
+ if (defined(invoker.dex_path)) {
+ args += [
+ "--dex-path",
+ rebase_path(invoker.dex_path, root_build_dir),
+ ]
+ }
+ if (defined(invoker.final_dex_path)) {
+ args += [
+ "--final-dex-path",
+ rebase_path(invoker.final_dex_path, root_build_dir),
+ ]
+ }
+ if (defined(invoker.supports_android) && invoker.supports_android) {
+ args += [ "--supports-android" ]
+ }
+ if (defined(invoker.requires_android) && invoker.requires_android) {
+ args += [ "--requires-android" ]
+ }
+ if (defined(invoker.is_prebuilt) && invoker.is_prebuilt) {
+ args += [ "--is-prebuilt" ]
+ }
+ if (defined(invoker.bypass_platform_checks) &&
+ invoker.bypass_platform_checks) {
+ args += [ "--bypass-platform-checks" ]
+ }
+
+ if (defined(invoker.apk_under_test)) {
+ deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ]
+ apk_under_test_gen_dir =
+ get_label_info(invoker.apk_under_test, "target_gen_dir")
+ apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
+ apk_under_test_config =
+ "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
+ args += [
+ "--tested-apk-config",
+ rebase_path(apk_under_test_config, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.asset_sources)) {
+ _rebased_asset_sources =
+ rebase_path(invoker.asset_sources, root_build_dir)
+ args += [ "--asset-sources=$_rebased_asset_sources" ]
+ }
+ if (defined(invoker.asset_renaming_sources)) {
+ _rebased_asset_renaming_sources =
+ rebase_path(invoker.asset_renaming_sources, root_build_dir)
+ args += [ "--asset-renaming-sources=$_rebased_asset_renaming_sources" ]
+
+ # These are zip paths, so no need to rebase.
+ args += [
+ "--asset-renaming-destinations=${invoker.asset_renaming_destinations}",
+ ]
+ }
+ if (defined(invoker.disable_compression) && invoker.disable_compression) {
+ args += [ "--disable-asset-compression" ]
+ }
+ if (defined(invoker.treat_as_locale_paks) && invoker.treat_as_locale_paks) {
+ args += [ "--treat-as-locale-paks" ]
+ }
+
+ if (defined(invoker.android_manifest)) {
+ inputs += [ invoker.android_manifest ]
+ args += [
+ "--android-manifest",
+ rebase_path(invoker.android_manifest, root_build_dir),
+ ]
+ }
+ if (defined(invoker.resources_zip)) {
+ args += [
+ "--resources-zip",
+ rebase_path(invoker.resources_zip, root_build_dir),
+ ]
+ }
+ if (defined(invoker.custom_package)) {
+ args += [
+ "--package-name",
+ invoker.custom_package,
+ ]
+ }
+ if (defined(invoker.r_text)) {
+ args += [
+ "--r-text",
+ rebase_path(invoker.r_text, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.resource_dirs)) {
+ resource_dirs = rebase_path(invoker.resource_dirs, root_build_dir)
+ args += [ "--resource-dirs=$resource_dirs" ]
+ }
+
+ if (defined(invoker.proto_resources_path)) {
+ _rebased_proto_resources =
+ rebase_path(invoker.proto_resources_path, root_build_dir)
+ args += [ "--apk-proto-resources=$_rebased_proto_resources" ]
+ }
+
+ if (defined(invoker.module_rtxt_path)) {
+ _rebased_rtxt_path = rebase_path(invoker.module_rtxt_path, root_build_dir)
+ args += [ "--module-rtxt-path=$_rebased_rtxt_path" ]
+ }
+
+ if (defined(invoker.shared_libraries_runtime_deps_file)) {
+ # Don't list shared_libraries_runtime_deps_file as an input in order to
+ # avoid having to depend on the runtime_deps target. See comment in
+ # rules.gni for why we do this.
+ args += [
+ "--shared-libraries-runtime-deps",
+ rebase_path(invoker.shared_libraries_runtime_deps_file, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.base_whitelist_rtxt_path)) {
+ args += [
+ "--base-whitelist-rtxt-path",
+ rebase_path(invoker.base_whitelist_rtxt_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) {
+ _rebased_modules = rebase_path(invoker.loadable_modules, root_build_dir)
+ args += [ "--native-libs=$_rebased_modules" ]
+ }
+
+ if (defined(invoker.extra_shared_libraries)) {
+ _rebased_extra_shared_libraries =
+ rebase_path(invoker.extra_shared_libraries, root_build_dir)
+ args += [ "--native-libs=$_rebased_extra_shared_libraries" ]
+ }
+
+ if (defined(invoker.secondary_abi_shared_libraries_runtime_deps_file)) {
+ # Don't list secondary_abi_shared_libraries_runtime_deps_file as an
+ # input in order to avoid having to depend on the runtime_deps target.
+ # See comment in rules.gni for why we do this.
+ args += [
+ "--secondary-abi-shared-libraries-runtime-deps",
+ rebase_path(invoker.secondary_abi_shared_libraries_runtime_deps_file,
+ root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.secondary_abi_loadable_modules) &&
+ invoker.secondary_abi_loadable_modules != []) {
+ _rebased_secondary_abi_modules =
+ rebase_path(invoker.secondary_abi_loadable_modules, root_build_dir)
+ args += [ "--secondary-native-libs=$_rebased_secondary_abi_modules" ]
+ }
+
+ if (defined(invoker.native_lib_placeholders) &&
+ invoker.native_lib_placeholders != []) {
+ args += [ "--native-lib-placeholders=${invoker.native_lib_placeholders}" ]
+ }
+
+ if (defined(invoker.secondary_native_lib_placeholders) &&
+ invoker.secondary_native_lib_placeholders != []) {
+ args += [ "--secondary-native-lib-placeholders=${invoker.secondary_native_lib_placeholders}" ]
+ }
+
+ if (defined(invoker.uncompress_shared_libraries) &&
+ invoker.uncompress_shared_libraries) {
+ args += [ "--uncompress-shared-libraries" ]
+ }
+
+ if (defined(invoker.apk_path)) {
+ _rebased_apk_path = rebase_path(invoker.apk_path, root_build_dir)
+ _incremental_allowed =
+ defined(invoker.incremental_allowed) && invoker.incremental_allowed
+ args += [ "--apk-path=$_rebased_apk_path" ]
+ if (_incremental_allowed) {
+ _rebased_incremental_apk_path =
+ rebase_path(invoker.incremental_apk_path, root_build_dir)
+ _rebased_incremental_install_json_path =
+ rebase_path(invoker.incremental_install_json_path, root_build_dir)
+ args += [
+ "--incremental-install-json-path=$_rebased_incremental_install_json_path",
+ "--incremental-apk-path=$_rebased_incremental_apk_path",
+ ]
+ }
+ }
+
+ if (defined(invoker.java_sources_file)) {
+ args += [
+ "--java-sources-file",
+ rebase_path(invoker.java_sources_file, root_build_dir),
+ ]
+ }
+ if (defined(invoker.srcjar)) {
+ args += [
+ "--srcjar",
+ rebase_path(invoker.srcjar, root_build_dir),
+ ]
+ }
+ if (defined(invoker.bundled_srcjars)) {
+ _rebased_bundled_srcjars =
+ rebase_path(invoker.bundled_srcjars, root_build_dir)
+ args += [ "--bundled-srcjars=$_rebased_bundled_srcjars" ]
+ }
+ if (defined(invoker.classpath_deps)) {
+ _classpath_deps_configs = []
+ foreach(d, invoker.classpath_deps) {
+ _target_label = get_label_info(d, "label_no_toolchain")
+ deps += [ "$_target_label$build_config_target_suffix" ]
+ _dep_gen_dir = get_label_info(d, "target_gen_dir")
+ _dep_name = get_label_info(d, "name")
+ _classpath_deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+ }
+ _rebased_classpath_deps_configs =
+ rebase_path(_classpath_deps_configs, root_build_dir)
+ args += [ "--classpath-deps-configs=$_rebased_classpath_deps_configs" ]
+ }
+ if (defined(invoker.input_jars_paths)) {
+ _rebased_input_jars_paths =
+ rebase_path(invoker.input_jars_paths, root_build_dir)
+ args += [ "--extra-classpath-jars=$_rebased_input_jars_paths" ]
+ }
+ if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+ args += [ "--proguard-enabled" ]
+ }
+ if (defined(invoker.proguard_mapping_path)) {
+ _rebased_proguard_mapping_path =
+ rebase_path(invoker.proguard_mapping_path, root_build_dir)
+ args += [ "--proguard-mapping-path=$_rebased_proguard_mapping_path" ]
+ }
+ if (defined(invoker.proguard_configs)) {
+ _rebased_proguard_configs =
+ rebase_path(invoker.proguard_configs, root_build_dir)
+ args += [ "--proguard-configs=$_rebased_proguard_configs" ]
+ }
+ if (defined(invoker.static_library_dependent_targets)) {
+ _dependent_configs = []
+ foreach(d, invoker.static_library_dependent_targets) {
+ _target_label = get_label_info(d, "label_no_toolchain")
+ deps += [ "$_target_label$build_config_target_suffix" ]
+ _dep_gen_dir = get_label_info(d, "target_gen_dir")
+ _dep_name = get_label_info(d, "name")
+ _dependent_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+ }
+ _rebased_depdent_configs = rebase_path(_dependent_configs, root_build_dir)
+ args += [ "--static-library-dependent-configs=$_rebased_depdent_configs" ]
+ }
+ if (defined(invoker.gradle_treat_as_prebuilt) &&
+ invoker.gradle_treat_as_prebuilt) {
+ args += [ "--gradle-treat-as-prebuilt" ]
+ }
+ if (defined(invoker.main_class)) {
+ args += [
+ "--main-class",
+ invoker.main_class,
+ ]
+ }
+ if (defined(invoker.base_module_target)) {
+ _target_label =
+ get_label_info(invoker.base_module_target, "label_no_toolchain")
+ _dep_gen_dir = get_label_info(_target_label, "target_gen_dir")
+ _dep_name = get_label_info(_target_label, "name")
+ deps += [ "$_target_label$build_config_target_suffix" ]
+ args += [
+ "--base-module-build-config",
+ rebase_path("$_dep_gen_dir/$_dep_name.build_config", root_build_dir),
+ ]
+ }
+ if (current_toolchain != default_toolchain) {
+ # This has to be a build-time error rather than a GN assert because many
+ # packages have a mix of java and non-java targets. For example, the
+ # following would fail even though nothing depends on :bar(//baz):
+ #
+ # shared_library("foo") {
+ # }
+ #
+ # android_library("bar") {
+ # deps = [ ":foo(//baz)" ]
+ # assert(current_toolchain == default_toolchain)
+ # }
+ _msg = [
+ "Tried to build an Android target in a non-default toolchain.",
+ "target: " + get_label_info(":$target_name", "label_with_toolchain"),
+ "default_toolchain: $default_toolchain",
+ ]
+ args += [ "--fail=$_msg" ]
+ }
+ }
+}
+
+# Copy a list of files into a destination directory, potentially renaming
+# files as they are copied. This also ensures that symlinks are followed
+# during the copy (i.e. the symlinks are never copied, only their content).
+#
+# Variables:
+# dest: Destination directory path.
+# sources: List of source files or directories to copy to dest.
+# renaming_sources: Optional list of source file paths that will be renamed
+# during the copy operation. If provided, renaming_destinations is required.
+# renaming_destinations: Optional list of destination file paths, required
+# when renaming_sources is provided. Both lists should have the same size
+# and matching entries.
+# args: Optional. Additional arguments to the copy_ex.py script.
+#
+# The following variables have the usual GN meaning: data, deps, inputs,
+# outputs, testonly, visibility.
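+#
+# Example usage (a hypothetical sketch; the target name and paths are
+# illustrative, not taken from real targets):
+#
+#   copy_ex("copy_sample_jars") {
+#     dest = "$root_build_dir/lib.java"
+#     sources = [ "//some/dir/a.jar" ]
+#     renaming_sources = [ "//some/dir/b.jar" ]
+#     renaming_destinations = [ "b_renamed.jar" ]
+#   }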
+#
+template("copy_ex") {
+ set_sources_assignment_filter([])
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "deps",
+ "outputs",
+ "testonly",
+ "visibility",
+ ])
+ sources = []
+ if (defined(invoker.sources)) {
+ sources += invoker.sources
+ }
+ if (defined(invoker.inputs)) {
+ inputs = invoker.inputs
+ }
+
+ script = "//build/android/gyp/copy_ex.py"
+
+ args = [
+ "--dest",
+ rebase_path(invoker.dest, root_build_dir),
+ ]
+ rebased_sources = rebase_path(sources, root_build_dir)
+ args += [ "--files=$rebased_sources" ]
+
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+
+ if (defined(invoker.renaming_sources) &&
+ defined(invoker.renaming_destinations)) {
+ sources += invoker.renaming_sources
+ rebased_renaming_sources =
+ rebase_path(invoker.renaming_sources, root_build_dir)
+ args += [ "--renaming-sources=$rebased_renaming_sources" ]
+
+ renaming_destinations = invoker.renaming_destinations
+ args += [ "--renaming-destinations=$renaming_destinations" ]
+ }
+ }
+}
+
+template("generate_android_wrapper") {
+ generate_wrapper(target_name) {
+ forward_variables_from(invoker, "*")
+ generator_script = "//build/android/gyp/generate_android_wrapper.py"
+ sources = [
+ "//build/android/gyp/util/build_utils.py",
+ "//build/gn_helpers.py",
+ "//build/util/generate_wrapper.py",
+ ]
+ }
+}
+
+# Generates a script in the build bin directory which runs the test
+# target using the test runner script in build/android/test_runner.py.
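+#
+# Example usage (a hypothetical sketch; the gtest target and suite names are
+# illustrative):
+#
+#   test_runner_script("foo_unittests__test_runner_script") {
+#     test_name = "foo_unittests"
+#     test_type = "gtest"
+#     test_suite = "foo_unittests"
+#     apk_target = ":foo_unittests_apk"
+#   }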
+template("test_runner_script") {
+ testonly = true
+ _test_name = invoker.test_name
+ _test_type = invoker.test_type
+ _incremental_install =
+ defined(invoker.incremental_install) && invoker.incremental_install
+
+ _runtime_deps =
+ !defined(invoker.ignore_all_data_deps) || !invoker.ignore_all_data_deps
+
+ if (_runtime_deps) {
+ # This runtime_deps file is used at runtime and thus cannot go in
+ # target_gen_dir.
+ _target_dir_name = get_label_info(":$target_name", "dir")
+ _runtime_deps_file =
+ "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.runtime_deps"
+ _runtime_deps_target = "${target_name}__write_deps"
+ group(_runtime_deps_target) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "deps",
+ "public_deps",
+ ])
+ data_deps = []
+ if (defined(invoker.data_deps)) {
+ data_deps += invoker.data_deps
+ }
+ if (defined(invoker.additional_apks)) {
+ data_deps += invoker.additional_apks
+ }
+ write_runtime_deps = _runtime_deps_file
+ }
+ }
+
+ generate_android_wrapper(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+
+ if (defined(android_test_runner_script)) {
+ executable = android_test_runner_script
+ } else {
+ executable = "//build/android/test_runner.py"
+ }
+ testonly = true
+
+ data_deps += [
+ "//build/android:test_runner_py",
+ "//build/android:logdog_wrapper_py",
+ ]
+
+ data = []
+
+ executable_args = [
+ _test_type,
+ "--output-directory",
+ "@WrappedPath(.)",
+ ]
+
+ if (_runtime_deps) {
+ deps += [ ":$_runtime_deps_target" ]
+ data += [ _runtime_deps_file ]
+ _rebased_runtime_deps_file =
+ rebase_path(_runtime_deps_file, root_build_dir)
+ executable_args += [
+ "--runtime-deps-path",
+ "@WrappedPath(${_rebased_runtime_deps_file})",
+ ]
+ }
+
+ # apk_target is not used for native executable tests
+ # (e.g. breakpad_unittests).
+ if (defined(invoker.apk_target)) {
+ assert(!defined(invoker.executable_dist_dir))
+ deps += [ "${invoker.apk_target}$build_config_target_suffix" ]
+ _apk_build_config =
+ get_label_info(invoker.apk_target, "target_gen_dir") + "/" +
+ get_label_info(invoker.apk_target, "name") + ".build_config"
+ _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir)
+ assert(_rebased_apk_build_config != "") # Mark as used.
+ } else if (_test_type == "gtest") {
+ assert(
+ defined(invoker.executable_dist_dir),
+ "Must define either apk_target or executable_dist_dir for test_runner_script()")
+ _rebased_executable_dist_dir =
+ rebase_path(invoker.executable_dist_dir, root_build_dir)
+ executable_args += [
+ "--executable-dist-dir",
+ "@WrappedPath(${_rebased_executable_dist_dir})",
+ ]
+ }
+
+ _device_test = true
+ if (_test_type == "gtest") {
+ assert(defined(invoker.test_suite))
+ executable_args += [
+ "--suite",
+ invoker.test_suite,
+ ]
+ } else if (_test_type == "instrumentation") {
+ _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))"
+ if (_incremental_install) {
+ _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path))"
+ }
+ _rebased_test_jar = rebase_path(invoker.test_jar, root_build_dir)
+ executable_args += [
+ "--test-apk",
+ _test_apk,
+ "--test-jar",
+ "@WrappedPath(${_rebased_test_jar})",
+ ]
+ if (defined(invoker.apk_under_test)) {
+ deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ]
+ _apk_under_test_build_config =
+ get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" +
+ get_label_info(invoker.apk_under_test, "name") + ".build_config"
+ _rebased_apk_under_test_build_config =
+ rebase_path(_apk_under_test_build_config, root_build_dir)
+ _apk_under_test = "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:apk_path))"
+ if (_incremental_install) {
+ _apk_under_test = "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path))"
+ }
+ executable_args += [
+ "--apk-under-test",
+ _apk_under_test,
+ ]
+ }
+ if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+ executable_args += [ "--enable-java-deobfuscation" ]
+ }
+ if (emma_coverage) {
+ # Set a default coverage output directory (can be overridden by user
+ # passing the same flag).
+ _rebased_coverage_dir =
+ rebase_path("$root_out_dir/coverage", root_build_dir)
+ executable_args += [
+ "--coverage-dir",
+ "@WrappedPath(${_rebased_coverage_dir})",
+ ]
+ }
+ } else if (_test_type == "junit") {
+ assert(defined(invoker.test_suite))
+ _device_test = false
+ executable_args += [
+ "--test-suite",
+ invoker.test_suite,
+ ]
+ if (defined(invoker.android_manifest_path)) {
+ _rebased_android_manifest_path =
+ rebase_path(invoker.android_manifest_path, root_build_dir)
+ executable_args += [
+ "--android-manifest-path",
+ "@WrappedPath(${_rebased_android_manifest_path})",
+ ]
+ } else if (defined(invoker.package_name)) {
+ executable_args += [
+ "--package-name",
+ invoker.package_name,
+ ]
+ } else {
+ assert(false, "Must specify a package_name or android_manifest_path")
+ }
+
+ deps += [ ":${invoker.test_suite}$build_config_target_suffix" ]
+ _junit_binary_build_config =
+ "${target_gen_dir}/${invoker.test_suite}.build_config"
+ _rebased_build_config =
+ rebase_path("$_junit_binary_build_config", root_build_dir)
+ executable_args += [ "@WrappedPathList(--resource-zip, @FileArg($_rebased_build_config:resources:dependency_zips))" ]
+
+ _rebased_robolectric_runtime_deps_dir =
+ rebase_path("$root_build_dir/lib.java/third_party/robolectric",
+ root_build_dir)
+ executable_args += [
+ "--robolectric-runtime-deps-dir",
+ "@WrappedPath(${_rebased_robolectric_runtime_deps_dir})",
+ ]
+ } else if (_test_type == "linker") {
+ executable_args += [
+ "--test-apk",
+ "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))",
+ ]
+ } else {
+ assert(false, "Invalid test type: $_test_type.")
+ }
+
+ if (defined(invoker.additional_apks)) {
+ foreach(additional_apk, invoker.additional_apks) {
+ deps += [ "$additional_apk$build_config_target_suffix" ]
+ _build_config = get_label_info(additional_apk, "target_gen_dir") + "/" +
+ get_label_info(additional_apk, "name") + ".build_config"
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ executable_args += [
+ "--additional-apk",
+ "@WrappedPath(@FileArg($_rebased_build_config:deps_info:apk_path))",
+ "--additional-apk-incremental",
+ "@WrappedPath(@FileArg($_rebased_build_config:deps_info:incremental_apk_path))",
+ ]
+ }
+ }
+ if (defined(invoker.shard_timeout)) {
+ executable_args += [ "--shard-timeout=${invoker.shard_timeout}" ]
+ }
+ if (_incremental_install) {
+ executable_args += [
+ "--test-apk-incremental-install-json",
+ "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_install_json_path))",
+ ]
+ if (defined(invoker.apk_under_test)) {
+ executable_args += [
+ "--apk-under-test-incremental-install-json",
+ "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_install_json_path))",
+ ]
+ }
+ executable_args += [ "--fast-local-dev" ]
+ }
+ if (_device_test && is_asan) {
+ executable_args += [ "--tool=asan" ]
+ }
+
+ if (defined(invoker.generated_script)) {
+ assert(_test_name != "" || true) # Mark _test_name as used.
+ wrapper_script = invoker.generated_script
+ } else {
+ wrapper_script = "$root_build_dir/bin/run_${_test_name}"
+ }
+ }
+}
+
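+# Generates a wrapper script in the build bin directory around the
+# third_party/android_platform "stack" tool, for symbolizing native crash
+# stacks of the given target.
+#
+# Example usage (a hypothetical sketch; the target name is illustrative):
+#
+#   stack_script("foo_stack_script") {
+#     stack_target_name = "foo"
+#   }
+#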
+template("stack_script") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "testonly",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+
+ data_deps +=
+ [ "//third_party/android_platform/development/scripts:stack_py" ]
+
+ script = "//build/android/gyp/create_stack_script.py"
+
+ _stack_target_name = invoker.stack_target_name
+ _stack_script = "//third_party/android_platform/development/scripts/stack"
+
+ _generated_script = "$root_build_dir/bin/stack_${_stack_target_name}"
+
+ outputs = [
+ _generated_script,
+ ]
+ data = [
+ _generated_script,
+ ]
+
+ args = [
+ "--output-directory",
+ rebase_path(root_build_dir, root_build_dir),
+ "--script-path",
+ rebase_path(_stack_script, root_build_dir),
+ "--script-output-path",
+ rebase_path(_generated_script, root_build_dir),
+ "--arch=$target_cpu",
+ ]
+ if (defined(invoker.packed_libraries)) {
+ args += [
+ "--packed-libs",
+ invoker.packed_libraries,
+ ]
+ }
+ }
+}
+
+if (enable_java_templates) {
+ android_sdk_jar = "$android_sdk/android.jar"
+ android_default_aapt_path = "$android_sdk_build_tools/aapt"
+
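+ # Runs Android lint against a target's Java sources
+ # (see //build/android/gyp/lint.py).
+ #
+ # Example usage (a hypothetical sketch; names and paths are illustrative):
+ #
+ #   android_lint("foo__lint") {
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     requires_android = true
+ #     java_files = [ "java/src/org/chromium/foo/Foo.java" ]
+ #     java_sources_file = "$target_gen_dir/foo.sources"
+ #   }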
+ template("android_lint") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "data_deps",
+ "public_deps",
+ "testonly",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (defined(invoker.srcjar_deps)) {
+ deps += invoker.srcjar_deps
+ }
+
+ if (defined(invoker.lint_suppressions_file)) {
+ lint_suppressions_file = invoker.lint_suppressions_file
+ } else if (!defined(lint_suppressions_file)) {
+ lint_suppressions_file = "//build/android/lint/suppressions.xml"
+ }
+
+ _lint_path = "$lint_android_sdk_root/tools-lint/bin/lint"
+ _cache_dir = "$root_build_dir/android_lint_cache"
+ _result_path = "$target_gen_dir/$target_name/result.xml"
+ _config_path = "$target_gen_dir/$target_name/config.xml"
+ _suppressions_file = lint_suppressions_file
+ _platform_xml_path =
+ "${android_sdk_root}/platform-tools/api/api-versions.xml"
+
+ script = "//build/android/gyp/lint.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ inputs = [
+ _platform_xml_path,
+ _suppressions_file,
+ ]
+
+ outputs = [
+ _result_path,
+ _config_path,
+ ]
+
+ args = [
+ "--lint-path",
+ rebase_path(_lint_path, root_build_dir),
+ "--cache-dir",
+ rebase_path(_cache_dir, root_build_dir),
+ "--platform-xml-path",
+ rebase_path(_platform_xml_path, root_build_dir),
+ "--android-sdk-version=${lint_android_sdk_version}",
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--config-path",
+ rebase_path(_suppressions_file, root_build_dir),
+ "--product-dir=.",
+ "--processed-config-path",
+ rebase_path(_config_path, root_build_dir),
+ "--result-path",
+ rebase_path(_result_path, root_build_dir),
+ "--include-unexpected-failures",
+ ]
+ if (defined(invoker.android_manifest)) {
+ inputs += [ invoker.android_manifest ]
+ args += [
+ "--manifest-path",
+ rebase_path(invoker.android_manifest, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.disable)) {
+ args += [ "--disable=${invoker.disable}" ]
+ }
+
+ if (defined(invoker.create_cache) && invoker.create_cache) {
+ args += [
+ "--create-cache",
+ "--silent",
+ ]
+ } else {
+ inputs += invoker.java_files
+ inputs += [ invoker.build_config ]
+ if (invoker.java_files != []) {
+ inputs += [ invoker.java_sources_file ]
+ _rebased_java_sources_file =
+ rebase_path(invoker.java_sources_file, root_build_dir)
+ args += [ "--java-sources-file=$_rebased_java_sources_file" ]
+ }
+ deps += [ "//build/android:prepare_android_lint_cache" ]
+
+ _rebased_build_config =
+ rebase_path(invoker.build_config, root_build_dir)
+ args += [
+ "--srcjars=@FileArg($_rebased_build_config:gradle:bundled_srcjars)",
+ "--can-fail-build",
+ ]
+ if (invoker.requires_android) {
+ args += [
+ "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_dirs)",
+ "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_zips)",
+ ]
+ }
+ }
+ }
+ }
+
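+ # Runs ProGuard (or R8, when use_r8 is set or no proguard_jar_path is
+ # given) over the input classpath (see //build/android/gyp/proguard.py).
+ #
+ # Example usage (a hypothetical sketch; names and paths are illustrative):
+ #
+ #   proguard("foo__proguard") {
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     output_path = "$target_out_dir/foo.proguard.jar"
+ #   }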
+ template("proguard") {
+ action_with_pydeps(target_name) {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+ script = "//build/android/gyp/proguard.py"
+
+ # http://crbug.com/725224. Fix for bots running out of memory.
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ _output_path = invoker.output_path
+ inputs = [
+ invoker.build_config,
+ ]
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+ _mapping_path = "$_output_path.mapping"
+ if (defined(invoker.proguard_mapping_path)) {
+ _mapping_path = invoker.proguard_mapping_path
+ }
+ depfile = "${target_gen_dir}/${target_name}.d"
+ outputs = [
+ _output_path,
+ _mapping_path,
+ ]
+ _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--output-path",
+ rebase_path(_output_path, root_build_dir),
+ "--mapping-output",
+ rebase_path(_mapping_path, root_build_dir),
+ "--classpath",
+ "@FileArg($_rebased_build_config:deps_info:proguard_classpath_jars)",
+ "--classpath",
+ "@FileArg($_rebased_build_config:android:sdk_jars)",
+ ]
+
+ if (defined(invoker.config_output_path)) {
+ _config_output_path = invoker.config_output_path
+ outputs += [ _config_output_path ]
+ args += [
+ "--output-config",
+ rebase_path(_config_output_path, root_build_dir),
+ ]
+
+ if (defined(invoker.proguard_expectations_file)) {
+ _expected_configs_file = invoker.proguard_expectations_file
+ inputs += [ _expected_configs_file ]
+ args += [
+ "--expected-configs-file",
+ rebase_path(_expected_configs_file, root_build_dir),
+ ]
+ if (check_android_configuration) {
+ args += [ "--verify-expected-configs" ]
+ }
+ }
+ }
+
+ if (!defined(invoker.proguard_jar_path) || use_r8) {
+ if (defined(invoker.proguard_jar_path)) {
+ not_needed(invoker, [ "proguard_jar_path" ])
+ }
+ args += [
+ "--r8-path",
+ rebase_path(_r8_path, root_build_dir),
+ ]
+ inputs += [ _r8_path ]
+ } else {
+ _proguard_jar_path = invoker.proguard_jar_path
+ args += [
+ "--proguard-path",
+ rebase_path(_proguard_jar_path, root_build_dir),
+ ]
+ inputs += [ _proguard_jar_path ]
+ }
+
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+ }
+ }
+
+ # Generates a script in the build bin directory to run a java binary.
+ #
+ # Variables
+ # main_class: The class containing the program entry point.
+ # build_config: Path to .build_config for the jar (contains classpath).
+ # jar_path: Optional. First classpath entry to be inserted before
+ # the classpath extracted from the build_config.
+ # script_name: Name of the script to generate.
+ # wrapper_script_args: List of extra arguments to pass to the executable.
+ # bootclasspath: Optional. List of zip/jar file paths to add to the boot
+ # class path when the script invokes javac.
+ #
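+ # Example usage (a hypothetical sketch; names are illustrative):
+ #
+ #   java_binary_script("foo_script") {
+ #     main_class = "org.chromium.foo.FooMain"
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     script_name = "foo"
+ #     wrapper_script_args = [ "--some-flag" ]
+ #   }
+ #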
+ template("java_binary_script") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+
+ _main_class = invoker.main_class
+ _build_config = invoker.build_config
+ _script_name = invoker.script_name
+
+ script = "//build/android/gyp/create_java_binary_script.py"
+ inputs = [
+ _build_config,
+ ]
+ _java_script = "$root_build_dir/bin/$_script_name"
+ outputs = [
+ _java_script,
+ ]
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ args = [
+ "--output",
+ rebase_path(_java_script, root_build_dir),
+ "--main-class",
+ _main_class,
+ ]
+ if (defined(invoker.jar_path)) {
+ _jar_path_list = [ rebase_path(invoker.jar_path, root_build_dir) ]
+ args += [ "--classpath=$_jar_path_list" ]
+ }
+ args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)" ]
+
+ if (emma_coverage) {
+ args += [
+ "--classpath",
+ rebase_path("//third_party/android_sdk/public/tools/lib/emma.jar",
+ root_build_dir),
+ "--noverify",
+ ]
+ }
+ if (defined(invoker.wrapper_script_args)) {
+ args += [ "--" ] + invoker.wrapper_script_args
+ }
+ if (defined(invoker.bootclasspath)) {
+ args += [
+ "--bootclasspath",
+ rebase_path(invoker.bootclasspath, root_build_dir),
+ ]
+ }
+ }
+ }
+
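+ # Creates a classes.dex file (or multidex set) from Java bytecode via
+ # //build/android/gyp/dex.py, optionally running ProGuard/R8 first.
+ #
+ # Example usage (a hypothetical sketch; names and paths are illustrative,
+ # and enable_multidex defaults to true when not set):
+ #
+ #   dex("foo__dex") {
+ #     output = "$target_out_dir/foo/classes.dex"
+ #     input_jars = [ "$target_out_dir/foo.processed.jar" ]
+ #     enable_multidex = false
+ #   }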
+ template("dex") {
+ assert(defined(invoker.output))
+
+ _proguard_enabled =
+ defined(invoker.proguard_enabled) && invoker.proguard_enabled
+ _proguarding_with_r8 =
+ _proguard_enabled && (!defined(invoker.proguard_jar_path) || use_r8)
+ _enable_multidex =
+ !defined(invoker.enable_multidex) || invoker.enable_multidex
+ _enable_main_dex_list =
+ _enable_multidex &&
+ (!defined(invoker.min_sdk_version) || invoker.min_sdk_version < 21)
+ if (!_enable_main_dex_list) {
+ if (defined(invoker.negative_main_dex_globs)) {
+ not_needed(invoker, [ "negative_main_dex_globs" ])
+ }
+ }
+
+ assert(!(defined(invoker.input_jars) && _proguard_enabled),
+ "input_jars can't be specified when proguarding a dex.")
+
+ if (_enable_main_dex_list) {
+ _main_dex_rules = "//build/android/main_dex_classes.flags"
+ }
+
+ if (!_proguarding_with_r8) {
+ _dexing_jars = []
+ if (defined(invoker.input_jars)) {
+ _dexing_jars += invoker.input_jars
+ }
+ }
+
+ if (_proguard_enabled) {
+ if (_proguarding_with_r8) {
+ _proguard_output_path = invoker.output
+ _proguard_target_name = target_name
+ _proguard_config_output_path = "$_proguard_output_path.proguard_flags"
+ } else {
+ _proguard_output_path = invoker.output + ".proguard.jar"
+ _proguard_target_name = "${target_name}__proguard"
+ _dexing_jars += [ _proguard_output_path ]
+ }
+
+ proguard(_proguard_target_name) {
+ forward_variables_from(invoker,
+ [
+ "build_config",
+ "deps",
+ "proguard_expectations_file",
+ "proguard_jar_path",
+ "proguard_mapping_path",
+ "testonly",
+ ])
+ inputs = []
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+ if (defined(invoker.proguard_configs)) {
+ inputs += invoker.proguard_configs
+ }
+
+ _rebased_build_config = rebase_path(build_config, root_build_dir)
+ args = [
+ "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
+ "--input-paths=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)",
+ ]
+ if (defined(invoker.proguard_config_exclusions)) {
+ _rebased_proguard_config_exclusions =
+ rebase_path(invoker.proguard_config_exclusions, root_build_dir)
+ args += [
+ "--proguard-config-exclusions=$_rebased_proguard_config_exclusions",
+ ]
+ }
+ if (defined(invoker.proguard_args)) {
+ args += invoker.proguard_args
+ }
+
+ if (defined(invoker.repackage_classes)) {
+ args += [
+ "--repackage-classes",
+ "${invoker.repackage_classes}",
+ ]
+ }
+
+ if (defined(invoker.min_sdk_version)) {
+ args += [
+ "--min-api",
+ "${invoker.min_sdk_version}",
+ ]
+ }
+
+ if (_enable_multidex && _proguarding_with_r8) {
+ if (_enable_main_dex_list) {
+ if (defined(invoker.extra_main_dex_proguard_config)) {
+ args += [
+ "--main-dex-rules-path",
+ rebase_path(invoker.extra_main_dex_proguard_config,
+ root_build_dir),
+ ]
+ inputs += [ invoker.extra_main_dex_proguard_config ]
+ }
+ args += [
+ "--main-dex-rules-path",
+ rebase_path(_main_dex_rules, root_build_dir),
+ ]
+ inputs += [ _main_dex_rules ]
+ } else {
+ if (defined(invoker.extra_main_dex_proguard_config)) {
+ not_needed(invoker, [ "extra_main_dex_proguard_config" ])
+ }
+ }
+ }
+
+ output_path = _proguard_output_path
+ if (_proguarding_with_r8) {
+ config_output_path = _proguard_config_output_path
+ }
+ }
+ }
+
+ if (!_proguarding_with_r8) {
+ if (_enable_main_dex_list) {
+ _main_dex_list_path = invoker.output + ".main_dex_list"
+ _main_dex_list_target_name = "${target_name}__main_dex_list"
+ action_with_pydeps(_main_dex_list_target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+
+ script = "//build/android/gyp/main_dex_list.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ # http://crbug.com/725224. Fix for bots running out of memory.
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ if (defined(invoker.proguard_jar_path)) {
+ _proguard_jar_path = invoker.proguard_jar_path
+ } else {
+ _proguard_jar_path = _default_proguard_jar_path
+ }
+
+ _shrinked_android = "$android_sdk_build_tools/lib/shrinkedAndroid.jar"
+ _dx = "$android_sdk_build_tools/lib/dx.jar"
+ inputs = [
+ _main_dex_rules,
+ _dx,
+ _proguard_jar_path,
+ _shrinked_android,
+ ]
+
+ outputs = [
+ _main_dex_list_path,
+ ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--dx-path",
+ rebase_path(_dx, root_build_dir),
+ "--shrinked-android-path",
+ rebase_path(_shrinked_android, root_build_dir),
+ "--main-dex-list-path",
+ rebase_path(_main_dex_list_path, root_build_dir),
+ "--main-dex-rules-path",
+ rebase_path(_main_dex_rules, root_build_dir),
+ "--proguard-path",
+ rebase_path(_proguard_jar_path, root_build_dir),
+ ]
+
+ if (defined(invoker.extra_main_dex_proguard_config)) {
+ inputs += [ invoker.extra_main_dex_proguard_config ]
+ args += [
+ "--main-dex-rules-path",
+ rebase_path(invoker.extra_main_dex_proguard_config,
+ root_build_dir),
+ ]
+ }
+
+ if (_proguard_enabled) {
+ deps += [ ":${_proguard_target_name}" ]
+ }
+
+ if (defined(invoker.negative_main_dex_globs)) {
+ args += [
+ "--negative-main-dex-globs=${invoker.negative_main_dex_globs}",
+ ]
+ }
+
+ if (defined(invoker.input_jar_classpath)) {
+ inputs += [ invoker.build_config ]
+ args += [ "--inputs=@FileArg(${invoker.input_jar_classpath})" ]
+ }
+
+ inputs += _dexing_jars
+ if (_dexing_jars != []) {
+ args += rebase_path(_dexing_jars, root_build_dir)
+ }
+ }
+ }
+
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ script = "//build/android/gyp/dex.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ inputs = []
+ outputs = [
+ invoker.output,
+ ]
+
+ _rebased_output = rebase_path(invoker.output, root_build_dir)
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--dex-path",
+ _rebased_output,
+ ]
+
+ if (_proguard_enabled) {
+ deps += [ ":${_proguard_target_name}" ]
+ }
+
+ if (_enable_multidex) {
+ args += [ "--multi-dex" ]
+ if (_enable_main_dex_list) {
+ args += [
+ "--main-dex-list-path",
+ rebase_path(_main_dex_list_path, root_build_dir),
+ ]
+ deps += [ ":${_main_dex_list_target_name}" ]
+ inputs += [ _main_dex_list_path ]
+ }
+ }
+
+ if (defined(invoker.input_dex_classpath)) {
+ inputs += [ invoker.build_config ]
+ args += [ "--inputs=@FileArg(${invoker.input_dex_classpath})" ]
+ }
+
+ inputs += _dexing_jars
+ if (_dexing_jars != []) {
+ args += rebase_path(_dexing_jars, root_build_dir)
+ }
+
+ if (defined(invoker.dexlayout_profile)) {
+ args += [
+ "--dexlayout-profile",
+ rebase_path(invoker.dexlayout_profile, root_build_dir),
+ "--dexlayout-path",
+ rebase_path(_dexlayout_path, root_build_dir),
+ "--profman-path",
+ rebase_path(_profman_path, root_build_dir),
+ "--dexdump-path",
+ rebase_path(_dexdump_path, root_build_dir),
+ ]
+ inputs += [
+ _dexlayout_path,
+ _profman_path,
+ _dexdump_path,
+ invoker.dexlayout_profile,
+ ]
+ inputs += _default_art_libs
+ if (_proguard_enabled) {
+ args += [
+ "--proguard-mapping-path",
+ rebase_path(invoker.proguard_mapping_path, root_build_dir),
+ ]
+ inputs += [ invoker.proguard_mapping_path ]
+ }
+ }
+
+ if (!is_java_debug) {
+ args += [ "--release" ]
+ }
+
+ if (defined(invoker.min_sdk_version)) {
+ args += [
+ "--min-api",
+ "${invoker.min_sdk_version}",
+ ]
+ }
+
+ _d8_path = "//third_party/r8/lib/d8.jar"
+ inputs += [ _d8_path ]
+ args += [
+ "--d8-jar-path",
+ rebase_path(_d8_path, root_build_dir),
+ ]
+ }
+ }
+ }
+
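+ # Instruments a .jar file for EMMA code coverage via
+ # //build/android/gyp/emma_instr.py.
+ #
+ # Example usage (a hypothetical sketch; names and paths are illustrative):
+ #
+ #   emma_instr("foo__emma") {
+ #     input_jar_path = "$target_out_dir/foo.jar"
+ #     output_jar_path = "$target_out_dir/foo-instrumented.jar"
+ #     java_files = [ "java/src/org/chromium/foo/Foo.java" ]
+ #     java_sources_file = "$target_gen_dir/foo.sources"
+ #   }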
+ template("emma_instr") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+
+ _coverage_file = "$target_out_dir/${target_name}.em"
+ _source_dirs_listing_file = "$target_out_dir/${target_name}_sources.txt"
+ _emma_jar = "${android_sdk_root}/tools/lib/emma.jar"
+
+ script = "//build/android/gyp/emma_instr.py"
+ inputs = invoker.java_files + [
+ _emma_jar,
+ invoker.input_jar_path,
+ ]
+ outputs = [
+ _coverage_file,
+ _source_dirs_listing_file,
+ invoker.output_jar_path,
+ ]
+ args = [
+ "instrument_jar",
+ "--input-path",
+ rebase_path(invoker.input_jar_path, root_build_dir),
+ "--output-path",
+ rebase_path(invoker.output_jar_path, root_build_dir),
+ "--coverage-file",
+ rebase_path(_coverage_file, root_build_dir),
+ "--sources-list-file",
+ rebase_path(_source_dirs_listing_file, root_build_dir),
+ "--src-root",
+ rebase_path("//", root_build_dir),
+ "--emma-jar",
+ rebase_path(_emma_jar, root_build_dir),
+ ]
+ _rebased_java_sources_file =
+ rebase_path(invoker.java_sources_file, root_build_dir)
+ args += [ "--java-sources-file=$_rebased_java_sources_file" ]
+
+ if (emma_filter != "") {
+ args += [
+ "--filter-string",
+ emma_filter,
+ ]
+ }
+ }
+ }
+
+ # TODO(digit): Document this!
+ #
+ # Variables:
+ # testonly:
+ # build_config:
+ # input_jar_path:
+ # output_jar_path:
+ # enable_build_hooks:
+ # enable_build_hooks_android:
+ # supports_android:
+ # emma_instrument:
+ # jar_excluded_patterns: Optional list of .class file patterns to exclude
+ # from the final .jar file.
+ # jar_included_patterns: Optional list of .class file patterns to include
+ # in the final .jar file. jar_excluded_patterns take precedence over this.
+ # strip_resource_classes:
+ # deps:
+ # java_files:
+ # java_sources_file:
+ # inputs:
+ # data_deps:
+ # visibility:
+ #
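+ # Example usage (a hypothetical sketch; emma_instrument must always be set,
+ # and the names and paths are illustrative):
+ #
+ #   process_java_prebuilt("foo__process") {
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     input_jar_path = "$target_out_dir/foo.javac.jar"
+ #     output_jar_path = "$target_out_dir/foo.processed.jar"
+ #     supports_android = true
+ #     emma_instrument = false
+ #   }
+ #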
+ template("process_java_prebuilt") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ assert(invoker.build_config != "")
+ _build_config = invoker.build_config
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ assert(_rebased_build_config != "" || true) # Mark used.
+
+ _input_jar_path = invoker.input_jar_path
+ _output_jar_path = invoker.output_jar_path
+
+ _enable_assert =
+ defined(invoker.enable_build_hooks) && invoker.enable_build_hooks &&
+ (is_java_debug || dcheck_always_on || report_java_assert)
+
+ _enable_custom_resources = defined(invoker.enable_build_hooks_android) &&
+ invoker.enable_build_hooks_android
+
+ # Turned off because of existing code which fails the assertion
+ _enable_thread_annotations = false
+
+ _desugar = defined(invoker.supports_android) && invoker.supports_android
+ _emma_instrument = invoker.emma_instrument
+ _enable_split_compat = defined(invoker.split_compat_class_names)
+ _enable_bytecode_rewriter =
+ _enable_assert || _enable_custom_resources ||
+ _enable_thread_annotations || _enable_split_compat
+ _is_prebuilt = defined(invoker.is_prebuilt) && invoker.is_prebuilt
+ _enable_bytecode_checks = !defined(invoker.enable_bytecode_checks) ||
+ invoker.enable_bytecode_checks
+
+ # Release builds don't have asserts enabled, so they often will not run the
+ # bytecode rewriter. We are okay with having release builds not run the
+ # bytecode checks at all, since the dependency errors can be caught in debug
+ # mode.
+ not_needed([
+ "_is_prebuilt",
+ "_enable_bytecode_checks",
+ ])
+ if (defined(invoker.enable_bytecode_rewriter)) {
+ not_needed([
+ "_enable_assert",
+ "_enable_custom_resources",
+ "_enable_thread_annotations",
+ ])
+ _enable_bytecode_rewriter = invoker.enable_bytecode_rewriter
+ }
+
+ _jar_excluded_patterns = []
+ if (defined(invoker.jar_excluded_patterns)) {
+ _jar_excluded_patterns = invoker.jar_excluded_patterns
+ }
+ _jar_included_patterns = []
+ if (defined(invoker.jar_included_patterns)) {
+ _jar_included_patterns = invoker.jar_included_patterns
+ }
+ _strip_resource_classes = defined(invoker.strip_resource_classes) &&
+ invoker.strip_resource_classes
+ _filter_jar = _jar_excluded_patterns != [] ||
+ _jar_included_patterns != [] || _strip_resource_classes
+
+ _deps = []
+ _previous_output_jar = _input_jar_path
+
+ if (_enable_bytecode_rewriter) {
+ _java_bytecode_rewriter_target = "${target_name}__bytecode_rewrite"
+ _java_bytecode_rewriter_input_jar = _previous_output_jar
+ _java_bytecode_rewriter_output_jar =
+ "$target_out_dir/$target_name-bytecode-rewritten.jar"
+
+ action_with_pydeps(_java_bytecode_rewriter_target) {
+ script = "//build/android/gyp/bytecode_processor.py"
+ _bytecode_rewriter_script =
+ "$root_build_dir/bin/helper/java_bytecode_rewriter"
+ deps = _deps + [ "//build/android/bytecode:java_bytecode_rewriter($default_toolchain)" ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ inputs = [
+ _bytecode_rewriter_script,
+ _java_bytecode_rewriter_input_jar,
+ _build_config,
+ ]
+ outputs = [
+ _java_bytecode_rewriter_output_jar,
+ ]
+ args = [
+ "--script",
+ rebase_path(_bytecode_rewriter_script, root_build_dir),
+ "--input-jar",
+ rebase_path(_java_bytecode_rewriter_input_jar, root_build_dir),
+ "--output-jar",
+ rebase_path(_java_bytecode_rewriter_output_jar, root_build_dir),
+ ]
+ if (_is_prebuilt) {
+ args += [ "--is-prebuilt" ]
+ }
+ if (_enable_assert) {
+ args += [ "--enable-assert" ]
+ }
+ if (_enable_custom_resources) {
+ args += [ "--enable-custom-resources" ]
+ }
+ if (_enable_thread_annotations) {
+ args += [ "--enable-thread-annotations" ]
+ }
+ if (_enable_bytecode_checks) {
+ args += [ "--enable-check-class-path" ]
+ }
+ if (_enable_split_compat) {
+ args += [ "--split-compat-class-names" ] +
+ invoker.split_compat_class_names
+ }
+ args += [
+ "--direct-classpath-jars",
+ "@FileArg($_rebased_build_config:javac:classpath)",
+ "--sdk-classpath-jars",
+ "@FileArg($_rebased_build_config:android:sdk_jars)",
+ "--extra-classpath-jars",
+ "@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+ ]
+ }
+
+ _deps = []
+ _deps = [ ":$_java_bytecode_rewriter_target" ]
+ _previous_output_jar = _java_bytecode_rewriter_output_jar
+ }
+
+ if (_desugar) {
+ _desugar_target = "${target_name}__desugar"
+ _desugar_input_jar = _previous_output_jar
+ _desugar_output_jar = "$target_out_dir/$target_name-desugar.jar"
+
+ action_with_pydeps(_desugar_target) {
+ script = "//build/android/gyp/desugar.py"
+ deps = _deps
+ depfile = "$target_gen_dir/$target_name.d"
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ _desugar_jar = "//third_party/bazel/desugar/Desugar.jar"
+
+ inputs = [
+ _build_config,
+ _desugar_input_jar,
+ _desugar_jar,
+ ]
+ outputs = [
+ _desugar_output_jar,
+ ]
+ args = [
+ "--desugar-jar",
+ rebase_path(_desugar_jar, root_build_dir),
+ "--input-jar",
+ rebase_path(_desugar_input_jar, root_build_dir),
+ "--output-jar",
+ rebase_path(_desugar_output_jar, root_build_dir),
+
+ # Temporarily using java_full_interface_classpath until classpath validation of targets
+ # is implemented, see http://crbug.com/885273
+ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
+ "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)",
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ }
+
+ _deps = []
+ _deps = [ ":$_desugar_target" ]
+ _previous_output_jar = _desugar_output_jar
+ }
+
+ if (_filter_jar) {
+ _filter_target = "${target_name}__filter"
+ _filter_input_jar = _previous_output_jar
+ _filter_output_jar = "$target_out_dir/$target_name-filtered.jar"
+
+ action_with_pydeps(_filter_target) {
+ script = "//build/android/gyp/filter_zip.py"
+ deps = _deps
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ inputs = [
+ _build_config,
+ _filter_input_jar,
+ ]
+ outputs = [
+ _filter_output_jar,
+ ]
+ args = [
+ "--input",
+ rebase_path(_filter_input_jar, root_build_dir),
+ "--output",
+ rebase_path(_filter_output_jar, root_build_dir),
+ "--exclude-globs=$_jar_excluded_patterns",
+ "--include-globs=$_jar_included_patterns",
+ ]
+ if (_strip_resource_classes) {
+ args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ]
+ }
+ }
+
+ _deps = []
+ _deps = [ ":$_filter_target" ]
+ _previous_output_jar = _filter_output_jar
+ }
+
+ if (_emma_instrument) {
+ # Emma must run after desugar (or else desugar sometimes fails).
+ _emma_target = "${target_name}__emma"
+ _emma_input_jar = _previous_output_jar
+ _emma_output_jar = "$target_out_dir/$target_name-instrumented.jar"
+
+ emma_instr(_emma_target) {
+ deps = _deps
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ forward_variables_from(invoker,
+ [
+ "java_files",
+ "java_sources_file",
+ ])
+
+ input_jar_path = _emma_input_jar
+ output_jar_path = _emma_output_jar
+ }
+
+ _deps = []
+ _deps = [ ":$_emma_target" ]
+ _previous_output_jar = _emma_output_jar
+ }
+
+ _output_jar_target = "${target_name}__copy"
+
+ # This is copy_ex rather than copy to ensure that JARs (rather than
+ # possibly broken symlinks to them) get copied into the output
+ # directory.
+ copy_ex(_output_jar_target) {
+ forward_variables_from(invoker, [ "inputs" ])
+ deps = _deps
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ dest = _output_jar_path
+ sources = [
+ _previous_output_jar,
+ ]
+ outputs = [
+ _output_jar_path,
+ ]
+ }
+
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "visibility",
+ ])
+ public_deps = [
+ ":$_output_jar_target",
+ ]
+ }
+ }
+
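+ # Merges AndroidManifest.xml files from a target's dependencies into a
+ # root manifest via //build/android/gyp/merge_manifest.py.
+ #
+ # Example usage (a hypothetical sketch; names and paths are illustrative):
+ #
+ #   merge_manifests("foo__merge_manifests") {
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     input_manifest = "java/AndroidManifest.xml"
+ #     output_manifest = "$target_gen_dir/AndroidManifest.merged.xml"
+ #   }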
+ template("merge_manifests") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ script = "//build/android/gyp/merge_manifest.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ inputs = [
+ invoker.build_config,
+ invoker.input_manifest,
+ ]
+
+ outputs = [
+ invoker.output_manifest,
+ ]
+ _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--build-vars",
+ rebase_path(android_build_vars, root_build_dir),
+ "--root-manifest",
+ rebase_path(invoker.input_manifest, root_build_dir),
+ "--output",
+ rebase_path(invoker.output_manifest, root_build_dir),
+ "--extras",
+ "@FileArg($_rebased_build_config:extra_android_manifests)",
+ ]
+
+ if (defined(invoker.expected_manifest)) {
+ inputs += [ invoker.expected_manifest ]
+ _normalized_output = "${invoker.output_manifest}.normalized"
+ outputs += [ _normalized_output ]
+ args += [
+ "--expected-manifest",
+ rebase_path(invoker.expected_manifest, root_build_dir),
+ "--normalized-output",
+ rebase_path(_normalized_output, root_build_dir),
+ ]
+ if (check_android_configuration) {
+ args += [ "--verify-expected-manifest" ]
+ }
+ }
+ }
+ }
+
+ # This template is used to parse a set of resource directories and
+ # create the R.txt, .srcjar and .resources.zip for it.
+ #
+ # Input variables:
+ # deps: Specifies the input dependencies for this target.
+ #
+ # build_config: Path to the .build_config file corresponding to the target.
+ #
+ # resource_dirs:
+ # List of directories containing Android resources, layout should be
+ # similar to what aapt -S <dir> expects.
+ #
+ # generated_resource_dirs: (optional)
+ # List of directories containing generated resources.
+ #
+ # generated_resource_files: (optional)
+ # If generated_resource_dirs is not empty, must list all the files
+ # within these directories (the directory must appear at the start of
+ # the file path).
+ #
+ # custom_package: (optional)
+ # Package name for the generated R.java source file. Optional if
+ # android_manifest is not provided.
+ #
+ # android_manifest: (optional)
+ # If custom_package is not provided, path to an AndroidManifest.xml file
+ # that is only used to extract a package name out of it.
+ #
+ # r_text_in_path: (optional)
+ # Path to an input R.txt file to use to generate the R.java file.
+ # The default is to use 'aapt' to generate the file from the content
+ # of the resource directories.
+ #
+ # shared_resources: (optional)
+ # If true, generate an R.java file that uses non-final resource ID
+ # variables and an onResourcesLoaded() method.
+ #
+ # v14_skip: (optional)
+ # If true, skip generation of v14 compatible resources.
+ # (see generate_v14_compatible_resources.py for details).
+ #
+ # Output variables:
+ # zip_path: (optional)
+ # Path to a .resources.zip that will simply contain all the
+ # input resources, collected in a single archive.
+ #
+ # r_text_out_path: (optional): Path for the generated R.txt file.
+ #
+ # srcjar_path: (optional) Path to a generated .srcjar containing the
+ # generated R.java source file.
+ #
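+ # Example usage (a hypothetical sketch; names and paths are illustrative):
+ #
+ #   prepare_resources("foo__prepare_resources") {
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     resource_dirs = [ "java/res" ]
+ #     zip_path = "$target_out_dir/foo.resources.zip"
+ #     r_text_out_path = "$target_out_dir/foo_R.txt"
+ #     srcjar_path = "$target_gen_dir/foo.srcjar"
+ #   }
+ #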
+ template("prepare_resources") {
+ if (defined(invoker.srcjar_path)) {
+ _srcjar_path = invoker.srcjar_path
+ }
+ action_with_pydeps(target_name) {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ "visibility",
+ ])
+ script = "//build/android/gyp/prepare_resources.py"
+
+ depfile = "$target_gen_dir/${invoker.target_name}.d"
+ outputs = []
+ _all_resource_dirs = []
+ sources = []
+
+ if (defined(invoker.resource_dirs)) {
+ _all_resource_dirs += invoker.resource_dirs
+
+ # Speed up "gn gen" by short-circuiting the empty directory.
+ if (invoker.resource_dirs != [ "//build/android/empty" ] &&
+ invoker.resource_dirs != []) {
+ _sources_build_rel =
+ exec_script("//build/android/gyp/find.py",
+ rebase_path(invoker.resource_dirs, root_build_dir),
+ "list lines")
+ sources += rebase_path(_sources_build_rel, ".", root_build_dir)
+ }
+ }
+
+ if (defined(invoker.generated_resource_dirs)) {
+ assert(defined(invoker.generated_resource_files))
+ _all_resource_dirs += invoker.generated_resource_dirs
+ sources += invoker.generated_resource_files
+ }
+
+ _android_aapt_path = android_default_aapt_path
+
+ inputs = [
+ invoker.build_config,
+ _android_aapt_path,
+ ]
+
+ _rebased_all_resource_dirs =
+ rebase_path(_all_resource_dirs, root_build_dir)
+ _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--include-resources=@FileArg($_rebased_build_config:android:sdk_jars)",
+ "--aapt-path",
+ rebase_path(_android_aapt_path, root_build_dir),
+ "--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
+ "--extra-res-packages=@FileArg($_rebased_build_config:resources:extra_package_names)",
+ "--extra-r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
+ ]
+
+ if (defined(invoker.android_manifest)) {
+ if (defined(invoker.android_manifest_dep)) {
+ deps += [ invoker.android_manifest_dep ]
+ }
+ inputs += [ invoker.android_manifest ]
+ args += [
+ "--android-manifest",
+ rebase_path(invoker.android_manifest, root_build_dir),
+ ]
+ }
+
+ if (_rebased_all_resource_dirs != []) {
+ args += [ "--resource-dirs=$_rebased_all_resource_dirs" ]
+ }
+
+ if (defined(invoker.zip_path)) {
+ outputs += [
+ invoker.zip_path,
+ invoker.zip_path + ".info",
+ ]
+ args += [
+ "--resource-zip-out",
+ rebase_path(invoker.zip_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.r_text_out_path)) {
+ outputs += [ invoker.r_text_out_path ]
+ args += [
+ "--r-text-out",
+ rebase_path(invoker.r_text_out_path, root_build_dir),
+ ]
+ }
+
+ if (defined(_srcjar_path)) {
+ outputs += [ _srcjar_path ]
+ args += [
+ "--srcjar-out",
+ rebase_path(_srcjar_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.r_text_in_path)) {
+ _r_text_in_path = invoker.r_text_in_path
+ inputs += [ _r_text_in_path ]
+ args += [
+ "--r-text-in",
+ rebase_path(_r_text_in_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.custom_package)) {
+ args += [
+ "--custom-package",
+ invoker.custom_package,
+ ]
+ }
+
+ if (defined(invoker.strip_drawables) && invoker.strip_drawables) {
+ args += [ "--strip-drawables" ]
+ }
+
+ if (defined(invoker.shared_resources) && invoker.shared_resources) {
+ args += [ "--shared-resources" ]
+ }
+
+ if (defined(invoker.v14_skip) && invoker.v14_skip) {
+ args += [ "--v14-skip" ]
+ }
+ }
+ }
+
+ # A template that is used to compile all resources needed by a binary
+ # (e.g. an android_apk or a junit_binary) into an intermediate .ap_
+ # archive. It can also generate an associated .srcjar that contains the
+ # final R.java sources for all resource packages the binary depends on.
+ #
+ # Input variables:
+ # deps: Specifies the input dependencies for this target.
+ #
+ # build_config: Path to the .build_config file corresponding to the target.
+ #
+ # android_manifest: Path to root manifest for the binary.
+ #
+ # version_code: (optional)
+ #
+ # version_name: (optional)
+ #
+ # shared_resources: (optional)
+ # If true, make all variables in each generated R.java file non-final,
+ # and provide an onResourcesLoaded() method that can be used to reset
+ # their package index at load time. Useful when the APK corresponds to
+ # a library that is loaded at runtime, like system_webview_apk or
+ # monochrome_apk.
+ #
+ # app_as_shared_lib: (optional)
+ # If true, same effect as shared_resources, but also ensures that the
+ # resources can be used by the APK when it is loaded as a regular
+ # application as well. Useful for the monochrome_public_apk target
+ # which is both an application and a shared runtime library that
+ # implements the system webview feature.
+ #
+ # shared_resources_whitelist: (optional)
+ # Path to an R.txt file. If provided, acts similar to shared_resources
+ # except that it restricts the list of non-final resource variables
+ # to the list from the input R.txt file. Overrides shared_resources
+ # when both are specified.
+ #
+ # shared_resources_whitelist_locales: (optional)
+ # If shared_resources_whitelist is used, provide an optional list of
+ # Chromium locale names to determine which localized shared string
+ # resources to put in the final output, even if aapt_locale_whitelist
+ # is defined to a smaller subset.
+ #
+ # support_zh_hk: (optional)
+ # If true, support zh-HK in Chrome on Android by using the resources
+ # from zh-TW. See https://crbug.com/780847.
+ #
+ # aapt_locale_whitelist: (optional)
+ # Restrict compiled locale-dependent resources to a specific whitelist.
+ # NOTE: This is a list of Chromium locale names, not Android ones.
+ #
+ # resource_blacklist_regex: (optional)
+ #
+ # resource_blacklist_exceptions: (optional)
+ #
+ # no_xml_namespaces: (optional)
+ #
+ # png_to_webp: (optional)
+ # If true, convert all PNG resources (except 9-patch files) to WebP.
+ #
+ # post_process_script: (optional)
+ #
+ # proto_format: (optional). If true, compiles resources into protocol
+ # buffer format.
+ #
+ # package_name: (optional)
+ # Name of the package for the purpose of assigning package ID.
+ #
+ # package_name_to_id_mapping: (optional)
+ # List containing mapping from package names to package IDs. It will be
+ # used to determine which package ID to assign if package_name variable
+ # was passed in.
+ #
+ # package_id: (optional)
+ # Use a custom package ID in resource IDs (same purpose as
+ # package_name_to_id_mapping)
+ #
+ # arsc_package_name: (optional)
+ # Use this package name in the arsc file rather than the package name
+ # found in the AndroidManifest.xml. Does not affect the package name
+ # used in AndroidManifest.xml.
+ #
+ # Output variables:
+ # output: Path to a zip file containing the compiled resources.
+ #
+ # r_text_out_path: (optional):
+ # Path for the corresponding generated R.txt file.
+ #
+ # srcjar_path: (optional)
+ # Path to a generated .srcjar containing the generated R.java sources
+ # for all dependent resource libraries.
+ #
+ # proguard_file: (optional)
+ # Path to proguard configuration file for this apk target.
+ #
+ # proguard_file_main_dex: (optional)
+ #
+ #
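+ # Example usage (a hypothetical sketch; names and paths are illustrative):
+ #
+ #   compile_resources("foo__compile_resources") {
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     android_manifest = "$target_gen_dir/AndroidManifest.merged.xml"
+ #     output = "$target_out_dir/foo.ap_"
+ #     r_text_out_path = "$target_out_dir/foo_R.txt"
+ #     srcjar_path = "$target_gen_dir/foo.srcjar"
+ #   }
+ #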
+ template("compile_resources") {
+ _compile_resources_target_name = target_name
+ _compiled_resources_path = invoker.output
+
+ if (defined(invoker.srcjar_path)) {
+ _srcjar_path = invoker.srcjar_path
+ }
+ if (defined(invoker.post_process_script)) {
+ _compile_resources_target_name = "${target_name}__intermediate"
+ _compiled_resources_path =
+ get_path_info(_compiled_resources_path, "dir") + "/" +
+ get_path_info(_compiled_resources_path, "name") + ".intermediate.ap_"
+ _srcjar_path = "${_srcjar_path}.intermediate.srcjar"
+ }
+
+ _proto_format = defined(invoker.proto_format) && invoker.proto_format
+
+ # NOTE: Regarding the names of the depfiles used by this template:
+ # They all share the same prefix, derived from invoker.target_name
+ # rather than $target_name, so the distinct suffixes below are needed
+ # to keep their file paths different. Otherwise, extra rebuilds or
+ # even incorrect builds may happen due to incorrect dependency
+ # information. The suffixes used are:
+ #
+ # _1.d for the unprocessed compiled resources.
+ # _2.d for the optional processed compiled resources.
+ # _3.d for the proto-compiled resources.
+
+ action_with_pydeps(_compile_resources_target_name) {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ "visibility",
+ ])
+ script = "//build/android/gyp/compile_resources.py"
+
+ depfile = "$target_gen_dir/${invoker.target_name}_1.d"
+ outputs = []
+
+ _android_aapt_path = android_default_aapt_path
+ _android_aapt2_path = android_sdk_tools_bundle_aapt2
+ if (_proto_format) {
+ depfile = "$target_gen_dir/${invoker.target_name}_3.d"
+ }
+
+ inputs = [
+ invoker.build_config,
+ _android_aapt_path,
+ _android_aapt2_path,
+ ]
+
+ _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--include-resources=@FileArg($_rebased_build_config:android:sdk_jars)",
+ "--aapt2-path",
+ rebase_path(_android_aapt2_path, root_build_dir),
+ "--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
+ "--extra-res-packages=@FileArg($_rebased_build_config:resources:extra_package_names)",
+ "--extra-r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
+ ]
+
+ inputs += [ invoker.android_manifest ]
+ args += [
+ "--android-manifest",
+ rebase_path(invoker.android_manifest, root_build_dir),
+ ]
+
+ if (defined(invoker.no_xml_namespaces) && invoker.no_xml_namespaces) {
+ args += [ "--no-xml-namespaces" ]
+ }
+
+ if (defined(invoker.version_code)) {
+ args += [
+ "--version-code",
+ invoker.version_code,
+ ]
+ }
+ if (defined(invoker.version_name)) {
+ args += [
+ "--version-name",
+ invoker.version_name,
+ ]
+ }
+ if (defined(_compiled_resources_path)) {
+ _info_path = invoker.output + ".info"
+ outputs += [
+ _compiled_resources_path,
+ _info_path,
+ ]
+ args += [
+ "--apk-path",
+ rebase_path(_compiled_resources_path, root_build_dir),
+ "--apk-info-path",
+ rebase_path(_info_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.optimized_resources_path)) {
+ args += [
+ "--optimized-resources-path",
+ rebase_path(invoker.optimized_resources_path, root_build_dir),
+ ]
+ outputs += [ invoker.optimized_resources_path ]
+
+ if (defined(invoker.resources_config_path)) {
+ inputs += [ invoker.resources_config_path ]
+ args += [
+ "--resources-config-path",
+ rebase_path(invoker.resources_config_path, root_build_dir),
+ ]
+ }
+ }
+
+ # Useful to have android:debuggable in the manifest even for Release
+ # builds. Just omit it for official builds.
+ if (debuggable_apks) {
+ args += [ "--debuggable" ]
+ }
+
+ if (defined(invoker.r_text_out_path)) {
+ outputs += [ invoker.r_text_out_path ]
+ args += [
+ "--r-text-out",
+ rebase_path(invoker.r_text_out_path, root_build_dir),
+ ]
+ }
+
+ if (defined(_srcjar_path)) {
+ outputs += [ _srcjar_path ]
+ args += [
+ "--srcjar-out",
+ rebase_path(_srcjar_path, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.custom_package)) {
+ args += [
+ "--custom-package",
+ invoker.custom_package,
+ ]
+ }
+
+ if (_proto_format) {
+ args += [ "--proto-format" ]
+ }
+
+ # Define the flags related to shared resources.
+ #
+ # Note the small sanity check to ensure that the package ID of the
+ # generated resources table is correct. It should be 0x02 for runtime
+ # shared libraries, and 0x7f otherwise.
+
+ if (defined(invoker.shared_resources) && invoker.shared_resources) {
+ args += [ "--shared-resources" ]
+ }
+ if (defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib) {
+ args += [ "--app-as-shared-lib" ]
+ }
+ if (defined(invoker.package_id)) {
+ args += [ "--package-id=${invoker.package_id}" ]
+ }
+ if (defined(invoker.package_name)) {
+ args += [
+ "--package-name=${invoker.package_name}",
+ "--package-name-to-id-mapping=${invoker.package_name_to_id_mapping}",
+ ]
+ }
+ if (defined(invoker.arsc_package_name)) {
+ args += [
+ "--arsc-package-name",
+ invoker.arsc_package_name,
+ ]
+ }
+
+ if (defined(invoker.shared_resources_whitelist)) {
+ inputs += [ invoker.shared_resources_whitelist ]
+ args += [
+ "--shared-resources-whitelist",
+ rebase_path(invoker.shared_resources_whitelist, root_build_dir),
+ ]
+ }
+ if (defined(invoker.shared_resources_whitelist_locales)) {
+ args += [ "--shared-resources-whitelist-locales=" +
+ "${invoker.shared_resources_whitelist_locales}" ]
+ }
+
+ if (defined(invoker.proguard_file)) {
+ outputs += [ invoker.proguard_file ]
+ args += [
+ "--proguard-file",
+ rebase_path(invoker.proguard_file, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.proguard_file_main_dex)) {
+ outputs += [ invoker.proguard_file_main_dex ]
+ args += [
+ "--proguard-file-main-dex",
+ rebase_path(invoker.proguard_file_main_dex, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.aapt_locale_whitelist)) {
+ args += [ "--locale-whitelist=${invoker.aapt_locale_whitelist}" ]
+ }
+ if (defined(invoker.png_to_webp) && invoker.png_to_webp) {
+ _webp_target = "//third_party/libwebp:cwebp($host_toolchain)"
+ _webp_binary = get_label_info(_webp_target, "root_out_dir") + "/cwebp"
+ deps += [ _webp_target ]
+ inputs += [ _webp_binary ]
+ args += [
+ "--png-to-webp",
+ "--webp-binary",
+ rebase_path(_webp_binary, root_build_dir),
+ ]
+ }
+ if (defined(invoker.resource_blacklist_regex)) {
+ args +=
+ [ "--resource-blacklist-regex=${invoker.resource_blacklist_regex}" ]
+ if (defined(invoker.resource_blacklist_exceptions)) {
+ args += [ "--resource-blacklist-exceptions=${invoker.resource_blacklist_exceptions}" ]
+ }
+ }
+
+ if (defined(invoker.support_zh_hk) && invoker.support_zh_hk) {
+ args += [ "--support-zh-hk" ]
+ }
+
+ if (defined(invoker.include_resource)) {
+ _rebased_include_resources =
+ rebase_path(invoker.include_resource, root_build_dir)
+ args += [ "--include-resources=$_rebased_include_resources" ]
+ }
+
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+
+ if (defined(invoker.emit_ids_out_path)) {
+ outputs += [ invoker.emit_ids_out_path ]
+ _rebased_emit_ids_path =
+ rebase_path(invoker.emit_ids_out_path, root_out_dir)
+ args += [ "--emit-ids-out=$_rebased_emit_ids_path" ]
+ }
+
+ if (defined(invoker.resource_ids_provider_dep)) {
+ _compile_res_dep =
+ "${invoker.resource_ids_provider_dep}__compile_resources"
+ _gen_dir = get_label_info(_compile_res_dep, "target_gen_dir")
+ _name = get_label_info(_compile_res_dep, "name")
+ _resource_ids_path = "$_gen_dir/$_name.resource_ids"
+ inputs += [ _resource_ids_path ]
+ _rebased_ids_path = rebase_path(_resource_ids_path, root_out_dir)
+ args += [ "--use-resource-ids-path=$_rebased_ids_path" ]
+ deps += [ _compile_res_dep ]
+ }
+ }
+
+ if (defined(invoker.post_process_script)) {
+ action(target_name) {
+ depfile = "${target_gen_dir}/${invoker.target_name}_2.d"
+ script = invoker.post_process_script
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--apk-path",
+ rebase_path(_compiled_resources_path, root_build_dir),
+ "--output",
+ rebase_path(invoker.output, root_build_dir),
+ "--srcjar-in",
+ rebase_path(_srcjar_path, root_build_dir),
+ "--srcjar-out",
+ rebase_path(invoker.srcjar_path, root_build_dir),
+ ]
+ if (defined(invoker.shared_resources_whitelist)) {
+ args += [
+ "--r-text-whitelist",
+ rebase_path(invoker.shared_resources_whitelist, root_build_dir),
+ "--r-text",
+ rebase_path(invoker.r_text_out_path, root_build_dir),
+ ]
+ }
+ inputs = [
+ _srcjar_path,
+ _compiled_resources_path,
+ ]
+ if (defined(invoker.post_process_script_inputs)) {
+ inputs += invoker.post_process_script_inputs
+ }
+ outputs = [
+ invoker.output,
+ invoker.srcjar_path,
+ ]
+ public_deps = [
+ ":${_compile_resources_target_name}",
+ ]
+ }
+ }
+ }
+
+ # Creates the .jar.info, .pak.info and .res.info size-info files for an
+ # APK or app bundle by merging information from the target's dependencies.
+ #
+ # Variables:
+ # build_config: Path to APK's build config file. Used to extract the
+ # list of input .jar files from its dependencies.
+ # name: Name of the apk or app bundle (e.g. "Foo.apk").
+ # packaged_resources_path: Path to .ap_ file.
+ #
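+ # Example usage (a hypothetical sketch for a single APK; names and paths
+ # are illustrative):
+ #
+ #   create_size_info_files("foo__size_info") {
+ #     name = "Foo.apk"
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #     packaged_resources_path = "$target_out_dir/foo.ap_"
+ #   }
+ #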
+ template("create_size_info_files") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "deps",
+ ])
+ script = "//build/android/gyp/create_size_info_files.py"
+ _jar_info_path = "$root_build_dir/size-info/${invoker.name}.jar.info"
+ _pak_info_path = "$root_build_dir/size-info/${invoker.name}.pak.info"
+ _res_info_path = "$root_build_dir/size-info/${invoker.name}.res.info"
+ outputs = [
+ _jar_info_path,
+ _pak_info_path,
+ _res_info_path,
+ ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--jar-info-path",
+ rebase_path(_jar_info_path, root_build_dir),
+ "--pak-info-path",
+ rebase_path(_pak_info_path, root_build_dir),
+ "--res-info-path",
+ rebase_path(_res_info_path, root_build_dir),
+ ]
+ _is_bundle = defined(invoker.module_build_configs)
+ if (_is_bundle) {
+ inputs = invoker.module_build_configs
+ foreach(_build_config, invoker.module_build_configs) {
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ args += [
+ "--jar-files=@FileArg($_rebased_build_config:deps_info:unprocessed_jar_path)",
+ "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+ "--resource-apk=@FileArg($_rebased_build_config:deps_info:proto_resources_path)",
+ "--assets=@FileArg($_rebased_build_config:assets)",
+ "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+ ]
+ }
+ } else {
+ inputs = [
+ invoker.build_config,
+ invoker.packaged_resources_path,
+ ]
+ _rebased_build_config =
+ rebase_path(invoker.build_config, root_build_dir)
+ args += [
+ "--jar-files=@FileArg($_rebased_build_config:deps_info:jar_path)",
+ "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+ "--resource-apk",
+ rebase_path(invoker.packaged_resources_path, root_build_dir),
+ "--assets=@FileArg($_rebased_build_config:assets)",
+ "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+ ]
+ }
+ }
+ }
+
+ # Creates a signed and aligned .apk.
+ #
+ # Variables
+ # apk_name: (optional) APK name (without .apk suffix). If provided, will
+ # be used to generate .info files later used by the supersize tool.
+ # assets_build_config: Path to android_apk .build_config containing merged
+ # asset information.
+ # deps: Specifies the dependencies of this target.
+ # dex_path: Path to classes.dex file to include (optional).
+ # packaged_resources_path: Path to .ap_ to use.
+ # output_apk_path: Output path for the generated .apk.
+ # native_lib_placeholders: List of placeholder filenames to add to the apk
+ # (optional).
+ # secondary_native_lib_placeholders: List of placeholder filenames to add to
+ # the apk for the secondary ABI (optional).
+ # native_libs: List of native libraries.
+  #   native_libs_filearg: @FileArg() of additional native libraries.
+ # secondary_abi_native_libs: (optional) List of native libraries for
+ # secondary ABI.
+ # secondary_abi_native_libs_filearg: (optional). @FileArg() of additional
+ # secondary ABI native libs.
+ # write_asset_list: Adds an extra file to the assets, which contains a list of
+ # all other asset files.
+ # keystore_path: Path to keystore to use for signing.
+ # keystore_name: Key alias to use.
+ # keystore_password: Keystore password.
+ # uncompress_shared_libraries: (optional, default false) Whether to store
+ # native libraries inside the APK uncompressed and page-aligned.
+ template("package_apk") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+ _native_lib_placeholders = []
+ if (defined(invoker.native_lib_placeholders)) {
+ _native_lib_placeholders = invoker.native_lib_placeholders
+ }
+ _secondary_native_lib_placeholders = []
+ if (defined(invoker.secondary_native_lib_placeholders)) {
+ _secondary_native_lib_placeholders =
+ invoker.secondary_native_lib_placeholders
+ }
+
+ script = "//build/android/gyp/apkbuilder.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _apksigner = "$android_sdk_build_tools/apksigner"
+ _zipalign = "$android_sdk_build_tools/zipalign"
+ data_deps = [
+ "//tools/android/md5sum",
+ ] # Used when deploying APKs
+
+ inputs = invoker.native_libs + [
+ invoker.keystore_path,
+ invoker.packaged_resources_path,
+ _apksigner,
+ _zipalign,
+ ]
+ if (defined(invoker.dex_path)) {
+ inputs += [ invoker.dex_path ]
+ }
+
+ outputs = [
+ invoker.output_apk_path,
+ ]
+ data = [
+ invoker.output_apk_path,
+ ]
+
+ _rebased_compiled_resources_path =
+ rebase_path(invoker.packaged_resources_path, root_build_dir)
+ _rebased_packaged_apk_path =
+ rebase_path(invoker.output_apk_path, root_build_dir)
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--resource-apk=$_rebased_compiled_resources_path",
+ "--output-apk=$_rebased_packaged_apk_path",
+ "--apksigner-path",
+ rebase_path(_apksigner, root_build_dir),
+ "--zipalign-path",
+ rebase_path(_zipalign, root_build_dir),
+ "--key-path",
+ rebase_path(invoker.keystore_path, root_build_dir),
+ "--key-name",
+ invoker.keystore_name,
+ "--key-passwd",
+ invoker.keystore_password,
+ ]
+ if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) {
+ args += [ "--uncompress-dex" ]
+ }
+ if (defined(invoker.assets_build_config)) {
+ inputs += [ invoker.assets_build_config ]
+ _rebased_build_config =
+ rebase_path(invoker.assets_build_config, root_build_dir)
+ args += [
+ "--assets=@FileArg($_rebased_build_config:assets)",
+ "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+ ]
+
+ # TODO(mlopatkin) We are relying on the fact that assets_build_config is
+ # an APK build_config.
+ args += [ "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)" ]
+ }
+ if (defined(invoker.write_asset_list) && invoker.write_asset_list) {
+ args += [ "--write-asset-list" ]
+ }
+ if (defined(invoker.dex_path)) {
+ _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir)
+ args += [ "--dex-file=$_rebased_dex_path" ]
+ }
+ if (invoker.native_libs != [] || defined(invoker.native_libs_filearg) ||
+ _native_lib_placeholders != []) {
+ args += [ "--android-abi=$android_app_abi" ]
+ }
+ if (defined(android_app_secondary_abi)) {
+ args += [ "--secondary-android-abi=$android_app_secondary_abi" ]
+ }
+ if (invoker.native_libs != []) {
+ _rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
+ args += [ "--native-libs=$_rebased_native_libs" ]
+ }
+ if (defined(invoker.native_libs_filearg)) {
+ args += [ "--native-libs=${invoker.native_libs_filearg}" ]
+ }
+ if (_native_lib_placeholders != []) {
+ args += [ "--native-lib-placeholders=$_native_lib_placeholders" ]
+ }
+ if (_secondary_native_lib_placeholders != []) {
+ args += [ "--secondary-native-lib-placeholders=$_secondary_native_lib_placeholders" ]
+ }
+
+ if (defined(invoker.secondary_abi_native_libs_filearg)) {
+ args += [ "--secondary-native-libs=${invoker.secondary_abi_native_libs_filearg}" ]
+ }
+
+ if (defined(invoker.uncompress_shared_libraries) &&
+ invoker.uncompress_shared_libraries) {
+ args += [ "--uncompress-shared-libraries=True" ]
+ }
+ }
+ }
+
+ # Packages resources, assets, dex, and native libraries into an apk. Signs and
+ # zipaligns the apk.
+ template("create_apk") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _final_apk_path = invoker.apk_path
+
+ if (defined(invoker.dex_path)) {
+ _dex_path = invoker.dex_path
+ }
+ _load_library_from_apk = invoker.load_library_from_apk
+    assert(_load_library_from_apk || true) # Mark as used.
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps = invoker.deps
+ }
+ _incremental_deps = []
+ if (defined(invoker.incremental_deps)) {
+ _incremental_deps = invoker.incremental_deps
+ }
+ _native_libs = []
+ if (defined(invoker.native_libs)) {
+ _native_libs = invoker.native_libs
+ }
+ _native_libs_even_when_incremental = []
+ if (defined(invoker.native_libs_even_when_incremental) &&
+ invoker.native_libs_even_when_incremental != []) {
+ _native_libs_even_when_incremental =
+ invoker.native_libs_even_when_incremental
+ }
+
+ _shared_resources =
+ defined(invoker.shared_resources) && invoker.shared_resources
+ assert(_shared_resources || true) # Mark as used.
+
+ _keystore_path = invoker.keystore_path
+ _keystore_name = invoker.keystore_name
+ _keystore_password = invoker.keystore_password
+
+ package_apk(target_name) {
+ forward_variables_from(invoker,
+ [
+ "apk_name",
+ "assets_build_config",
+ "native_lib_placeholders",
+ "native_libs_filearg",
+ "secondary_native_lib_placeholders",
+ "secondary_abi_native_libs_filearg",
+ "secondary_abi_loadable_modules",
+ "uncompress_dex",
+ "uncompress_shared_libraries",
+ "write_asset_list",
+ ])
+ if (!defined(uncompress_shared_libraries)) {
+ uncompress_shared_libraries = _load_library_from_apk
+ }
+ if (defined(invoker.optimized_resources_path)) {
+ packaged_resources_path = invoker.optimized_resources_path
+ not_needed(invoker, [ "packaged_resources_path" ])
+ } else {
+ packaged_resources_path = invoker.packaged_resources_path
+ }
+ deps = _deps
+ native_libs = _native_libs + _native_libs_even_when_incremental
+ keystore_path = _keystore_path
+ keystore_name = _keystore_name
+ keystore_password = _keystore_password
+
+ if (defined(_dex_path)) {
+ dex_path = _dex_path
+ }
+
+ output_apk_path = _final_apk_path
+ }
+
+ _incremental_allowed =
+ defined(invoker.incremental_allowed) && invoker.incremental_allowed
+ if (_incremental_allowed) {
+ _android_manifest = invoker.android_manifest
+ _base_path = invoker.base_path
+
+ _incremental_final_apk_path_helper =
+ process_file_template(
+ [ _final_apk_path ],
+ "{{source_dir}}/{{source_name_part}}_incremental.apk")
+ _incremental_final_apk_path = _incremental_final_apk_path_helper[0]
+
+ _incremental_compiled_resources_path = "${_base_path}_incremental.ap_"
+ _incremental_compile_resources_target_name =
+ "${target_name}_incremental__compile_resources"
+
+ _rebased_build_config =
+ rebase_path(invoker.assets_build_config, root_build_dir)
+
+ action_with_pydeps(_incremental_compile_resources_target_name) {
+ deps = _incremental_deps
+ script =
+ "//build/android/incremental_install/generate_android_manifest.py"
+ inputs = [
+ _android_manifest,
+ invoker.assets_build_config,
+ invoker.packaged_resources_path,
+ ]
+ outputs = [
+ _incremental_compiled_resources_path,
+ ]
+
+ args = [
+ "--src-manifest",
+ rebase_path(_android_manifest, root_build_dir),
+ "--in-apk",
+ rebase_path(invoker.packaged_resources_path, root_build_dir),
+ "--out-apk",
+ rebase_path(_incremental_compiled_resources_path, root_build_dir),
+ "--aapt2-path",
+ rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
+ "--android-sdk-jars=@FileArg($_rebased_build_config:android:sdk_jars)",
+ ]
+ if (disable_incremental_isolated_processes) {
+ args += [ "--disable-isolated-processes" ]
+ }
+ }
+
+ package_apk("${target_name}_incremental") {
+ forward_variables_from(invoker,
+ [
+ "assets_build_config",
+ "secondary_abi_loadable_modules",
+ "uncompress_shared_libraries",
+ ])
+ _dex_target = "//build/android/incremental_install:bootstrap_java__dex"
+ deps = _incremental_deps + [
+ ":${_incremental_compile_resources_target_name}",
+ _dex_target,
+ ]
+
+ if (defined(_dex_path)) {
+ dex_path =
+ get_label_info(_dex_target, "target_gen_dir") + "/bootstrap.dex"
+ }
+
+ native_libs = _native_libs_even_when_incremental
+ keystore_path = _keystore_path
+ keystore_name = _keystore_name
+ keystore_password = _keystore_password
+
+ # http://crbug.com/384638
+ _has_native_libs =
+ defined(invoker.native_libs_filearg) || _native_libs != []
+ if (_has_native_libs && _native_libs_even_when_incremental == []) {
+ native_lib_placeholders = [ "libfix.crbug.384638.so" ]
+ }
+
+ output_apk_path = _incremental_final_apk_path
+ packaged_resources_path = _incremental_compiled_resources_path
+ }
+ }
+ }
+
+ # Compile Java source files into a .jar file, potentially using an
+ # annotation processor, and/or the errorprone compiler.
+ #
+ # Note that the only way to specify custom annotation processors is
+ # by using build_config to point to a file that corresponds to a java-related
+ # target that includes javac:processor_classes entries (i.e. there is no
+ # variable here that can be used for this purpose).
+ #
+ # Note also the peculiar use of java_files / java_sources_file. The content
+ # of the java_files list and the java_sources_file file must match exactly.
+ # This rule uses java_files only to list the inputs to the action that
+ # calls the javac.py script, but will pass the list of Java source files
+ # with the '@${java_sources_file}" command-line syntax. Not a problem in
+ # practice since this is only called from java_library_impl() that sets up
+ # the variables properly.
+ #
+ # Variables:
+ # main_target_name: Used when extracting srcjars for codesearch.
+ # java_files: Optional list of Java source file paths.
+ # srcjar_deps: Optional list of .srcjar dependencies (not file paths).
+ # The corresponding source files they contain will be compiled too.
+ # srcjar_filearg: Optional @FileArg for additional srcjars.
+ # java_sources_file: Optional path to file containing list of Java source
+ # file paths. This must always be provided if java_files is not empty
+ # and must match it exactly.
+ # build_config: Path to the .build_config file of the corresponding
+ # java_library_impl() target. The following entries will be used by this
+ # template: javac:srcjars, deps_info:javac_full_classpath,
+ # deps_info:javac_full_interface_classpath, javac:processor_classpath,
+ # javac:processor_classes
+ # javac_jar_path: Path to the final output .jar file.
+ # javac_args: Optional list of extra arguments to pass to javac.
+ # chromium_code: Whether this corresponds to Chromium-specific sources.
+ # requires_android: True if these sources can only run on Android.
+ # additional_jar_files: Optional list of files to copy into the resulting
+ # .jar file (by default, only .class files are put there). Each entry
+ # has the 'srcPath:dstPath' format.
+ # enable_errorprone: Optional. If True, use the errorprone compiler to
+ # check for error-prone constructs in the language. If not provided,
+ # whether this is enabled depends on chromium_code and the global
+ # use_errorprone_java_compiler variable.
+  #   apk_name: Optional APK name. If provided, will tell javac.py to also
+  #       generate a .apk.jar.info file under size-info/${apk_name}.apk.jar.info
+ # provider_configurations: Optional list of paths to Java service
+ # provider configuration files [1]. These will be copied under
+ # META-INF/services/ in the final .jar file.
+ # processor_args_javac: List of annotation processor arguments, each one
+ # will be passed to javac as -A<entry>.
+ # deps: Dependencies for the corresponding target.
+ # testonly: Usual meaning (should be True for test-only targets)
+ #
+ # [1] https://docs.oracle.com/javase/7/docs/api/java/util/ServiceLoader.html
+ #
+ template("compile_java") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _build_config = invoker.build_config
+ _chromium_code = invoker.chromium_code
+
+ _provider_configurations = []
+ if (defined(invoker.provider_configurations)) {
+ _provider_configurations = invoker.provider_configurations
+ }
+
+ _processor_args = []
+ if (defined(invoker.processor_args_javac)) {
+ _processor_args = invoker.processor_args_javac
+ }
+
+ _additional_jar_files = []
+ if (defined(invoker.additional_jar_files)) {
+ _additional_jar_files = invoker.additional_jar_files
+ }
+
+ _srcjar_deps = []
+ if (defined(invoker.srcjar_deps)) {
+ _srcjar_deps += invoker.srcjar_deps
+ }
+
+ _java_srcjars = []
+ foreach(dep, _srcjar_deps) {
+ _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+ _dep_name = get_label_info(dep, "name")
+ _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+ }
+
+ _javac_args = []
+ if (defined(invoker.javac_args)) {
+ _javac_args = invoker.javac_args
+ }
+
+ action_with_pydeps(target_name) {
+ script = "//build/android/gyp/javac.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ deps = _srcjar_deps
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ outputs = [
+ invoker.javac_jar_path,
+ invoker.javac_jar_path + ".info",
+ ]
+ inputs = invoker.java_files + _java_srcjars + [ _build_config ]
+ if (invoker.java_files != []) {
+ inputs += [ invoker.java_sources_file ]
+ }
+
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ _rebased_javac_jar_path =
+ rebase_path(invoker.javac_jar_path, root_build_dir)
+ _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+ _rebased_depfile = rebase_path(depfile, root_build_dir)
+ _rebased_generated_dir = rebase_path(
+ "$target_gen_dir/${invoker.main_target_name}/generated_java",
+ root_build_dir)
+ args = [
+ "--depfile=$_rebased_depfile",
+ "--generated-dir=$_rebased_generated_dir",
+ "--jar-path=$_rebased_javac_jar_path",
+ "--java-srcjars=$_rebased_java_srcjars",
+ "--java-version=1.8",
+ "--full-classpath=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)",
+ "--interface-classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)",
+ "--processorpath=@FileArg($_rebased_build_config:javac:processor_classpath)",
+ "--processors=@FileArg($_rebased_build_config:javac:processor_classes)",
+ ]
+ if (defined(invoker.srcjar_filearg)) {
+ args += [ "--java-srcjars=${invoker.srcjar_filearg}" ]
+ }
+ if (invoker.requires_android) {
+ args += [ "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ]
+ }
+ if (_chromium_code) {
+ args += [ "--chromium-code=1" ]
+ }
+
+ # Use errorprone with checks disabled when !enable_errorprone so that
+ # compile results are the same across machines.
+ # TODO(crbug.com/693079): Add javac to DEPS and use it for the
+ # !enable_errorprone case.
+ deps += [ "//third_party/errorprone:errorprone($default_toolchain)" ]
+ args += [
+ "--errorprone-path",
+ "bin/errorprone",
+ ]
+
+ if (invoker.enable_errorprone) {
+ deps += [ "//tools/android/errorprone_plugin:errorprone_plugin_java($default_toolchain)" ]
+ _rebased_errorprone_processorpath = [
+ "lib.java/tools/android/errorprone_plugin/errorprone_plugin_java.jar",
+ ]
+ args += [
+ "--processorpath=$_rebased_errorprone_processorpath",
+ "--enable-errorprone",
+ ]
+ }
+ foreach(e, _provider_configurations) {
+ args += [ "--provider-configuration=" + rebase_path(e, root_build_dir) ]
+ }
+ foreach(e, _processor_args) {
+ args += [ "--processor-arg=" + e ]
+ }
+
+ foreach(file_tuple, _additional_jar_files) {
+ # Each element is of length two, [ path_to_file, path_to_put_in_jar ]
+ inputs += [ file_tuple[0] ]
+ args +=
+ [ "--additional-jar-file=" +
+ rebase_path(file_tuple[0], root_build_dir) + ":" + file_tuple[1] ]
+ }
+ if (invoker.java_files != []) {
+ args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ]
+ }
+ foreach(e, _javac_args) {
+ args += [ "--javac-arg=" + e ]
+ }
+ }
+ }
+
+ # Create an interface jar from a normal jar.
+ #
+ # Variables
+ # input_jar: Path to input .jar.
+ # output_jar: Path to output .ijar.
+ #
+ template("generate_interface_jar") {
+ action_with_pydeps(target_name) {
+ _ijar_target = "//third_party/ijar:ijar($host_toolchain)"
+ _ijar_executable = get_label_info(_ijar_target, "root_out_dir") + "/ijar"
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+ script = "//build/android/gyp/ijar.py"
+ deps = [
+ _ijar_target,
+ ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ inputs = [
+ invoker.input_jar,
+ _ijar_executable,
+ ]
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+ outputs = [
+ invoker.output_jar,
+ ]
+ args = [
+ rebase_path(_ijar_executable, root_build_dir),
+ rebase_path(invoker.input_jar, root_build_dir),
+ rebase_path(invoker.output_jar, root_build_dir),
+ ]
+ }
+ }
+
+ # A rule that will handle multiple Java-related targets.
+ #
+ # The caller can provide a list of source files with 'java_files'
+ # and 'srcjar_deps', or a prebuilt .jar file through 'jar_path'.
+ #
+ # In the case of a 'java_binary' target type, it can even provide none of
+ # that (and the rule will just generate its wrapper script).
+ #
+ # The template will process the input .jar file (either the prebuilt one,
+  # or the result of compiling the sources), for example to apply Proguard
+  # or other kinds of bytecode-level rewriting.
+ #
+ # Variables:
+ # type: type of Java target, valid values: 'java_library', 'java_binary',
+ # 'junit_binary', 'java_annotation_processor', and 'android_apk'
+ # main_target_name: optional. If provided, overrides target_name when
+ # creating sub-targets (e.g. "${main_target_name}__dex") and
+ # some output files (e.g. "${main_target_name}.sources"). Only used
+ # for 'android_apk' types at the moment, where main_target_name will
+ # be the name of the main APK target.
+ # supports_android: Optional. True if target can run on Android.
+ # requires_android: Optional. True if target can only run on Android.
+ # java_files: Optional list of Java source file paths for this target.
+ # javac_args: Optional list of extra arguments to pass to javac.
+  #   errorprone_args: Optional list of extra arguments to pass to errorprone.
+ # srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
+ # source files they contain will also be compiled for this target.
+ # java_sources_file: Optional path to a file which will be written with
+ # the content of java_files. If not provided, the file will be written
+ # under $target_gen_dir/$main_target_name.sources. Ignored if
+  #       java_files is empty.
+ # jar_path: Optional path to a prebuilt .jar file for this target.
+ # Mutually exclusive with java_files and srcjar_deps.
+ # final_jar_path: Optional path to the final output .jar file (after
+ # processing). If not provided, the output will go under
+ # $root_build_dir/lib.java/
+ # output_name: Optional output name for the final jar path. Ignored if
+ # final_jar_path is provided. Otherwise, used to determine the name
+ # of the final jar. If not provided, the default is to use the same
+ # name as jar_path, if provided, or main_target_name.
+ # dex_path: Optional. Path to the output dex.jar file for this target.
+ # Ignored if !supports_android.
+ # main_class: Main Java class name for 'java_binary', 'junit_binary' and
+ # 'java_annotation_processor' target types. Should not be set for other
+ # ones.
+ # deps: Dependencies for this target.
+ # testonly: True iff target should only be used for tests.
+ # no_build_hooks: Disables bytecode rewriting of asserts and android
+ # resources methods.
+ # chromium_code: Optional. Whether this is Chromium-specific code. If not
+ # provided, this is determined automatically, based on the location of
+ # the source files (i.e. anything under third_party/ is not
+ # Chromium-specific unless it is in a 'chromium' sub-directory).
+ # emma_never_instrument: Optional. If provided, whether to forbid
+ # instrumentation with the Emma coverage processor. If not provided,
+ # this is controlled by the global emma_coverage build arg variable
+ # and only used for non-test Chromium code.
+ # include_android_sdk: Optional. Whether or not the android SDK dep
+ # should be added to deps. Defaults to true for non-system libraries
+ # that support android.
+ # alternative_android_sdk_dep: Optional. Alternative Android system
+ # android java target to use.
+ # annotation_processor_deps: Optional list of dependencies corresponding
+ # to annotation processors used to compile these sources.
+ # input_jars_paths: Optional list of additional .jar file paths, which will
+ # be added to the compile-time classpath when building this target (but
+ # not to the runtime classpath).
+ # classpath_deps: Optional list of additional java library dependencies,
+ # whose .jar files will be added to the compile-time classpath when
+ # building this target (but not to the runtime classpath).
+ # gradle_treat_as_prebuilt: Cause generate_gradle.py to reference this
+ # library via its built .jar rather than including its .java sources.
+ # proguard_enabled: Optional. True to enable ProGuard obfuscation.
+ # proguard_configs: Optional list of additional proguard config file paths.
+ # bypass_platform_checks: Optional. If True, platform checks will not
+ # be performed. They are used to verify that every target with
+  #       requires_android only depends on targets that at least set
+  #       supports_android.
+ # Similarly, if a target has !supports_android, then it cannot depend on
+ # any other target that has requires_android.
+ # include_java_resources: Optional. If True, include Java (not Android)
+ # resources into final .jar file.
+ # android_manifest_for_lint: Optional path to Android manifest to use
+ # if Android linting is enabled. Ignored for 'android_apk' types
+ # (since the value of android_manifest will be used instead).
+ # lint_suppressions_file: Optional lint suppressions input file.
+ # jar_excluded_patterns: Optional list of .class file patterns to exclude
+ # from the final .jar file.
+ # jar_included_patterns: Optional list of .class file patterns to include
+ # in the final .jar file. jar_excluded_patterns take precedence over this.
+ #
+ # For 'android_apk' and 'android_app_bundle_module' targets only:
+ #
+ # apk_path: Path to the final APK file.
+ # android_manifest: Path to AndroidManifest.xml file for the APK.
+ # android_manifest_dep: Optional. Dependency target that generates
+ # android_manifest.
+ # apk_under_test: For 'android_apk' targets used to test other APKs,
+ # this is the target name of APK being tested.
+ # incremental_allowed: Optional (default false). True to allow the
+ # generation of incremental APKs ('android_apk' targets only).
+ # incremental_apk_path: If incremental_allowed, path to the incremental
+ # output APK.
+ # incremental_install_json_path: If incremental_allowed, path to the output
+ # incremental install json configuration file.
+ # native_lib_placeholders: Optional. List of placeholder filenames to add to
+ # the APK.
+ # proguard_mapping_path: Path to .mapping file produced from ProGuard step.
+ # shared_libraries_runtime_deps_file: Optional. Path to a file listing the
+ # native shared libraries required at runtime by the APK.
+  #   secondary_abi_shared_libraries_runtime_deps_file: Optional. Same as
+  #       shared_libraries_runtime_deps_file, but for the secondary ABI.
+ # secondary_native_lib_placeholders: Optional. List of placeholder filenames
+ # to add to the APK for the secondary ABI.
+ # extra_shared_libraries: Optional list of extra native libraries to
+ # be stored in the APK.
+ # uncompress_shared_libraries: Optional. True to store native shared
+ # libraries uncompressed and page-aligned.
+  #   proto_resources_path: The path of a zip archive containing the APK's
+ # resources compiled to the protocol buffer format (instead of regular
+ # binary xml + resources.arsc).
+ # module_rtxt_path: The path of the R.txt file generated when compiling the
+ # resources for the bundle module.
+ # base_whitelist_rtxt_path: The path of the R.txt file containing the
+ # list of string resources to keep in the base split APK for any bundle
+ # that uses this target.
+ #
+ # For 'java_binary' and 'junit_binary' targets only. Ignored by others:
+ #
+ # bootclasspath: Optional list of boot class paths used by the generated
+ # wrapper script.
+ # wrapper_script_name: Optional name for the generated wrapper script.
+ # Default is main target name.
+ # wrapper_script_args: Optional list of extra arguments used by the
+ # generated wrapper script.
+ #
+ template("java_library_impl") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+ _is_prebuilt = defined(invoker.jar_path)
+ _is_annotation_processor = invoker.type == "java_annotation_processor"
+ _is_java_binary =
+ invoker.type == "java_binary" || invoker.type == "junit_binary"
+ _is_system_library = invoker.type == "system_java_library"
+ _supports_android =
+ defined(invoker.supports_android) && invoker.supports_android
+ _requires_android =
+ defined(invoker.requires_android) && invoker.requires_android
+
+ _main_target_name = target_name
+ if (defined(invoker.main_target_name)) {
+ _main_target_name = invoker.main_target_name
+ }
+ _java_files = []
+ if (defined(invoker.java_files)) {
+ _java_files = invoker.java_files
+ }
+ _srcjar_deps = []
+ if (defined(invoker.srcjar_deps)) {
+ _srcjar_deps = invoker.srcjar_deps
+ }
+ _has_sources = _java_files != [] || _srcjar_deps != []
+
+ if (_is_prebuilt) {
+ assert(!_has_sources)
+ } else {
+ # Allow java_binary to not specify any sources. This is needed when a prebuilt
+ # is needed as a library as well as a binary.
+ assert(_is_annotation_processor || _is_java_binary || _has_sources)
+ }
+
+ if (_is_java_binary) {
+ assert(defined(invoker.main_class),
+ "${invoker.type}() must set main_class")
+ } else if (_is_annotation_processor) {
+ assert(defined(invoker.main_class),
+ "java_annotation_processor() must set main_class")
+ } else {
+ assert(!defined(invoker.main_class),
+ "main_class cannot be used for target of type ${invoker.type}")
+ }
+
+ # The only target that might have no prebuilt and no sources is a java_binary.
+ if (_is_prebuilt || _has_sources) {
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ } else if (_is_prebuilt) {
+ _output_name = get_path_info(invoker.jar_path, "name")
+ } else {
+ _output_name = _main_target_name
+ }
+
+ # Jar files can be needed at runtime (by Robolectric tests or java binaries),
+ # so do not put them under gen/.
+ _target_dir_name = get_label_info(":$_main_target_name", "dir")
+ _final_jar_path =
+ "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar"
+ if (defined(invoker.final_jar_path)) {
+ _final_jar_path = invoker.final_jar_path
+ }
+ _final_ijar_path =
+ get_path_info(_final_jar_path, "dir") + "/" +
+ get_path_info(_final_jar_path, "name") + ".interface.jar"
+
+ if (_has_sources) {
+ _javac_jar_path = "$target_gen_dir/$_main_target_name.javac.jar"
+ }
+
+ if (_is_prebuilt) {
+ _unprocessed_jar_path = invoker.jar_path
+ } else {
+ _unprocessed_jar_path = _javac_jar_path
+ }
+
+ if (_supports_android) {
+ _dex_path = "$target_gen_dir/$_main_target_name.dex.jar"
+ if (defined(invoker.dex_path)) {
+ _dex_path = invoker.dex_path
+ }
+ }
+ }
+
+ _accumulated_public_deps = []
+ _accumulated_deps = []
+ if (defined(invoker.deps)) {
+ _accumulated_deps = invoker.deps
+ }
+
+ _enable_build_hooks =
+ _supports_android &&
+ (!defined(invoker.no_build_hooks) || !invoker.no_build_hooks)
+ if (_enable_build_hooks) {
+ _accumulated_deps += [ "//build/android/buildhooks:build_hooks_java" ]
+ }
+
+ # Some testonly targets use their own resources and the code being
+ # tested will use custom resources so there's no need to enable this
+ # for testonly targets.
+ _enable_build_hooks_android =
+ _enable_build_hooks && _requires_android &&
+ (!defined(invoker.testonly) || !invoker.testonly)
+ if (_enable_build_hooks_android) {
+ _accumulated_deps +=
+ [ "//build/android/buildhooks:build_hooks_android_java" ]
+ }
+
+ # Don't enable coverage or lint unless the target has some non-generated
+ # files.
+ if (defined(invoker.chromium_code)) {
+ _chromium_code = invoker.chromium_code
+ } else {
+ # Default based on whether target is in third_party.
+ set_sources_assignment_filter([ "*\bthird_party\b*" ])
+ sources = [
+ get_label_info(":$_main_target_name", "dir"),
+ ]
+ _chromium_code = sources != []
+ if (!_chromium_code && !_is_prebuilt && _java_files != []) {
+ # Unless third_party code has an org.chromium file in it.
+ set_sources_assignment_filter([ "*\bchromium\b*" ])
+ sources = _java_files
+ _chromium_code = _java_files != sources
+ }
+ set_sources_assignment_filter([])
+ sources = []
+ }
+
+ if (defined(_final_jar_path)) {
+ _emma_instrument = emma_coverage && _chromium_code && _java_files != [] &&
+ (!defined(invoker.testonly) || !invoker.testonly)
+ if (defined(invoker.emma_never_instrument)) {
+ _emma_instrument = !invoker.emma_never_instrument && _emma_instrument
+ }
+ if (_emma_instrument) {
+ _accumulated_deps += [ "//third_party/android_sdk:emma_device_java" ]
+ }
+ }
+
+ if (_java_files != []) {
+ _java_sources_file = "$target_gen_dir/$_main_target_name.sources"
+ if (defined(invoker.java_sources_file)) {
+ _java_sources_file = invoker.java_sources_file
+ }
+ write_file(_java_sources_file, rebase_path(_java_files, root_build_dir))
+ }
+
+ _include_android_sdk = !_is_system_library && _supports_android
+ if (defined(invoker.include_android_sdk)) {
+ _include_android_sdk = invoker.include_android_sdk
+ }
+ if (_include_android_sdk) {
+ if (defined(invoker.alternative_android_sdk_dep)) {
+ _accumulated_deps += [ invoker.alternative_android_sdk_dep ]
+ } else {
+ _accumulated_deps += [ "//third_party/android_sdk:android_sdk_java" ]
+ }
+ }
+
+ # Define build_config_deps which will be a list of targets required to
+ # build the _build_config.
+ _build_config = "$target_gen_dir/$_main_target_name.build_config"
+ _build_config_target_name =
+ "${_main_target_name}$build_config_target_suffix"
+
+ write_build_config(_build_config_target_name) {
+ forward_variables_from(invoker,
+ [
+ "annotation_processor_deps",
+ "base_whitelist_rtxt_path",
+ "classpath_deps",
+ "gradle_treat_as_prebuilt",
+ "input_jars_paths",
+ "loadable_modules",
+ "main_class",
+ "proguard_configs",
+ "proguard_enabled",
+ "proguard_mapping_path",
+ "secondary_abi_loadable_modules",
+ "type",
+ ])
+ if (type == "android_apk" || type == "android_app_bundle_module") {
+ forward_variables_from(
+ invoker,
+ [
+ "android_manifest",
+ "android_manifest_dep",
+ "extra_shared_libraries",
+ "final_dex_path",
+ "native_lib_placeholders",
+ "secondary_abi_shared_libraries_runtime_deps_file",
+ "secondary_native_lib_placeholders",
+ "shared_libraries_runtime_deps_file",
+ "static_library_dependent_targets",
+ "uncompress_shared_libraries",
+ ])
+ }
+ if (type == "android_apk") {
+ forward_variables_from(invoker,
+ [
+ "apk_path",
+ "apk_under_test",
+ "incremental_allowed",
+ "incremental_apk_path",
+ "incremental_install_json_path",
+ ])
+ }
+ if (type == "android_app_bundle_module") {
+ forward_variables_from(invoker,
+ [
+ "base_module_target",
+ "module_rtxt_path",
+ "proto_resources_path",
+ ])
+ }
+ build_config = _build_config
+ is_prebuilt = _is_prebuilt
+ possible_config_deps = _accumulated_deps
+ if (defined(apk_under_test)) {
+ possible_config_deps += [ apk_under_test ]
+ }
+ supports_android = _supports_android
+ requires_android = _requires_android
+ bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
+ invoker.bypass_platform_checks
+
+ if (defined(_final_jar_path)) {
+ jar_path = _final_jar_path
+ ijar_path = _final_ijar_path
+ unprocessed_jar_path = _unprocessed_jar_path
+ }
+ if (defined(_dex_path)) {
+ dex_path = _dex_path
+ }
+ if (_java_files != []) {
+ java_sources_file = _java_sources_file
+ }
+
+ bundled_srcjars = []
+ foreach(d, _srcjar_deps) {
+ _dep_gen_dir = get_label_info(d, "target_gen_dir")
+ _dep_name = get_label_info(d, "name")
+ bundled_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+ }
+ if (defined(invoker.include_java_resources) &&
+ invoker.include_java_resources) {
+ if (defined(invoker.jar_path)) {
+ # Use original jar_path because _jar_path points to a library without
+ # resources.
+ java_resources_jar = invoker.jar_path
+ } else {
+ java_resources_jar = _final_jar_path
+ }
+ }
+ }
+ _accumulated_public_deps += [ ":$_build_config_target_name" ]
+
+ # Don't need to depend on the apk-under-test to be packaged.
+ if (defined(invoker.apk_under_test)) {
+ _accumulated_deps += [ "${invoker.apk_under_test}__java" ]
+ }
+ if (defined(invoker.android_manifest_dep)) {
+ _accumulated_deps += [ invoker.android_manifest_dep ]
+ }
+ if (defined(invoker.classpath_deps)) {
+ _accumulated_deps += invoker.classpath_deps
+ }
+ if (defined(invoker.annotation_processor_deps)) {
+ _accumulated_deps += invoker.annotation_processor_deps
+ }
+
+ # TODO(agrieve): Enable lint for _has_sources rather than just _java_files.
+ _lint_enabled = _java_files != [] && _supports_android && _chromium_code &&
+ !disable_android_lint
+ if (defined(invoker.enable_errorprone)) {
+ _enable_errorprone = invoker.enable_errorprone
+ } else {
+ _enable_errorprone =
+ _java_files != [] && _chromium_code && use_errorprone_java_compiler
+ }
+
+ if (_has_sources) {
+ _type = invoker.type
+ template("compile_java_helper") {
+ compile_java(target_name) {
+ forward_variables_from(invoker, "*")
+ enable_errorprone = invoker.enable_errorprone
+ javac_jar_path = invoker.javac_jar_path
+
+ main_target_name = _main_target_name
+ build_config = _build_config
+ java_files = _java_files
+ if (_java_files != []) {
+ java_sources_file = _java_sources_file
+ }
+ srcjar_deps = _srcjar_deps
+ chromium_code = _chromium_code
+ requires_android = _requires_android
+ deps = _accumulated_deps + _accumulated_public_deps
+
+ # android_apk and junit_binary pass R.java srcjars via srcjar_deps.
+ if (_type == "java_library" && _requires_android) {
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ srcjar_filearg = "@FileArg($_rebased_build_config:deps_info:owned_resource_srcjars)"
+ }
+ }
+ }
+ _analysis_public_deps = []
+ _compile_java_target = "${_main_target_name}__compile_java"
+ _compile_java_forward_variables = [
+ "additional_jar_files",
+ "apk_name",
+ "processor_args_javac",
+ "provider_configurations",
+ "javac_args",
+ ]
+ compile_java_helper(_compile_java_target) {
+ forward_variables_from(invoker, _compile_java_forward_variables)
+ enable_errorprone = false
+ javac_jar_path = _javac_jar_path
+ }
+ if (_enable_errorprone) {
+ _compile_java_errorprone_target =
+ "${_main_target_name}__compile_java_errorprone"
+ compile_java_helper(_compile_java_errorprone_target) {
+ forward_variables_from(invoker, _compile_java_forward_variables)
+ enable_errorprone = true
+ if (defined(invoker.errorprone_args)) {
+ if (!defined(javac_args)) {
+ javac_args = []
+ }
+ javac_args += invoker.errorprone_args
+ }
+ javac_jar_path = _javac_jar_path + ".errorprone.jar"
+ }
+ _analysis_public_deps += [ ":$_compile_java_errorprone_target" ]
+ }
+ if (defined(invoker.android_manifest_for_lint)) {
+ _android_manifest_for_lint = invoker.android_manifest_for_lint
+ assert(_android_manifest_for_lint != "") # Mark as used.
+ }
+ if (_lint_enabled) {
+ _android_lint_target = "${_main_target_name}__lint"
+ android_lint(_android_lint_target) {
+ if (invoker.type == "android_apk" ||
+ invoker.type == "android_app_bundle_module") {
+ forward_variables_from(invoker, [ "android_manifest" ])
+ } else if (defined(_android_manifest_for_lint)) {
+ android_manifest = _android_manifest_for_lint
+ }
+ srcjar_deps = _srcjar_deps
+ build_config = _build_config
+ requires_android = _requires_android
+ deps = _accumulated_deps + _accumulated_public_deps
+ java_files = _java_files
+ if (_java_files != []) {
+ java_sources_file = _java_sources_file
+ }
+ if (defined(invoker.lint_suppressions_file)) {
+ lint_suppressions_file = invoker.lint_suppressions_file
+ }
+ }
+ _analysis_public_deps += [ ":$_android_lint_target" ]
+ }
+
+ if (_analysis_public_deps != []) {
+      # Use an intermediate group() as the data_deps target in order to
+      # avoid errorprone or lint artifacts showing up as runtime_deps (while
+      # still having them run in parallel with other targets).
+ group("${_main_target_name}__analysis") {
+ public_deps = _analysis_public_deps
+ }
+ }
+
+ # Update this after lint so that lint does not depend on javac.
+ _accumulated_public_deps += [ ":$_compile_java_target" ]
+ } # _has_sources
+
+ if (defined(_final_jar_path)) {
+ if (_is_system_library) {
+ _copy_system_library_target_name = "${target_name}__copy_system_library"
+
+ # Use copy_ex rather than copy to ensure that we copy symlink targets
+ # rather than the symlink itself.
+ copy_ex(_copy_system_library_target_name) {
+ sources = [
+ _unprocessed_jar_path,
+ ]
+ dest = _final_jar_path
+ outputs = [
+ _final_jar_path,
+ ]
+ }
+ _accumulated_public_deps += [ ":$_copy_system_library_target_name" ]
+ } else {
+ _process_prebuilt_target_name = "${target_name}__process_prebuilt"
+ process_java_prebuilt(_process_prebuilt_target_name) {
+ forward_variables_from(invoker,
+ [
+ "enable_bytecode_checks",
+ "enable_bytecode_rewriter",
+ "jar_excluded_patterns",
+ "jar_included_patterns",
+ "split_compat_class_names",
+ ])
+ is_prebuilt = _is_prebuilt
+ supports_android = _supports_android
+ enable_build_hooks = _enable_build_hooks
+ enable_build_hooks_android = _enable_build_hooks_android
+ build_config = _build_config
+ input_jar_path = _unprocessed_jar_path
+ emma_instrument = _emma_instrument
+ if (_emma_instrument) {
+ java_files = _java_files
+ java_sources_file = _java_sources_file
+ }
+ output_jar_path = _final_jar_path
+ if (_has_sources) {
+ deps = _accumulated_public_deps # compile & build_config
+ } else {
+ deps = _accumulated_deps + _accumulated_public_deps
+ }
+ }
+ _accumulated_public_deps += [ ":$_process_prebuilt_target_name" ]
+
+ if (defined(_dex_path)) {
+ dex("${target_name}__dex") {
+ input_jars = [ _final_jar_path ]
+ output = _dex_path
+ deps = [
+ ":$_process_prebuilt_target_name",
+ ]
+ }
+ _accumulated_public_deps += [ ":${target_name}__dex" ]
+ }
+ }
+
+ if (!_is_java_binary) {
+ # Export the interface jar as the main target (rather than a group)
+ # so that ninja will notice when the output is unchanged and not rebuild
+ # reverse-dependencies. Targets that should be rebuilt when the
+ # non-interface .jar changes use a depfile to indicate that they should
+ # be rebuilt even when the interface jar does not change.
+ generate_interface_jar(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "deps",
+ "visibility",
+ ])
+
+ # Export all of our steps as "public", so that all outputs can be used
+ # as inputs to other targets.
+ public_deps = _accumulated_public_deps
+
+      # Always use the unfiltered .jar to create the interface jar so that
+ # other targets will resolve filtered classes when depending on
+ # BuildConfig, NativeLibraries, etc.
+ input_jar = _unprocessed_jar_path
+ output_jar = _final_ijar_path
+ if (_lint_enabled || _enable_errorprone) {
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+ data_deps += [ ":${_main_target_name}__analysis" ]
+ }
+
+ # proguard_configs listed on java_library targets need to be marked
+ # as inputs to at least one action so that "gn analyze" will know
+ # about them. Although ijar doesn't use them, it's a convenient spot
+ # to list them.
+ # https://crbug.com/827197
+ if (defined(invoker.proguard_configs)) {
+ inputs = invoker.proguard_configs
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += _srcjar_deps # For the aapt-generated proguard rules.
+ }
+ }
+ }
+ }
+
+ if (_is_java_binary) {
+ # Targets might use the generated script while building, so make it a dep
+ # rather than a data_dep.
+ java_binary_script("${target_name}__java_binary_script") {
+ forward_variables_from(invoker,
+ [
+ "bootclasspath",
+ "main_class",
+ "wrapper_script_args",
+ ])
+ build_config = _build_config
+ if (defined(_final_jar_path)) {
+ jar_path = _final_jar_path
+ }
+ script_name = _main_target_name
+ if (defined(invoker.wrapper_script_name)) {
+ script_name = invoker.wrapper_script_name
+ }
+ deps = _accumulated_public_deps
+ }
+ _accumulated_public_deps += [ ":${target_name}__java_binary_script" ]
+ }
+
+ if (_is_java_binary ||
+ (_is_annotation_processor && !defined(_final_jar_path))) {
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "deps",
+ "data_deps",
+ "visibility",
+ ])
+ public_deps = _accumulated_public_deps
+ if (_lint_enabled || _enable_errorprone) {
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+ data_deps += [ ":${_main_target_name}__analysis" ]
+ }
+ }
+ }
+ }
+}
+
+# Create a zip archive corresponding to an application bundle module.
+#
+# Compile all the components of a given android_apk_or_module() target into a
+# zip archive suitable to later create an android_app_bundle() target. This
+# archive's format is very similar to that of an APK, except for a few
+# differences in internal directory layout, and the fact that resources, as
+# well as xml files, are compiled using a protocol-buffer based format
+# (instead of the regular binary xml + resources.arsc).
+#
+# A final application bundle is built from one or more bundle modules, plus
+# some configuration files.
+#
+# Variables:
+# module_zip_path: Output module path.
+#
+# build_config: Path to build_config of the android_apk_or_module() target.
+#
+#   dex_path: If the module is proguarded separately from the base module,
+#       dex_path is the path to its dex file and is passed directly to the
+#       creation script.
+# Otherwise, dex_path is undefined and we retrieve the module's dex file
+# using its build_config.
+#
+template("create_android_app_bundle_module") {
+ _build_config = invoker.build_config
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ "deps",
+ ])
+ script = "//build/android/gyp/apkbuilder.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+    # NOTE: Compared to the inputs of the "package_apk" template action,
+    # this list is much smaller, since finalize_apk is never called by
+    # apkbuilder.py --format=bundle-module. This also means that apksigner,
+    # zipalign and the keystore are not used. Other dependencies like extra
+    # native libraries are all pulled from the .build_config through
+    # @FileArg() references (see below) and will be listed in the generated
+    # depfile instead.
+ inputs = [
+ _build_config,
+ ]
+ outputs = [
+ invoker.module_zip_path,
+ ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--format=bundle-module",
+ "--output-apk",
+ rebase_path(invoker.module_zip_path, root_build_dir),
+ "--resource-apk=@FileArg(" +
+ "$_rebased_build_config:deps_info:proto_resources_path)",
+ "--assets=@FileArg($_rebased_build_config:assets)",
+ "--uncompressed-assets=@FileArg(" +
+ "$_rebased_build_config:uncompressed_assets)",
+ "--native-libs=@FileArg($_rebased_build_config:native:libraries)",
+ "--native-libs=@FileArg($_rebased_build_config:native:extra_shared_libraries)",
+ "--native-lib-placeholders=@FileArg($_rebased_build_config:native:native_library_placeholders)",
+ "--secondary-native-lib-placeholders=@FileArg($_rebased_build_config:native:secondary_native_library_placeholders)",
+ "--android-abi=$android_app_abi",
+ "--uncompress-shared-libraries=@FileArg(" +
+ "$_rebased_build_config:native:uncompress_shared_libraries)",
+ ]
+ if (defined(android_app_secondary_abi)) {
+ args += [
+ "--secondary-native-libs=@FileArg(" +
+ "$_rebased_build_config:native:secondary_abi_libraries)",
+ "--secondary-android-abi=$android_app_secondary_abi",
+ ]
+ }
+
+ # Use either provided dex path or build config path based on type of module.
+ if (defined(invoker.dex_path)) {
+ inputs += [ invoker.dex_path ]
+ _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir)
+ args += [ "--dex-file=$_rebased_dex_path" ]
+ } else {
+ args += [ "--dex-file=@FileArg($_rebased_build_config:final_dex:path)" ]
+ }
+ }
+}
+
+# Splits input dex file(s) based on given feature jars into separate dex files
+# for each feature.
+#
+# Variables:
+# proguard_mapping: Path to input proguard mapping produced by synchronized
+# proguarding.
+# input_dex_zip: Path to zipped dex files to split.
+#   all_modules: List of all modules. Each module must have build_config,
+#       name, and build_config_target properties.
+template("dexsplitter") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker, [ "deps" ])
+ script = "//build/android/gyp/dexsplitter.py"
+ inputs = [
+ invoker.input_dex_zip,
+ ]
+ _stamp = "${target_gen_dir}/${target_name}.stamp"
+ outputs = [
+ _stamp,
+ ]
+
+ depfile = "${target_gen_dir}/${target_name}.d"
+ args = [
+ "--stamp",
+ rebase_path(_stamp, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--r8-path",
+ rebase_path(_r8_path, root_build_dir),
+ "--input-dex-zip",
+ rebase_path(invoker.input_dex_zip, root_build_dir),
+ "--proguard-mapping-file",
+ rebase_path(invoker.proguard_mapping, root_build_dir),
+ ]
+
+ foreach(_feature_module, invoker.all_modules) {
+ _rebased_module_build_config =
+ rebase_path(_feature_module.build_config, root_build_dir)
+ args += [
+ "--feature-name",
+ _feature_module.name,
+ "--feature-jars=@FileArg($_rebased_module_build_config:deps_info:java_runtime_classpath)",
+ "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)",
+ ]
+ deps += [ _feature_module.build_config_target ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/android/linker_version_script.gni b/deps/v8/build/config/android/linker_version_script.gni
new file mode 100644
index 0000000000..6cb3b09013
--- /dev/null
+++ b/deps/v8/build/config/android/linker_version_script.gni
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+
+# Generate a custom linker version script that can later be used with
+# "-Wl,--version-script=<path>" ldflags.
+#
+# Variables:
+# export_java_symbols: Optional. If true, also export all Java_* symbols
+# exported for JNI.
+# export_symbol_whitelist_file: Optional. Path to an input file containing
+# a whitelist of exported symbols.
+# linker_script: Path to output linker version script.
+#
+template("generate_linker_version_script") {
+ action_with_pydeps(target_name) {
+ script = "//build/android/gyp/generate_linker_version_script.py"
+ outputs = [
+ invoker.linker_script,
+ ]
+ inputs = []
+ args = [ "--output=" + rebase_path(invoker.linker_script, root_build_dir) ]
+
+ if (defined(invoker.export_java_symbols) && invoker.export_java_symbols) {
+ args += [ "--export-java-symbols" ]
+ }
+
+ if (defined(invoker.export_symbol_whitelist_file)) {
+ inputs += [ invoker.export_symbol_whitelist_file ]
+ args +=
+ [ "--export-symbol-whitelist-file=" +
+ rebase_path(invoker.export_symbol_whitelist_file, root_build_dir) ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/android/rules.gni b/deps/v8/build/config/android/rules.gni
new file mode 100644
index 0000000000..4846ade15c
--- /dev/null
+++ b/deps/v8/build/config/android/rules.gni
@@ -0,0 +1,4584 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not add any imports to non-//build directories here.
+# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+import("//build/config/android/config.gni")
+import("//build/config/android/internal_rules.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/python.gni")
+import("//build/config/zip.gni")
+import("//build/toolchain/toolchain.gni")
+assert(is_android)
+
+declare_args() {
+ enable_jni_tracing = false
+}
+
+if (target_cpu == "arm") {
+ _sanitizer_arch = "arm"
+} else if (target_cpu == "arm64") {
+ _sanitizer_arch = "aarch64"
+} else if (target_cpu == "x86") {
+ _sanitizer_arch = "i686"
+}
+
+_sanitizer_runtimes = []
+if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
+ _sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.ubsan_standalone-$_sanitizer_arch-android.so" ]
+}
+
+if (is_hwasan) {
+ _sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.hwasan-$_sanitizer_arch-android.so" ]
+}
+
+# Creates a dist directory for a native executable.
+#
+# Running a native executable on a device requires all the shared library
+# dependencies of that executable. To make it easier to install and run such an
+# executable, this will create a directory containing the native exe and all
+# its library dependencies.
+#
+# Note: It's usually better to package things as an APK than as a native
+# executable.
+#
+# Variables
+# dist_dir: Directory for the exe and libraries. Everything in this directory
+# will be deleted before copying in the exe and libraries.
+# binary: Path to (stripped) executable.
+# extra_files: List of extra files to copy in (optional).
+#
+# Example
+# create_native_executable_dist("foo_dist") {
+# dist_dir = "$root_build_dir/foo_dist"
+# binary = "$root_build_dir/foo"
+# deps = [ ":the_thing_that_makes_foo" ]
+# }
+template("create_native_executable_dist") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _libraries_list = "${target_gen_dir}/${target_name}_library_dependencies.list"
+
+ _runtime_deps_file = "$target_gen_dir/${target_name}.runtimedeps"
+ _runtime_deps_target_name = "${target_name}__runtime_deps"
+ group(_runtime_deps_target_name) {
+ data = _sanitizer_runtimes
+ data_deps = []
+ if (defined(invoker.deps)) {
+ data_deps += invoker.deps
+ }
+ write_runtime_deps = _runtime_deps_file
+ }
+
+ _find_deps_target_name = "${target_name}__find_library_dependencies"
+
+ # TODO(agrieve): Extract dependent libs from GN rather than readelf.
+ action_with_pydeps(_find_deps_target_name) {
+ deps = invoker.deps + [ ":$_runtime_deps_target_name" ]
+ script = "//build/android/gyp/write_ordered_libraries.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ inputs = [
+ invoker.binary,
+ _runtime_deps_file,
+ android_readelf,
+ ]
+ outputs = [
+ _libraries_list,
+ ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--runtime-deps",
+ rebase_path(_runtime_deps_file, root_build_dir),
+ "--output",
+ rebase_path(_libraries_list, root_build_dir),
+ "--readelf",
+ rebase_path(android_readelf, root_build_dir),
+ ]
+ }
+
+ copy_ex(target_name) {
+ inputs = [
+ _libraries_list,
+ invoker.binary,
+ ]
+
+ dest = invoker.dist_dir
+ data = [
+ "${invoker.dist_dir}/",
+ ]
+
+ _rebased_libraries_list = rebase_path(_libraries_list, root_build_dir)
+ _rebased_binaries_list = rebase_path([ invoker.binary ], root_build_dir)
+ args = [
+ "--clear",
+ "--files=@FileArg($_rebased_libraries_list:lib_paths)",
+ "--files=$_rebased_binaries_list",
+ ]
+ if (defined(invoker.extra_files)) {
+ _rebased_extra_files = rebase_path(invoker.extra_files, root_build_dir)
+ args += [ "--files=$_rebased_extra_files" ]
+ }
+
+ _depfile = "$target_gen_dir/$target_name.d"
+ _stamp_file = "$target_gen_dir/$target_name.stamp"
+ outputs = [
+ _stamp_file,
+ ]
+ args += [
+ "--depfile",
+ rebase_path(_depfile, root_build_dir),
+ "--stamp",
+ rebase_path(_stamp_file, root_build_dir),
+ ]
+
+ deps = [
+ ":$_find_deps_target_name",
+ ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ }
+}
+
+# Writes a script to root_out_dir/bin that passes --output-directory to the
+# wrapped script, in addition to forwarding arguments. Most, if not all, of
+# these wrappers should be made deps of //tools/android:android_tools.
+#
+# Variables
+# target: Script to wrap.
+# flag_name: Default is "--output-directory"
+#
+# Example
+# wrapper_script("foo_wrapper") {
+# target = "//pkg/foo.py"
+# }
+template("wrapper_script") {
+ action_with_pydeps(target_name) {
+ _name = get_path_info(invoker.target, "name")
+ _output = "$root_out_dir/bin/$_name"
+
+ script = "//build/android/gyp/create_tool_wrapper.py"
+ outputs = [
+ _output,
+ ]
+
+ # The target isn't actually used by the script, but it's nice to have GN
+ # check that it exists.
+ inputs = [
+ invoker.target,
+ ]
+ args = [
+ "--output",
+ rebase_path(_output, root_build_dir),
+ "--target",
+ rebase_path(invoker.target, root_build_dir),
+ "--output-directory",
+ rebase_path(root_out_dir, root_build_dir),
+ ]
+ if (defined(invoker.flag_name)) {
+ args += [ "--flag-name=${invoker.flag_name}" ]
+ }
+ }
+}
+
+if (enable_java_templates) {
+ import("//build/config/sanitizers/sanitizers.gni")
+ import("//tools/grit/grit_rule.gni")
+
+ # Declare a jni target
+ #
+ # This target generates the native jni bindings for a set of .java files.
+ #
+ # See base/android/jni_generator/jni_generator.py for more info about the
+ # format of generating JNI bindings.
+ #
+ # Variables
+ # sources: list of .java files to generate jni for
+ # jni_package: subdirectory path for generated bindings
+ #
+ # Example
+ # generate_jni("foo_jni") {
+ # sources = [
+ # "android/java/src/org/chromium/foo/Foo.java",
+ # "android/java/src/org/chromium/foo/FooUtil.java",
+ # ]
+ # jni_package = "foo"
+ # }
+ template("generate_jni") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _base_output_dir = "${target_gen_dir}/${target_name}"
+ _package_output_dir = "${_base_output_dir}/${invoker.jni_package}"
+ _jni_output_dir = "${_package_output_dir}/jni"
+
+ if (defined(invoker.jni_generator_include)) {
+ _jni_generator_include = invoker.jni_generator_include
+ _jni_generator_include_deps = []
+ } else {
+ _jni_generator_include =
+ "//base/android/jni_generator/jni_generator_helper.h"
+ _jni_generator_include_deps = [
+ # Using //base/android/jni_generator/jni_generator_helper.h introduces
+ # a dependency on debugging_buildflags indirectly through
+ # base/android/jni_android.h, which is part of the //base target.
+ # This can't depend directly on //base without causing a dependency
+ # cycle, though.
+ "//base:debugging_buildflags",
+ ]
+ }
+
+ _foreach_target_name = "${target_name}__jni_gen"
+ action_foreach_with_pydeps(_foreach_target_name) {
+ script = "//base/android/jni_generator/jni_generator.py"
+ sources = invoker.sources
+ outputs = [
+ "${_jni_output_dir}/{{source_name_part}}_jni.h",
+ ]
+
+ args = [
+ "--input_file={{source}}",
+ "--ptr_type=long",
+ "--output_dir",
+ rebase_path(_jni_output_dir, root_build_dir),
+ "--includes",
+ rebase_path(_jni_generator_include, _jni_output_dir),
+ ]
+
+ if (use_hashed_jni_names) {
+ args += [ "--use_proxy_hash" ]
+ }
+
+ if (enable_profiling) {
+ args += [ "--enable_profiling" ]
+ }
+ if (defined(invoker.namespace)) {
+ args += [ "-n ${invoker.namespace}" ]
+ }
+ if (enable_jni_tracing) {
+ args += [ "--enable_tracing" ]
+ }
+ }
+
+ config("jni_includes_${target_name}") {
+ # TODO(cjhopman): #includes should probably all be relative to
+ # _base_output_dir. Remove that from this config once the includes are
+ # updated.
+ include_dirs = [
+ _base_output_dir,
+ _package_output_dir,
+ ]
+ }
+
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "visibility",
+ ])
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+ public_deps += [ ":$_foreach_target_name" ]
+ public_deps += _jni_generator_include_deps
+ public_configs = [ ":jni_includes_${target_name}" ]
+ }
+ }
+
+ # Declare a jni target for a prebuilt jar
+ #
+ # This target generates the native jni bindings for a set of classes in a .jar.
+ #
+ # See base/android/jni_generator/jni_generator.py for more info about the
+ # format of generating JNI bindings.
+ #
+ # Variables
+ # classes: list of .class files in the jar to generate jni for. These should
+ # include the full path to the .class file.
+ # jni_package: subdirectory path for generated bindings
+ # jar_file: the path to the .jar. If not provided, will default to the sdk's
+ # android.jar
+ #   always_mangle: Mangle all generated method names. By default, the script
+ #     only mangles methods that cause ambiguity due to method overloading.
+ #
+ # deps, public_deps: As normal
+ #
+ # Example
+ # generate_jar_jni("foo_jni") {
+ # classes = [
+ # "android/view/Foo.class",
+ # ]
+ # jni_package = "foo"
+ # }
+ template("generate_jar_jni") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ if (defined(invoker.jar_file)) {
+ _jar_file = invoker.jar_file
+ } else {
+ _jar_file = android_sdk_jar
+ }
+
+ _always_mangle = defined(invoker.always_mangle) && invoker.always_mangle
+
+ _base_output_dir = "${target_gen_dir}/${target_name}/${invoker.jni_package}"
+ _jni_output_dir = "${_base_output_dir}/jni"
+
+ if (defined(invoker.jni_generator_include)) {
+ _jni_generator_include = invoker.jni_generator_include
+ } else {
+ _jni_generator_include =
+ "//base/android/jni_generator/jni_generator_helper.h"
+ }
+
+ # TODO(cjhopman): make jni_generator.py support generating jni for multiple
+ # .class files from a .jar.
+ _jni_actions = []
+ foreach(_class, invoker.classes) {
+ _classname = get_path_info(_class, "name")
+ _jni_target_name = "${target_name}__jni_${_classname}"
+ _jni_actions += [ ":$_jni_target_name" ]
+ action_with_pydeps(_jni_target_name) {
+ # The sources aren't compiled so don't check their dependencies.
+ check_includes = false
+ script = "//base/android/jni_generator/jni_generator.py"
+ inputs = [
+ _jar_file,
+ ]
+ outputs = [
+ "${_jni_output_dir}/${_classname}_jni.h",
+ ]
+
+ args = [
+ "--jar_file",
+ rebase_path(_jar_file, root_build_dir),
+ "--input_file",
+ _class,
+ "--ptr_type=long",
+ "--output_dir",
+ rebase_path(_jni_output_dir, root_build_dir),
+ "--includes",
+ rebase_path(_jni_generator_include, _jni_output_dir),
+ ]
+
+ if (enable_profiling) {
+ args += [ "--enable_profiling" ]
+ }
+ if (enable_jni_tracing) {
+ args += [ "--enable_tracing" ]
+ }
+ if (_always_mangle) {
+ args += [ "--always_mangle" ]
+ }
+ }
+ }
+
+ config("jni_includes_${target_name}") {
+ include_dirs = [ _base_output_dir ]
+ }
+
+ group(target_name) {
+ public_deps = []
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "visibility",
+ ])
+ public_deps += _jni_actions
+ public_configs = [ ":jni_includes_${target_name}" ]
+ }
+ }
+
+ # Declare a jni registration target.
+ #
+ # This target generates a srcjar containing a copy of GEN_JNI.java, which has
+ # the native methods of all dependent java files. It can also create a .h file
+ # for use with manual JNI registration.
+ #
+ # The script does not scan any generated sources (those within .srcjars, or
+ # within root_build_dir). This could be fixed by adding deps & logic to scan
+ # .srcjars, but isn't currently needed.
+ #
+ # See base/android/jni_generator/jni_registration_generator.py for more info
+ # about the format of the header file.
+ #
+ # Variables
+ # target: The Apk target to use for the java sources list.
+ # header_output: Path to the generated .h file (optional).
+ # sources_blacklist: List of .java files that should be skipped. (optional)
+ # namespace: Registration functions will be wrapped into this. (optional)
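+ #   enable_native_mocks: Also generate mocks for the proxy natives, for use
+ #     in junit tests. (optional; see junit_binary())
+ #   require_native_mocks: Require that the generated mocks be used in place
+ #     of the real natives. (optional; see junit_binary())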
+ #
+ # Example
+ # generate_jni_registration("chrome_jni_registration") {
+ # target = ":chrome_public_apk"
+ # header_output = "$target_gen_dir/$target_name.h"
+ # sources_blacklist = [
+ # "//path/to/Exception.java",
+ # ]
+ # }
+ template("generate_jni_registration") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
+ _build_config = get_label_info(invoker.target, "target_gen_dir") + "/" +
+ get_label_info(invoker.target, "name") + ".build_config"
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ _srcjar_output = "$target_gen_dir/$target_name.srcjar"
+
+ script = "//base/android/jni_generator/jni_registration_generator.py"
+ deps = [
+ "${invoker.target}$build_config_target_suffix",
+ ]
+ inputs = [
+ _build_config,
+ ]
+ outputs = [
+ _srcjar_output,
+ ]
+ depfile = "$target_gen_dir/$target_name.d"
+
+ args = [
+ # This is a list of .sources files.
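+ # @FileArg(path:key:...) is expanded by the script at runtime to the value
+ # found under those keys in the given JSON .build_config file (see
+ # build_utils.ExpandFileArgs).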
+ "--sources-files=@FileArg($_rebased_build_config:deps_info:jni:all_source)",
+ "--srcjar-path",
+ rebase_path(_srcjar_output, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+
+ if (use_hashed_jni_names) {
+ args += [ "--use_proxy_hash" ]
+ }
+
+ if (defined(invoker.enable_native_mocks) && invoker.enable_native_mocks) {
+ args += [ "--enable_proxy_mocks" ]
+ }
+
+ if (defined(invoker.require_native_mocks) &&
+ invoker.require_native_mocks) {
+ args += [ "--require_mocks" ]
+ }
+
+ if (defined(invoker.header_output)) {
+ outputs += [ invoker.header_output ]
+ args += [
+ "--header-path",
+ rebase_path(invoker.header_output, root_build_dir),
+ ]
+ }
+
+ if (defined(invoker.sources_blacklist)) {
+ _rebase_sources_blacklist =
+ rebase_path(invoker.sources_blacklist, root_build_dir)
+ args += [ "--sources-blacklist=$_rebase_sources_blacklist" ]
+ }
+
+ if (defined(invoker.namespace)) {
+ args += [ "--namespace=${invoker.namespace}" ]
+ }
+ }
+ }
+
+ # Declare a target for c-preprocessor-generated java files
+ #
+ # NOTE: For generating Java counterparts to enums, prefer using the
+ # java_cpp_enum rule instead.
+ #
+ # This target generates java files using the host C pre-processor. Each file
+ # in sources will be run through the C pre-processor. If include_path is
+ # specified, it will be passed (with -I) to the pre-processor.
+ #
+ # This target will create a single .srcjar. Adding this target to an
+ # android_library target's srcjar_deps will make the generated java files be
+ # included in that library's final outputs.
+ #
+ # Variables
+ # sources: list of files to be processed by the C pre-processor. For each
+ # file in sources, there will be one .java file in the final .srcjar. For a
+ # file named FooBar.template, a java file will be created with name
+ # FooBar.java.
+ # inputs: additional compile-time dependencies. Any files
+ # `#include`-ed in the templates should be listed here.
+ # package_path: this will be the subdirectory for each .java file in the
+ # .srcjar.
+ #
+ # Example
+ # java_cpp_template("foo_generated_enum") {
+ # sources = [
+ # "android/java/templates/Foo.template",
+ # ]
+ # inputs = [
+ # "android/java/templates/native_foo_header.h",
+ # ]
+ #
+ # package_path = "org/chromium/base/library_loader"
+ # include_path = "android/java/templates"
+ # }
+ template("java_cpp_template") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _include_path = "//"
+ if (defined(invoker.include_path)) {
+ _include_path = invoker.include_path
+ }
+
+ _apply_gcc_target_name = "${target_name}__apply_gcc"
+ _base_gen_dir = "${target_gen_dir}/${target_name}/java_cpp_template"
+ _package_path = invoker.package_path
+
+ action_foreach_with_pydeps(_apply_gcc_target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "inputs",
+ "public_deps",
+ "data_deps",
+ ])
+ script = "//build/android/gyp/gcc_preprocess.py"
+ depfile =
+ "${target_gen_dir}/${invoker.target_name}_{{source_name_part}}.d"
+
+ sources = invoker.sources
+
+ outputs = [
+ "$_base_gen_dir/${_package_path}/{{source_name_part}}.java",
+ ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--include-path",
+ rebase_path(_include_path, root_build_dir),
+ "--output",
+ rebase_path(outputs[0], root_build_dir),
+ "--template={{source}}",
+ ]
+
+ if (defined(invoker.defines)) {
+ foreach(_def, invoker.defines) {
+ args += [
+ "--defines",
+ _def,
+ ]
+ }
+ }
+ }
+
+ # Filter out .d files.
+ set_sources_assignment_filter([ "*.d" ])
+ sources = get_target_outputs(":$_apply_gcc_target_name")
+
+ zip(target_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+ inputs = sources
+ output = "${target_gen_dir}/${target_name}.srcjar"
+ base_dir = _base_gen_dir
+ deps = [
+ ":$_apply_gcc_target_name",
+ ]
+ }
+ }
+
+ # Declare a target for generating Java classes from C++ enums.
+ #
+ # This target generates Java files from C++ enums using a script.
+ #
+ # This target will create a single .srcjar. Adding this target to an
+ # android_library target's srcjar_deps will make the generated java files be
+ # included in that library's final outputs.
+ #
+ # Variables
+ # sources: list of files to be processed by the script. For each annotated
+ # enum contained in the sources files the script will generate a .java
+ # file with the same name as the name of the enum.
+ #
+ # Example
+ # java_cpp_enum("foo_generated_enum") {
+ # sources = [
+ # "src/native_foo_header.h",
+ # ]
+ # }
+ template("java_cpp_enum") {
+ set_sources_assignment_filter([])
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+
+ # The sources aren't compiled so don't check their dependencies.
+ check_includes = false
+ script = "//build/android/gyp/java_cpp_enum.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+ _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
+ _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--srcjar=$_rebased_srcjar_path",
+ ] + _rebased_sources
+ outputs = [
+ _srcjar_path,
+ ]
+ }
+ }
+
+ # Declare a target for generating Java classes with string constants matching
+ # those found in C++ files using a python script.
+ #
+ # This target will create a single .srcjar. Adding this target to an
+ # android_library target's srcjar_deps will make the generated java files be
+ # included in that library's final outputs.
+ #
+ # Variables
+ # sources: list of files to be processed by the script. For each string
+ # constant in the source files, the script will add a corresponding
+ # Java string to the specified template file.
+ # Example
+ # java_cpp_strings("foo_switches") {
+ # sources = [
+ # "src/foo_switches.cc",
+ # ]
+ #     template = "src/templates/FooSwitches.java.tmpl"
+ # }
+ #
+ # foo_switches.cc:
+ #
+ # // A switch.
+ #   const char kASwitch[] = "a-switch";
+ #
+ # FooSwitches.java.tmpl:
+ #
+ # // Copyright {YEAR} The Chromium Authors. All rights reserved.
+ # // Use of this source code is governed by a BSD-style license that can be
+ # // found in the LICENSE file.
+ #
+ # // This file is autogenerated by
+ # // {SCRIPT_NAME}
+ # // From
+ # // {SOURCE_PATH}, and
+ # // {TEMPLATE_PATH}
+ #
+ # package my.java.package;
+ #
+ # public abstract class FooSwitches {{
+ # // ...snip...
+ # {NATIVE_STRINGS}
+ # // ...snip...
+ # }}
+ #
+ # result:
+ # A FooSwitches.java file, defining a class named FooSwitches in the package
+ # my.java.package.
+ template("java_cpp_strings") {
+ set_sources_assignment_filter([])
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+
+ # The sources aren't compiled so don't check their dependencies.
+ check_includes = false
+ script = "//build/android/gyp/java_cpp_strings.py"
+
+ _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+ _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
+ _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+ _rebased_template = rebase_path(invoker.template, root_build_dir)
+
+ args = [
+ "--srcjar=$_rebased_srcjar_path",
+ "--template=$_rebased_template",
+ ]
+ args += _rebased_sources
+ sources += [ invoker.template ]
+
+ outputs = [
+ _srcjar_path,
+ ]
+ }
+ }
+
+ # Declare a target for processing a Jinja template.
+ #
+ # Variables
+ # input: The template file to be processed.
+ # includes: List of files {% include %}'ed by input.
+ # output: Where to save the result.
+ # variables: (Optional) A list of variables to make available to the template
+ # processing environment, e.g. ["name=foo", "color=red"].
+ #
+ # Example
+ # jinja_template("chrome_public_manifest") {
+ # input = "java/AndroidManifest.xml"
+ # output = "$target_gen_dir/AndroidManifest.xml"
+ # }
+ template("jinja_template") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "visibility",
+ "deps",
+ "testonly",
+ ])
+ inputs = [
+ invoker.input,
+ ]
+ if (defined(invoker.includes)) {
+ inputs += invoker.includes
+ }
+ script = "//build/android/gyp/jinja_template.py"
+
+ outputs = [
+ invoker.output,
+ ]
+
+ args = [
+ "--loader-base-dir",
+ rebase_path("//", root_build_dir),
+ "--inputs",
+ rebase_path(invoker.input, root_build_dir),
+ "--output",
+ rebase_path(invoker.output, root_build_dir),
+ "--check-includes",
+ ]
+ if (defined(invoker.includes)) {
+ _rebased_includes = rebase_path(invoker.includes, root_build_dir)
+ args += [ "--includes=$_rebased_includes" ]
+ }
+ if (defined(invoker.variables)) {
+ args += [ "--variables=${invoker.variables}" ]
+ }
+ }
+ }
+
+ # Declare a target for a set of Android resources generated at build
+ # time and stored in a single zip archive. The content of the archive
+ # should match the layout of a regular Android res/ folder (but the
+ # archive should not include a top-level res/ directory).
+ #
+ # Note that there is no .srcjar, R.txt or package name associated with this
+ # target.
+ #
+ # Variables:
+ # generated_resources_zip: Generated zip archive path.
+ #   generating_target_name: Name of the target generating
+ #     generated_resources_zip. This rule will check that the zip is listed
+ #     among that target's outputs.
+ #   deps: Specifies the dependencies of this target. Any Android resources
+ #     listed here will also be included *after* this one when compiling all
+ #     resources for a final apk or junit binary. This is useful to ensure
+ #     that the resources of the current target override those of the
+ #     dependency as well (which would not work if you added these deps to
+ #     the generating target's dependencies).
+ #
+ # Example
+ # _zip_archive = "$target_gen_dir/${target_name}.resources_zip"
+ #
+ # action("my_resources__create_zip") {
+ # _depfile = "$target_gen_dir/${target_name}.d"
+ # script = "//build/path/to/create_my_resources_zip.py"
+ # args = [
+ # "--depfile", rebase_path(_depfile, root_build_dir),
+ # "--output-zip", rebase_path(_zip_archive, root_build_dir),
+ # ]
+ # inputs = []
+ # outputs = _zip_archive
+ # depfile = _depfile
+ # }
+ #
+ # android_generated_resources("my_resources") {
+ # generated_resources_zip = _zip_archive
+ # generating_target_name = ":my_resources__create_zip"
+ # }
+ #
+ template("android_generated_resources") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _build_config = "$target_gen_dir/${target_name}.build_config"
+
+ write_build_config("$target_name$build_config_target_suffix") {
+ build_config = _build_config
+ resources_zip = invoker.generated_resources_zip
+ type = "android_resources"
+ if (defined(invoker.deps)) {
+ possible_config_deps = invoker.deps
+ }
+ }
+
+ group(target_name) {
+ public_deps = [
+ ":$target_name$build_config_target_suffix",
+ invoker.generating_target_name,
+ ]
+ }
+ }
+
+ # Declare a target for processing Android resources as Jinja templates.
+ #
+ # This takes an Android resource directory where each resource is a Jinja
+ # template, processes each template, then packages the results in a zip file
+ # which can be consumed by an android resources, library, or apk target.
+ #
+ # If this target is included in the deps of an android resources/library/apk,
+ # the resources will be included with that target.
+ #
+ # Variables
+ # resources: The list of resources files to process.
+ # res_dir: The resource directory containing the resources.
+ # variables: (Optional) A list of variables to make available to the template
+ # processing environment, e.g. ["name=foo", "color=red"].
+ #
+ # Example
+ # jinja_template_resources("chrome_public_template_resources") {
+ # res_dir = "res_template"
+ # resources = ["res_template/xml/syncable.xml"]
+ # variables = ["color=red"]
+ # }
+ template("jinja_template_resources") {
+ # JUnit tests use resource zip files. These must not be put in the gen/
+ # directory or they will not be available to tester bots.
+ _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+ _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+
+ _generating_target_name = "${target_name}__template"
+
+ action_with_pydeps(_generating_target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ inputs = invoker.resources
+ script = "//build/android/gyp/jinja_template.py"
+
+ outputs = [
+ _resources_zip,
+ ]
+
+ _rebased_resources = rebase_path(invoker.resources, root_build_dir)
+ args = [
+ "--inputs=${_rebased_resources}",
+ "--inputs-base-dir",
+ rebase_path(invoker.res_dir, root_build_dir),
+ "--outputs-zip",
+ rebase_path(_resources_zip, root_build_dir),
+ "--check-includes",
+ ]
+ if (defined(invoker.variables)) {
+ variables = invoker.variables
+ args += [ "--variables=${variables}" ]
+ }
+ }
+
+ android_generated_resources(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ generating_target_name = ":$_generating_target_name"
+ generated_resources_zip = _resources_zip
+ }
+ }
+
+ # Declare an Android resources target
+ #
+ # This creates a resources zip file that will be used when building an Android
+ # library or apk and included into a final apk.
+ #
+ # To include these resources in a library/apk, this target should be listed in
+ # the library's deps. A library/apk will also include any resources used by its
+ # own dependencies.
+ #
+ # Variables
+ # deps: Specifies the dependencies of this target. Any Android resources
+ # listed in deps will be included by libraries/apks that depend on this
+ # target.
+ # alternative_android_sdk_dep: Optional. Alternative Android system
+ # android java target to use.
+ # resource_dirs: List of directories containing resources for this target.
+ # generated_resource_dirs: List of directories containing resources for this
+ # target which are *generated* by a dependency. |generated_resource_files|
+ # must be specified if |generated_resource_dirs| is specified.
+ # generated_resource_files: List of all files in |generated_resource_dirs|.
+ #     |generated_resource_dirs| must be specified if |generated_resource_files|
+ #     is specified.
+ # android_manifest: AndroidManifest.xml for this target (optional). Will be
+ # merged into apks that directly or indirectly depend on this target.
+ # android_manifest_dep: Target that generates AndroidManifest (if applicable)
+ # custom_package: java package for generated .java files.
+ # v14_skip: If true, don't run v14 resource generator on this. Defaults to
+ # false. (see build/android/gyp/generate_v14_compatible_resources.py)
+ #   shared_resources: If true, make a resource package that can be loaded by
+ #     a different application at runtime to access the package's resources.
+ # r_text_file: (optional) path to pre-generated R.txt to be used when
+ # generating R.java instead of resource-based aapt-generated one.
+ # create_srcjar: If false, does not create an R.java file. Needed only for
+ # prebuilts that have R.txt files that do not match their res/
+ # (Play Services).
+ #
+ # Example:
+ # android_resources("foo_resources") {
+ # deps = [":foo_strings_grd"]
+ # resource_dirs = ["res"]
+ # custom_package = "org.chromium.foo"
+ # }
+ #
+ # android_resources("foo_resources_overrides") {
+ # deps = [":foo_resources"]
+ # resource_dirs = ["res_overrides"]
+ # }
+ template("android_resources") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _base_path = "$target_gen_dir/$target_name"
+
+ # JUnit tests use resource zip files. These must not be put in the gen/
+ # directory or they will not be available to tester bots.
+ _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+ _zip_path = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+ _r_text_out_path = _base_path + "_R.txt"
+ _build_config = _base_path + ".build_config"
+ _build_config_target_name = "$target_name$build_config_target_suffix"
+
+ if (!defined(invoker.create_srcjar) || invoker.create_srcjar) {
+ _srcjar_path = _base_path + ".srcjar"
+ }
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ if (defined(invoker.alternative_android_sdk_dep)) {
+ _deps += [ invoker.alternative_android_sdk_dep ]
+ } else {
+ _deps += [ "//third_party/android_sdk:android_sdk_java" ]
+ }
+
+ write_build_config(_build_config_target_name) {
+ type = "android_resources"
+ build_config = _build_config
+ resources_zip = _zip_path
+
+ resource_dirs = invoker.resource_dirs
+ if (defined(invoker.generated_resource_dirs)) {
+ resource_dirs += invoker.generated_resource_dirs
+ }
+
+ if (defined(_srcjar_path)) {
+ forward_variables_from(invoker,
+ [
+ "android_manifest",
+ "android_manifest_dep",
+ "custom_package",
+ ])
+
+ # No package means resources override their deps.
+ if (defined(custom_package) || defined(android_manifest)) {
+ r_text = _r_text_out_path
+ } else {
+ assert(defined(invoker.deps),
+ "Must specify deps when custom_package is omitted.")
+ }
+ srcjar = _srcjar_path
+ }
+
+ possible_config_deps = _deps
+ }
+
+ prepare_resources(target_name) {
+ forward_variables_from(invoker,
+ [
+ "android_manifest",
+ "custom_package",
+ "generated_resource_dirs",
+ "generated_resource_files",
+ "resource_dirs",
+ "v14_skip",
+ "strip_drawables",
+ ])
+ deps = _deps
+ deps += [ ":$_build_config_target_name" ]
+ if (defined(invoker.android_manifest_dep)) {
+ deps += [ invoker.android_manifest_dep ]
+ }
+
+ build_config = _build_config
+ zip_path = _zip_path
+ r_text_out_path = _r_text_out_path
+
+ if (defined(invoker.r_text_file)) {
+ r_text_in_path = invoker.r_text_file
+ }
+ if (defined(_srcjar_path)) {
+ srcjar_path = _srcjar_path
+ }
+
+ # Always generate the R.onResourcesLoaded() method; it is required for
+ # compiling ResourceRewriter. There is no side effect, because the generated
+ # R.class isn't used in the final apk.
+ shared_resources = true
+ }
+ }
+
+ # Declare an Android assets target.
+ #
+ # Defines a set of files to include as assets in a dependent apk.
+ #
+ # To include these assets in an apk, this target should be listed in
+ # the apk's deps, or in the deps of a library target used by an apk.
+ #
+ # Variables
+ # deps: Specifies the dependencies of this target. Any Android assets
+ # listed in deps will be included by libraries/apks that depend on this
+ # target.
+ # sources: List of files to include as assets.
+ # renaming_sources: List of files to include as assets and be renamed.
+ # renaming_destinations: List of asset paths for files in renaming_sources.
+ #   disable_compression: Whether to disable compression for files that are
+ #     known to be compressible (default: false).
+ # treat_as_locale_paks: Causes base's BuildConfig.java to consider these
+ # assets to be locale paks.
+ #
+ # Example:
+ # android_assets("content_shell_assets") {
+ # deps = [
+ # ":generates_foo",
+ # ":other_assets",
+ # ]
+ # sources = [
+ # "//path/asset1.png",
+ # "//path/asset2.png",
+ # "$target_gen_dir/foo.dat",
+ # ]
+ # }
+ #
+ # android_assets("overriding_content_shell_assets") {
+ # deps = [ ":content_shell_assets" ]
+ # # Override foo.dat from content_shell_assets.
+ # sources = [ "//custom/foo.dat" ]
+ # renaming_sources = [ "//path/asset2.png" ]
+ # renaming_destinations = [ "renamed/asset2.png" ]
+ # }
+ template("android_assets") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _build_config = "$target_gen_dir/$target_name.build_config"
+ _build_config_target_name = "$target_name$build_config_target_suffix"
+
+ write_build_config(_build_config_target_name) {
+ type = "android_assets"
+ build_config = _build_config
+
+ forward_variables_from(invoker,
+ [
+ "disable_compression",
+ "treat_as_locale_paks",
+ ])
+
+ if (defined(invoker.deps)) {
+ possible_config_deps = invoker.deps
+ }
+
+ if (defined(invoker.sources)) {
+ asset_sources = invoker.sources
+ }
+ if (defined(invoker.renaming_sources)) {
+ assert(defined(invoker.renaming_destinations))
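+ # GN has no list-length builtin, so compute the two lengths by counting
+ # elements with foreach.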
+ _source_count = 0
+ foreach(_, invoker.renaming_sources) {
+ _source_count += 1
+ }
+ _dest_count = 0
+ foreach(_, invoker.renaming_destinations) {
+ _dest_count += 1
+ }
+ assert(
+ _source_count == _dest_count,
+ "android_assets() renaming_sources.length != renaming_destinations.length")
+ asset_renaming_sources = invoker.renaming_sources
+ asset_renaming_destinations = invoker.renaming_destinations
+ }
+ }
+
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "visibility",
+ ])
+ public_deps = [
+ ":$_build_config_target_name",
+ ]
+ }
+ }
+
+ # Declare a group() that supports forwarding java dependency information.
+ #
+ # Example
+ # java_group("conditional_deps") {
+ # if (enable_foo) {
+ # deps = [":foo_java"]
+ # }
+ # }
+ template("java_group") {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "input_jars_paths",
+ ])
+ write_build_config("$target_name$build_config_target_suffix") {
+ type = "group"
+ build_config = "$target_gen_dir/${invoker.target_name}.build_config"
+ supports_android = true
+ if (defined(invoker.deps)) {
+ possible_config_deps = invoker.deps
+ }
+ }
+ group(target_name) {
+ forward_variables_from(invoker, "*")
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$target_name$build_config_target_suffix" ]
+ }
+ }
+
+ # Declare a target that generates localized strings.xml from a .grd file.
+ #
+ # If this target is included in the deps of an android resources/library/apk,
+ # the strings.xml will be included with that target.
+ #
+ # Variables
+ # deps: Specifies the dependencies of this target.
+ # grd_file: Path to the .grd file to generate strings.xml from.
+ # outputs: Expected grit outputs (see grit rule).
+ #
+ # Example
+ # java_strings_grd("foo_strings_grd") {
+ # grd_file = "foo_strings.grd"
+ # }
+ template("java_strings_grd") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ # JUnit tests use resource zip files. These must not be put in the gen/
+ # directory or they will not be available to tester bots.
+ _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+ _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+
+ _grit_target_name = "${target_name}__grit"
+ _grit_output_dir = "$target_gen_dir/${target_name}_grit_output"
+
+ grit(_grit_target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "defines",
+ ])
+ grit_flags = [
+ "-E",
+ "ANDROID_JAVA_TAGGED_ONLY=false",
+ ]
+ output_dir = _grit_output_dir
+ resource_ids = ""
+ source = invoker.grd_file
+ outputs = invoker.outputs
+ }
+
+ _zip_target_name = "${target_name}__zip"
+
+ zip(_zip_target_name) {
+ base_dir = _grit_output_dir
+
+ # This needs to get outputs from grit's internal target, not the final
+ # source_set.
+ inputs = get_target_outputs(":${_grit_target_name}_grit")
+ output = _resources_zip
+ deps = [
+ ":$_grit_target_name",
+ ]
+ }
+
+ android_generated_resources(target_name) {
+ generating_target_name = ":$_zip_target_name"
+ generated_resources_zip = _resources_zip
+ }
+ }
+
+ # Declare a target that packages strings.xml generated from a grd file.
+ #
+ # If this target is included in the deps of an android resources/library/apk,
+ # the strings.xml will be included with that target.
+ #
+ # Variables
+ # grit_output_dir: directory containing grit-generated files.
+ # generated_files: list of android resource files to package.
+ #
+ # Example
+ # java_strings_grd_prebuilt("foo_strings_grd") {
+ # grit_output_dir = "$root_gen_dir/foo/grit"
+ # generated_files = [
+ # "values/strings.xml"
+ # ]
+ # }
+ template("java_strings_grd_prebuilt") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ # JUnit tests use resource zip files. These must not be put in the gen/
+ # directory or they will not be available to tester bots.
+ _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+ _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+
+ _zip_target_name = "${target_name}__zip"
+
+ zip(_zip_target_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+
+ base_dir = invoker.grit_output_dir
+ inputs = rebase_path(invoker.generated_files, ".", base_dir)
+ output = _resources_zip
+ if (defined(invoker.deps)) {
+ deps = invoker.deps
+ }
+ }
+
+ android_generated_resources(target_name) {
+ generating_target_name = ":$_zip_target_name"
+ generated_resources_zip = _resources_zip
+ }
+ }
+
+ # Declare a Java executable target
+ #
+ # Same as java_library, but also creates a wrapper script within
+ # $root_out_dir/bin.
+ #
+ # Supports all variables of java_library(), plus:
+ # main_class: When specified, a wrapper script is created within
+ # $root_build_dir/bin to launch the binary with the given class as the
+ # entrypoint.
+ # wrapper_script_name: Filename for the wrapper script (default=target_name)
+ # wrapper_script_args: List of additional arguments for the wrapper script.
+ #
+ # Example
+ # java_binary("foo") {
+ # java_files = [ "org/chromium/foo/FooMain.java" ]
+ # deps = [ ":bar_java" ]
+ # main_class = "org.chromium.foo.FooMain"
+ # }
+ #
+ # java_binary("foo") {
+ # jar_path = "lib/prebuilt.jar"
+ # deps = [ ":bar_java" ]
+ # main_class = "org.chromium.foo.FooMain"
+ # }
+ template("java_binary") {
+ java_library_impl(target_name) {
+ forward_variables_from(invoker, "*")
+ type = "java_binary"
+ }
+ }
+
+ # Declare a Java Annotation Processor.
+ #
+ # Supports all variables of java_library(), plus:
+ # jar_path: Path to a prebuilt jar. Mutually exclusive with java_files &
+ # srcjar_deps.
+ #   main_class: The fully-qualified class name of the processor's entry
+ # point.
+ #
+ # Example
+ # java_annotation_processor("foo_processor") {
+ # java_files = [ "org/chromium/foo/FooProcessor.java" ]
+ # deps = [ ":bar_java" ]
+ # main_class = "org.chromium.foo.FooProcessor"
+ # }
+ #
+ # java_annotation_processor("foo_processor") {
+ # jar_path = "lib/prebuilt.jar"
+ # main_class = "org.chromium.foo.FooMain"
+ # }
+ #
+ # java_library("...") {
+ # annotation_processor_deps = [":foo_processor"]
+ # }
+ #
+ template("java_annotation_processor") {
+ java_library_impl(target_name) {
+ forward_variables_from(invoker, "*")
+ type = "java_annotation_processor"
+ }
+ }
+
+ # Declare a Junit executable target
+ #
+ # This target creates an executable from java code for running as a junit test
+ # suite. The executable will be in the output folder's /bin/ directory.
+ #
+ # Supports all variables of java_binary().
+ #
+ # Example
+ # junit_binary("foo") {
+ # java_files = [ "org/chromium/foo/FooTest.java" ]
+ # deps = [ ":bar_java" ]
+ # }
+ template("junit_binary") {
+ testonly = true
+
+ _java_binary_target_name = "${target_name}__java_binary"
+ _test_runner_target_name = "${target_name}__test_runner_script"
+ _main_class = "org.chromium.testing.local.JunitTestMain"
+
+ _build_config = "$target_gen_dir/$target_name.build_config"
+ _build_config_target_name = "$target_name$build_config_target_suffix"
+ _deps = [
+ "//testing/android/junit:junit_test_support",
+ "//third_party/junit",
+ "//third_party/mockito:mockito_java",
+ "//third_party/robolectric:robolectric_all_java",
+
+ # This dep is required if any deps require android (but it doesn't hurt
+ # to add it regardless) and is used by bytecode rewritten classes.
+ "//build/android/buildhooks:build_hooks_android_impl_java",
+ ]
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ # A package name or a manifest is required in order to have resources. This
+ # default package name is provided so that junit tests that do not care
+ # about the package name can still use resources without having to
+ # explicitly set one.
+ if (defined(invoker.package_name)) {
+ _package_name = invoker.package_name
+ } else if (!defined(invoker.android_manifest_path)) {
+ _package_name = "org.chromium.test"
+ }
+
+ _prepare_resources_target = "${target_name}__prepare_resources"
+ prepare_resources(_prepare_resources_target) {
+ deps = _deps + [ ":$_build_config_target_name" ]
+ build_config = _build_config
+ srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+ if (defined(_package_name)) {
+ custom_package = _package_name
+ }
+ if (defined(invoker.android_manifest_path)) {
+ android_manifest = invoker.android_manifest_path
+ } else {
+ android_manifest = "//build/android/AndroidManifest.xml"
+ }
+ }
+
+ _jni_srcjar_target = "${target_name}__final_jni"
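+ # Capture target_name here; it is rebound inside the template invocation
+ # below.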
+ _outer_target_name = target_name
+ generate_jni_registration(_jni_srcjar_target) {
+ enable_native_mocks = true
+ require_native_mocks = true
+ target = ":$_outer_target_name"
+ }
+
+ java_library_impl(_java_binary_target_name) {
+ forward_variables_from(invoker, "*", [ "deps" ])
+ type = "junit_binary"
+ main_target_name = invoker.target_name
+
+ # Include the android SDK jar(s) for resource processing.
+ include_android_sdk = true
+
+ # Robolectric can handle deps that set !supports_android as well as those
+ # that set requires_android.
+ bypass_platform_checks = true
+ deps = _deps
+ testonly = true
+ main_class = _main_class
+ wrapper_script_name = "helper/$main_target_name"
+ if (!defined(srcjar_deps)) {
+ srcjar_deps = []
+ }
+ srcjar_deps += [
+ ":$_jni_srcjar_target",
+ ":$_prepare_resources_target",
+
+ # This dep is required for any targets that depend on //base:base_java.
+ "//base:base_build_config_gen",
+ ]
+ }
+
+ test_runner_script(_test_runner_target_name) {
+ test_name = invoker.target_name
+ test_suite = invoker.target_name
+ test_type = "junit"
+ ignore_all_data_deps = true
+ forward_variables_from(invoker, [ "android_manifest_path" ])
+ if (defined(_package_name)) {
+ package_name = _package_name
+ }
+ }
+
+ group(target_name) {
+ public_deps = [
+ ":$_build_config_target_name",
+ ":$_java_binary_target_name",
+ ":$_test_runner_target_name",
+ ]
+ }
+ }
+
+ # Declare a java library target
+ #
+ # Variables
+ # deps: Specifies the dependencies of this target. Java targets in this list
+ # will be added to the javac classpath.
+ # annotation_processor_deps: List of java_annotation_processor targets to
+ # use when compiling.
+ #
+ # jar_path: Path to a prebuilt jar. Mutually exclusive with java_files &
+ # srcjar_deps.
+ # java_files: List of .java files included in this library.
+ # srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+ # will be added to java_files and be included in this library.
+ #
+ # input_jars_paths: A list of paths to the jars that should be included
+ # in the compile-time classpath. These are in addition to library .jars
+ # that appear in deps.
+ #   classpath_deps: Deps that should be added to the classpath for this
+ #     target, but not linked into the apk (use this for annotation
+ #     processors).
+ #
+ # chromium_code: If true, extra analysis warning/errors will be enabled.
+ # enable_errorprone: If true, enables the errorprone compiler.
+ #
+ # jar_excluded_patterns: List of patterns of .class files to exclude.
+ # jar_included_patterns: List of patterns of .class files to include.
+ # When omitted, all classes not matched by jar_excluded_patterns are
+ # included. When specified, all non-matching .class files are stripped.
+ #
+ # output_name: File name for the output .jar (not including extension).
+ # Defaults to the input .jar file name.
+ #
+ # proguard_configs: List of proguard configs to use in final apk step for
+ # any apk that depends on this library.
+ #
+ # supports_android: If true, Android targets (android_library, android_apk)
+ # may depend on this target. Note: if true, this target must only use the
+ # subset of Java available on Android.
+ # bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+ # dependencies for this target. This will allow depending on an
+ # android_library target, for example.
+ #
+ # additional_jar_files: Use to package additional files (Java resources)
+ # into the output jar. Pass a list of length-2 lists with format:
+ # [ [ path_to_file, path_to_put_in_jar ] ]
+ #
+ # javac_args: Additional arguments to pass to javac.
+ # errorprone_args: Additional arguments to pass to errorprone.
+ #
+ # data_deps, testonly
+ #
+ # Example
+ # java_library("foo_java") {
+ # java_files = [
+ # "org/chromium/foo/Foo.java",
+ # "org/chromium/foo/FooInterface.java",
+ # "org/chromium/foo/FooService.java",
+ # ]
+ # deps = [
+ # ":bar_java"
+ # ]
+ # srcjar_deps = [
+ # ":foo_generated_enum"
+ # ]
+ # jar_excluded_patterns = [
+ # "*/FooService.class", "org/chromium/FooService\$*.class"
+ # ]
+ # }
+ template("java_library") {
+ java_library_impl(target_name) {
+ forward_variables_from(invoker, "*")
+ type = "java_library"
+ }
+ }
+
+ # Declare a java library target for a prebuilt jar
+ #
+ # Supports all variables of java_library().
+ #
+ # Example
+ # java_prebuilt("foo_java") {
+ # jar_path = "foo.jar"
+ # deps = [
+ # ":foo_resources",
+ # ":bar_java"
+ # ]
+ # }
+ template("java_prebuilt") {
+ java_library_impl(target_name) {
+ forward_variables_from(invoker, "*")
+ type = "java_library"
+ }
+ }
+
+ # Combines all dependent .jar files into a single .jar file.
+ #
+ # Variables:
+ # output: Path to the output jar.
+ # override_build_config: Use a pre-existing .build_config. Must be of type
+ # "apk".
+ # use_interface_jars: Use all dependent interface .jars rather than
+ # implementation .jars.
+ # use_unprocessed_jars: Use unprocessed / undesugared .jars.
+ # direct_deps_only: Do not recurse on deps.
+ # jar_excluded_patterns (optional)
+ # List of globs for paths to exclude.
+ #
+ # Example
+ # dist_jar("lib_fatjar") {
+ # deps = [ ":my_java_lib" ]
+ # output = "$root_build_dir/MyLibrary.jar"
+ # }
+ # dist_jar("sideloaded_dex") {
+ # deps = [ ":my_java_lib" ]
+ # output = "$root_build_dir/MyLibrary.jar"
+ # dex_path = "$root_build_dir/MyLibrary.dex"
+ # }
+ template("dist_jar") {
+ forward_variables_from(invoker, [ "testonly" ])
+ _supports_android =
+ !defined(invoker.supports_android) || invoker.supports_android
+ _requires_android =
+ defined(invoker.requires_android) && invoker.requires_android
+ _use_interface_jars =
+ defined(invoker.use_interface_jars) && invoker.use_interface_jars
+ _use_unprocessed_jars =
+ defined(invoker.use_unprocessed_jars) && invoker.use_unprocessed_jars
+ _direct_deps_only =
+ defined(invoker.direct_deps_only) && invoker.direct_deps_only
+ assert(!(_use_unprocessed_jars && _use_interface_jars),
+ "Cannot set both use_interface_jars and use_unprocessed_jars")
+
+ _jar_target_name = target_name
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps = invoker.deps
+ }
+ if (_supports_android) {
+ _deps += [ "//third_party/android_sdk:android_sdk_java" ]
+ }
+ _enable_build_hooks =
+ _supports_android &&
+ (!defined(invoker.no_build_hooks) || !invoker.no_build_hooks)
+ if (_enable_build_hooks && _requires_android) {
+ _deps += [ "//build/android/buildhooks:build_hooks_android_impl_java" ]
+ }
+
+ if (defined(invoker.override_build_config)) {
+ _build_config = invoker.override_build_config
+ } else {
+ _build_config = "$target_gen_dir/$target_name.build_config"
+ _build_config_target_name = "$target_name$build_config_target_suffix"
+
+ write_build_config(_build_config_target_name) {
+ type = "dist_jar"
+ supports_android = _supports_android
+ requires_android = _requires_android
+ possible_config_deps = _deps
+ build_config = _build_config
+ }
+
+ _deps += [ ":$_build_config_target_name" ]
+ }
+
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ action_with_pydeps(_jar_target_name) {
+ forward_variables_from(invoker, [ "data" ])
+ script = "//build/android/gyp/zip.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ deps = _deps
+
+ inputs = [
+ _build_config,
+ ]
+
+ outputs = [
+ invoker.output,
+ ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--output",
+ rebase_path(invoker.output, root_build_dir),
+ "--no-compress",
+ ]
+
+ if (_direct_deps_only) {
+ if (_use_interface_jars) {
+ args += [ "--input-zips=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
+ } else if (_use_unprocessed_jars) {
+ args += [
+ "--input-zips=@FileArg($_rebased_build_config:javac:classpath)",
+ ]
+ } else {
+ assert(
+ false,
+ "direct_deps_only does not work without use_interface_jars or use_unprocessed_jars")
+ }
+ } else {
+ if (_use_interface_jars) {
+ args += [ "--input-zips=@FileArg($_rebased_build_config:dist_jar:all_interface_jars)" ]
+ } else if (_use_unprocessed_jars) {
+ args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
+ } else {
+ args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:java_runtime_classpath)" ]
+ }
+ }
+ if (defined(invoker.jar_excluded_patterns)) {
+ args +=
+ [ "--input-zips-excluded-globs=${invoker.jar_excluded_patterns}" ]
+ }
+ }
+ }
+
+ # Combines all dependent .jar files into a single proguarded .dex file.
+ #
+ # Variables:
+ # output: Path to the output dex.
+ # proguard_configs: List of proguard configs.
+ # proguard_jar_path: The path to proguard.jar you wish to use. If undefined,
+ # the proguard used will be the checked in one in //third_party/proguard.
+ #
+ # Example
+ #   proguarded_dist_dex("sideloaded_dex") {
+ #     deps = [ ":my_java_lib" ]
+ #     output = "$root_build_dir/MyLibrary.dex.zip"
+ #   }
+ template("proguarded_dist_dex") {
+ _deps = [
+ "//third_party/android_sdk:android_sdk_java",
+ "//build/android/buildhooks:build_hooks_android_impl_java",
+ ]
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ _build_config = "$target_gen_dir/$target_name.build_config"
+ _build_config_target_name = "$target_name$build_config_target_suffix"
+
+ write_build_config(_build_config_target_name) {
+ type = "dist_jar"
+ forward_variables_from(invoker, [ "proguard_configs" ])
+ supports_android = true
+ requires_android = true
+ proguard_enabled = true
+ possible_config_deps = _deps
+ build_config = _build_config
+ }
+
+ _deps += [ ":$_build_config_target_name" ]
+
+ dex(target_name) {
+ deps = _deps
+ build_config = _build_config
+ proguard_enabled = true
+ forward_variables_from(invoker,
+ [
+ "proguard_configs",
+ "min_sdk_version",
+ ])
+ output = invoker.output
+ }
+ }
+
+ # Creates an Android .aar library.
+ #
+ # Currently supports:
+ # * AndroidManifest.xml
+ # * classes.jar
+ # * jni/
+ # * res/
+ # * R.txt
+ # * proguard.txt
+ # Does not yet support:
+ # * public.txt
+ # * annotations.zip
+ # * assets/
+ # See: https://developer.android.com/studio/projects/android-library.html#aar-contents
+ #
+ # Variables:
+ # output: Path to the output .aar.
+ # proguard_configs: List of proguard configs (optional).
+ # android_manifest: Path to AndroidManifest.xml (optional).
+ # native_libraries: list of native libraries (optional).
+ # direct_deps_only: Do not recurse on deps. (optional, defaults false).
+ #
+ # Example
+ # dist_aar("my_aar") {
+ # deps = [ ":my_java_lib" ]
+ # output = "$root_build_dir/MyLibrary.aar"
+ # }
+ template("dist_aar") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps = invoker.deps
+ }
+
+ _direct_deps_only =
+ defined(invoker.direct_deps_only) && invoker.direct_deps_only
+
+ _build_config = "$target_gen_dir/$target_name.build_config"
+ _build_config_target_name = "$target_name$build_config_target_suffix"
+
+ write_build_config(_build_config_target_name) {
+ type = "dist_aar"
+ forward_variables_from(invoker, [ "proguard_configs" ])
+ possible_config_deps = _deps
+ supports_android = true
+ requires_android = true
+ build_config = _build_config
+ }
+
+ _deps += [ ":$_build_config_target_name" ]
+
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker, [ "data" ])
+ depfile = "$target_gen_dir/$target_name.d"
+ deps = _deps
+ script = "//build/android/gyp/dist_aar.py"
+
+ inputs = [
+ _build_config,
+ ]
+
+ # Although these will be listed as deps in the depfile, they must also
+ # appear here so that "gn analyze" knows about them.
+ # https://crbug.com/827197
+ if (defined(invoker.proguard_configs)) {
+ inputs += invoker.proguard_configs
+ }
+
+ outputs = [
+ invoker.output,
+ ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--output",
+ rebase_path(invoker.output, root_build_dir),
+ "--dependencies-res-zips=@FileArg($_rebased_build_config:resources:dependency_zips)",
+ "--r-text-files=@FileArg($_rebased_build_config:resources:extra_r_text_files)",
+ "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)",
+ ]
+ if (_direct_deps_only) {
+ args += [ "--jars=@FileArg($_rebased_build_config:javac:classpath)" ]
+ } else {
+ args += [ "--jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ]
+ }
+ if (defined(invoker.android_manifest)) {
+ args += [
+ "--android-manifest",
+ rebase_path(invoker.android_manifest, root_build_dir),
+ ]
+ }
+ if (defined(invoker.native_libraries) && invoker.native_libraries != []) {
+ inputs += invoker.native_libraries
+ _rebased_native_libraries =
+ rebase_path(invoker.native_libraries, root_build_dir)
+
+ args += [
+ "--native-libraries=$_rebased_native_libraries",
+ "--abi=$android_app_abi",
+ ]
+ }
+ }
+ }
+
+ # Declare an Android library target
+ #
+ # This target creates an Android library containing java code and Android
+ # resources.
+ #
+ # Supports all variables of java_library(), plus:
+ # android_manifest_for_lint: Path to AndroidManifest.xml (optional). This
+ # manifest will be used by Android lint, but will not be merged into apks.
+ # To have a manifest merged, add it to an android_resources() target.
+ # deps: In addition to defining java deps, this can also include
+ # android_assets() and android_resources() targets.
+ # dex_path: If set, the resulting .dex.jar file will be placed under this
+ # path.
+ # alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
+ # replaces the default android_sdk_ijar.
+ # alternative_android_sdk_ijar_dep: the target that generates
+ # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
+ # is used.
+ # alternative_android_sdk_jar: actual jar corresponding to
+ # alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
+ # is used.
+ #
+ # Example
+ # android_library("foo_java") {
+ # java_files = [
+ # "android/org/chromium/foo/Foo.java",
+ # "android/org/chromium/foo/FooInterface.java",
+ # "android/org/chromium/foo/FooService.java",
+ # ]
+ # deps = [
+ # ":bar_java"
+ # ]
+ # srcjar_deps = [
+ # ":foo_generated_enum"
+ # ]
+ # jar_excluded_patterns = [
+ # "*/FooService.class", "org/chromium/FooService\$*.class"
+ # ]
+ # }
+ template("android_library") {
+ java_library(target_name) {
+ forward_variables_from(invoker, "*")
+
+ supports_android = true
+ requires_android = true
+
+ if (!defined(jar_excluded_patterns)) {
+ jar_excluded_patterns = []
+ }
+ jar_excluded_patterns += [
+ "*/R.class",
+ "*/R\$*.class",
+ "*/Manifest.class",
+ "*/Manifest\$*.class",
+ ]
+ if (use_hashed_jni_names) {
+ jar_excluded_patterns += [ "J/N.class" ]
+ } else {
+ jar_excluded_patterns += [ "*/GEN_JNI.class" ]
+ }
+ }
+ }
+
+ # Declare an Android library target for a prebuilt jar
+ #
+ # This target creates an Android library containing java code and Android
+ # resources.
+ #
+ # Supports all variables of android_library().
+ #
+ # Example
+ # android_java_prebuilt("foo_java") {
+ # jar_path = "foo.jar"
+ # deps = [
+ # ":foo_resources",
+ # ":bar_java"
+ # ]
+ # }
+ template("android_java_prebuilt") {
+ android_library(target_name) {
+ forward_variables_from(invoker, "*")
+ }
+ }
+
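+ # Declare a Java library target for a prebuilt jar provided by the Android
+ # system (type "system_java_library"); build hooks are not applied to it.
+ #
+ # Example (illustrative usage, mirroring java_prebuilt()):
+ #   android_system_java_prebuilt("foo_system_java") {
+ #     jar_path = "foo.jar"
+ #   }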
+ template("android_system_java_prebuilt") {
+ java_library_impl(target_name) {
+ forward_variables_from(invoker, "*")
+ no_build_hooks = true
+ supports_android = true
+ type = "system_java_library"
+ }
+ }
+
+ # Creates org/chromium/base/BuildConfig.java
+ # This doesn't really belong in //build since it generates a file for //base.
+ # However, we don't currently have a better way to include this file in all
+ # apks that depend on //base:base_java.
+ #
+ # Variables:
+ # use_final_fields: True to use final fields. All other variables are
+ # ignored when this is false.
+ # build_config: Path to build_config used for locale list
+ # enable_multidex: Value for ENABLE_MULTIDEX.
+ # firebase_app_id: Value for FIREBASE_APP_ID.
+ # min_sdk_version: Value for MIN_SDK_VERSION.
+ #
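+ # Example (illustrative values only; not taken from a real target):
+ #   generate_build_config_srcjar("foo_build_config_srcjar") {
+ #     use_final_fields = true
+ #     enable_multidex = true
+ #     build_config = "$target_gen_dir/foo.build_config"
+ #   }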
+ template("generate_build_config_srcjar") {
+ java_cpp_template(target_name) {
+ package_path = "org/chromium/base"
+ sources = [
+ "//base/android/java/templates/BuildConfig.template",
+ ]
+ defines = []
+
+ # TODO(agrieve): These two are not target-specific and should be moved
+ # to BuildHooks.java.
+ # Set these even when !use_final_fields so that they have correct default
+ # values within junit_binary().
+ if (is_java_debug || dcheck_always_on) {
+ defines += [ "_DCHECK_IS_ON" ]
+ }
+ if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
+ defines += [ "_IS_UBSAN" ]
+ }
+
+ if (invoker.use_final_fields) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ defines += [ "USE_FINAL" ]
+ if (invoker.enable_multidex) {
+ defines += [ "ENABLE_MULTIDEX" ]
+ }
+ inputs = [
+ invoker.build_config,
+ ]
+ _rebased_build_config =
+ rebase_path(invoker.build_config, root_build_dir)
+ defines += [
+ "COMPRESSED_LOCALE_LIST=" +
+ "@FileArg($_rebased_build_config:compressed_locales_java_list)",
+ "UNCOMPRESSED_LOCALE_LIST=" +
+ "@FileArg($_rebased_build_config:uncompressed_locales_java_list)",
+ ]
+ if (defined(invoker.firebase_app_id)) {
+ defines += [ "_FIREBASE_APP_ID=${invoker.firebase_app_id}" ]
+ }
+ if (defined(invoker.min_sdk_version)) {
+ defines += [ "_MIN_SDK_VERSION=${invoker.min_sdk_version}" ]
+ }
+ if (defined(invoker.resources_version_variable)) {
+ defines += [
+ "_RESOURCES_VERSION_VARIABLE=${invoker.resources_version_variable}",
+ ]
+ }
+ }
+ }
+ }
+
+ # Declare an Android app module target, which is used as the basis for an
+ # Android APK or an Android app bundle module.
+ #
+ # Supports all variables of android_library(), plus:
+ # android_manifest: Path to AndroidManifest.xml.
+ # android_manifest_dep: Target that generates AndroidManifest (if applicable)
+ # png_to_webp: If true, pngs (with the exception of 9-patch) are
+ # converted to webp during resource packaging.
+ # loadable_modules: List of paths to native libraries to include. Different
+ # from |shared_libraries| in that:
+ # * dependencies of this .so are not automatically included
+ # * ".cr.so" is never added
+ # * they are not side-loaded for _incremental targets.
+ # * load_library_from_apk, use_chromium_linker,
+ # and enable_relocation_packing do not apply
+ # Use this instead of shared_libraries when you are going to load the library
+ # conditionally, and only when shared_libraries doesn't work for you.
+ # secondary_abi_loadable_modules: This is the loadable_modules analog to
+ # secondary_abi_shared_libraries.
+ # shared_libraries: List shared_library targets to bundle. If these
+ # libraries depend on other shared_library targets, those dependencies will
+ # also be included in the apk (e.g. for is_component_build).
+ # secondary_abi_shared_libraries: secondary abi shared_library targets to
+ # bundle. If these libraries depend on other shared_library targets, those
+ # dependencies will also be included in the apk (e.g. for is_component_build).
+ # native_lib_placeholders: List of placeholder filenames to add to the apk
+ # (optional).
+ # secondary_native_lib_placeholders: List of placeholder filenames to add to
+ # the apk for the secondary ABI (optional).
+ # write_asset_list: Adds an extra file to the assets, which contains a list of
+ # all other asset files.
+ # generate_buildconfig_java: If defined and false, skip generating the
+ # BuildConfig java class describing the build configuration. The default
+ # is true for non-test APKs.
+ # generate_final_jni: If defined and false, skip generating the
+ # GEN_JNI srcjar.
+ # jni_registration_header: If specified, causes the
+ # ${target_name}__final_jni target to additionally output a
+ # header file to this path for use with manual JNI registration.
+ # jni_sources_blacklist: List of source path to exclude from the
+ # final_jni step.
+ # firebase_app_id: The value for BuildConfig.FIREBASE_APP_ID (optional).
+ # Identifier is sent with crash reports to enable Java stack deobfuscation.
+ # aapt_locale_whitelist: If set, all locales not in this list will be
+ # stripped from resources.arsc.
+ # resource_blacklist_regex: Causes all drawable images matching the regex to
+ # be excluded (mipmaps are still included).
+ # resource_blacklist_exceptions: A list of globs used when
+ # resource_blacklist_regex is set. Files that match this whitelist will
+ # still be included.
+ # shared_resources: True if this is a runtime shared library APK, like
+ # the system_webview_apk target. Ensures that its resources can be
+ # used by the loading application process.
+ #   app_as_shared_lib: True if this is a regular application apk that can
+ #     also serve as a runtime shared library, like the monochrome_public_apk
+ #     target. Ensures that the resources are usable both by the APK running
+ #     as an application and by another process that loads it at runtime.
+ # shared_resources_whitelist_target: Optional name of a target specifying
+ # an input R.txt file that lists the resources that can be exported
+ # by the APK when shared_resources or app_as_shared_lib is defined.
+ # uncompress_shared_libraries: True if shared libraries should be stored
+ # uncompressed in the APK. Must be unset or true if load_library_from_apk
+ # is set to true.
+ # uncompress_dex: Store final .dex files uncompressed in the apk.
+ # optimize_resources: True if resource names should be stripped from the
+ # resources.arsc file in the apk or module.
+ # resources_config_path: Path to the aapt2 optimize config file that tags
+ # resources with acceptable/non-acceptable optimizations.
+ # verify_android_configuration: Enables verification of expected merged
+ # manifest and proguard flags based on a golden file.
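+ #
+ # This template is not meant to be used directly; it is wrapped by
+ # android_apk() and android_app_bundle_module(), which set is_bundle_module
+ # accordingly.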
+ template("android_apk_or_module") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ assert(defined(invoker.final_apk_path) || defined(invoker.name))
+ assert(defined(invoker.android_manifest))
+ _gen_dir = "$target_gen_dir/$target_name"
+ _base_path = "$_gen_dir/$target_name"
+ _build_config = "$target_gen_dir/$target_name.build_config"
+ _build_config_target = "$target_name$build_config_target_suffix"
+
+ # Mark as used
+ assert(!defined(invoker.min_sdk_version) || invoker.min_sdk_version != 0)
+
+ # JUnit tests use resource zip files. These must not be put in the gen/
+ # directory or they will not be available to tester bots.
+ _jar_path = "$_base_path.jar"
+ _lib_dex_path = "$_base_path.dex.jar"
+ _template_name = target_name
+
+ _is_bundle_module =
+ defined(invoker.is_bundle_module) && invoker.is_bundle_module
+ _is_base_module = defined(invoker.is_base_module) && invoker.is_base_module
+
+ _enable_multidex =
+ !defined(invoker.enable_multidex) || invoker.enable_multidex
+ _final_dex_path = "$_gen_dir/classes.dex.zip"
+
+ if (defined(invoker.final_apk_path)) {
+ _final_apk_path = invoker.final_apk_path
+ } else {
+ _final_apk_path = "$root_build_dir/apks/${invoker.name}.apk"
+ }
+ if (!_is_bundle_module) {
+ _final_rtxt_path = "${_final_apk_path}.R.txt"
+ }
+ _final_apk_path_no_ext_list =
+ process_file_template([ _final_apk_path ],
+ "{{source_dir}}/{{source_name_part}}")
+ _final_apk_path_no_ext = _final_apk_path_no_ext_list[0]
+ assert(_final_apk_path_no_ext != "") # Mark as used.
+
+ # Non-base bundle modules create only proto resources.
+ if (!_is_bundle_module || _is_base_module) {
+ _packaged_resources_path = "$target_out_dir/$target_name.ap_"
+ }
+ if (_is_bundle_module) {
+ # Path to the intermediate proto-format resources zip file.
+ _proto_resources_path = "$target_gen_dir/$target_name.proto.ap_"
+ } else {
+ # resource_sizes.py needs to be able to find the unpacked resources.arsc
+ # file based on the apk name to compute normalized size.
+ _resource_sizes_arsc_path =
+ "$root_out_dir/arsc/" +
+ rebase_path(_final_apk_path_no_ext, root_build_dir) + ".ap_"
+ }
+ _optimize_resources =
+ defined(invoker.optimize_resources) && invoker.optimize_resources
+ if (_optimize_resources) {
+ _optimized_resources_path = "$target_out_dir/$_template_name.optimized."
+ if (_is_bundle_module) {
+ _optimized_resources_path += ".proto.ap_"
+ } else {
+ _optimized_resources_path += ".ap_"
+ }
+ }
+
+ if (defined(invoker.version_code)) {
+ _version_code = invoker.version_code
+ } else {
+ _version_code = android_default_version_code
+ }
+
+ if (android_override_version_code != "") {
+ _version_code = android_override_version_code
+ }
+
+ if (defined(invoker.version_name)) {
+ _version_name = invoker.version_name
+ } else {
+ _version_name = android_default_version_name
+ }
+
+ if (android_override_version_name != "") {
+ _version_name = android_override_version_name
+ }
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps = invoker.deps
+ }
+
+ _srcjar_deps = []
+ if (defined(invoker.srcjar_deps)) {
+ _srcjar_deps = invoker.srcjar_deps
+ }
+
+ _use_build_hooks =
+ !defined(invoker.no_build_hooks) || !invoker.no_build_hooks
+ if (defined(invoker.build_hooks_android_impl_deps)) {
+ assert(_use_build_hooks,
+ "Cannot set no_build_hooks and build_hooks_android_impl_deps at " +
+ "the same time")
+ _deps += invoker.build_hooks_android_impl_deps
+ } else if (_use_build_hooks) {
+ _deps += [ "//build/android/buildhooks:build_hooks_android_impl_java" ]
+ }
+
+ _android_root_manifest_deps = []
+ if (defined(invoker.android_manifest_dep)) {
+ _android_root_manifest_deps = [ invoker.android_manifest_dep ]
+ }
+ _android_root_manifest = invoker.android_manifest
+
+ _use_chromium_linker =
+ defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
+
+ _load_library_from_apk =
+ defined(invoker.load_library_from_apk) && invoker.load_library_from_apk
+
+ assert(_use_chromium_linker || true) # Mark as used.
+ assert(!_load_library_from_apk || _use_chromium_linker,
+ "load_library_from_apk requires use_chromium_linker")
+
+ # Make sure that uncompress_shared_libraries is set to true if
+ # load_library_from_apk is true.
+ if (defined(invoker.uncompress_shared_libraries)) {
+ _uncompress_shared_libraries = invoker.uncompress_shared_libraries
+ assert(!_load_library_from_apk || _uncompress_shared_libraries)
+ } else {
+ _uncompress_shared_libraries = _load_library_from_apk
+ }
+
+ # The dependency that builds the chromium linker, if one is needed.
+ _native_libs_deps = []
+ _shared_libraries_is_valid =
+ defined(invoker.shared_libraries) && invoker.shared_libraries != []
+ _secondary_abi_native_libs_deps = []
+ assert(_secondary_abi_native_libs_deps == []) # mark as used.
+ _secondary_abi_shared_libraries_is_valid =
+ defined(invoker.secondary_abi_shared_libraries) &&
+ invoker.secondary_abi_shared_libraries != []
+
+ if (_shared_libraries_is_valid) {
+ _native_libs_deps += invoker.shared_libraries
+
+ # To determine the filenames of all dependent shared libraries, write the
+ # runtime deps of |shared_libraries| to a file during "gn gen".
+ # write_build_config.py will then grep this file for *.so to obtain the
+ # complete list.
+ _runtime_deps_file =
+ "$target_gen_dir/${_template_name}.native.runtimedeps"
+ group("${_template_name}__runtime_deps") {
+ deps = _native_libs_deps
+ write_runtime_deps = _runtime_deps_file
+ }
+ } else {
+ # Must exist for instrumentation_test_apk() to depend on.
+ group("${_template_name}__runtime_deps") {
+ }
+ }
+
+ if (_secondary_abi_shared_libraries_is_valid) {
+ _secondary_abi_native_libs_deps += invoker.secondary_abi_shared_libraries
+
+ # To determine the filenames of all dependent shared libraries, write the
+ # runtime deps of |secondary_abi_shared_libraries| to a file during "gn gen".
+ # write_build_config.py will then grep this file for *.so to obtain the
+ # complete list.
+ _secondary_abi_runtime_deps_file =
+ "$target_gen_dir/${_template_name}.secondary.abi.native.runtimedeps"
+ group("${_template_name}__secondary_abi__runtime_deps") {
+ deps = _secondary_abi_native_libs_deps
+ write_runtime_deps = _secondary_abi_runtime_deps_file
+ }
+ } else {
+ # Must exist for instrumentation_test_apk() to depend on.
+ group("${_template_name}__secondary_abi__runtime_deps") {
+ }
+ }
+
+ if (_shared_libraries_is_valid ||
+ _secondary_abi_shared_libraries_is_valid) {
+ _native_lib_version_rule = ""
+ if (defined(invoker.native_lib_version_rule)) {
+ _native_lib_version_rule = invoker.native_lib_version_rule
+ }
+ _native_lib_version_arg = "\"\""
+ if (defined(invoker.native_lib_version_arg)) {
+ _native_lib_version_arg = invoker.native_lib_version_arg
+ }
+ }
+
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ assert(_rebased_build_config != "") # Mark as used.
+
+ _generate_buildconfig_java = !defined(invoker.apk_under_test)
+ if (defined(invoker.generate_buildconfig_java)) {
+ _generate_buildconfig_java = invoker.generate_buildconfig_java
+ }
+
+ # JNI generation usually goes hand-in-hand with buildconfig generation.
+ _generate_final_jni = _generate_buildconfig_java
+ if (defined(invoker.generate_final_jni)) {
+ _generate_final_jni = invoker.generate_final_jni
+ }
+
+ _proguard_enabled =
+ defined(invoker.proguard_enabled) && invoker.proguard_enabled
+ if (_proguard_enabled) {
+ _proguard_mapping_path = "$_final_apk_path.mapping"
+ }
+
+ # TODO(crbug.com/864142): Allow incremental installs of bundle modules.
+ _incremental_allowed =
+ !_is_bundle_module &&
+ !(defined(invoker.never_incremental) && invoker.never_incremental)
+ if (_incremental_allowed) {
+ _target_dir_name = get_label_info(":$target_name", "dir")
+ _incremental_install_json_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.incremental.json"
+ }
+
+ _verify_android_configuration =
+ defined(invoker.verify_android_configuration) &&
+ invoker.verify_android_configuration && !is_java_debug
+ if (_verify_android_configuration) {
+ _target_src_dir = get_label_info(":$target_name", "dir")
+ }
+
+ _android_manifest =
+ "$target_gen_dir/${_template_name}_manifest/AndroidManifest.xml"
+ _merge_manifest_target = "${_template_name}__merge_manifests"
+ merge_manifests(_merge_manifest_target) {
+ input_manifest = _android_root_manifest
+ output_manifest = _android_manifest
+ build_config = _build_config
+ if (_verify_android_configuration) {
+ expected_manifest =
+ "$_target_src_dir/java/$_template_name.AndroidManifest.expected"
+ }
+ deps = _android_root_manifest_deps + [ ":$_build_config_target" ]
+ }
+
+ _final_deps = []
+
+ _enable_main_dex_list =
+ _enable_multidex &&
+ (!defined(invoker.min_sdk_version) || invoker.min_sdk_version < 21)
+ if (_enable_main_dex_list) {
+ _generated_proguard_main_dex_config =
+ "$_base_path.resources.main-dex-proguard.txt"
+ }
+ _generated_proguard_config = "$_base_path.resources.proguard.txt"
+
+ if (_generate_buildconfig_java &&
+ defined(invoker.product_version_resources_dep)) {
+ _deps += [ invoker.product_version_resources_dep ]
+ }
+
+ if (defined(invoker.alternative_android_sdk_dep)) {
+ _android_sdk_dep = invoker.alternative_android_sdk_dep
+ } else {
+ _android_sdk_dep = "//third_party/android_sdk:android_sdk_java"
+ }
+
+ if (defined(invoker.shared_resources_whitelist_target)) {
+ _whitelist_gen_dir =
+ get_label_info(invoker.shared_resources_whitelist_target,
+ "target_gen_dir")
+ _whitelist_target_name =
+ get_label_info(invoker.shared_resources_whitelist_target, "name")
+ _whitelist_r_txt_path =
+ "${_whitelist_gen_dir}/${_whitelist_target_name}" +
+ "__compile_resources_R.txt"
+ _whitelist_deps =
+ "${invoker.shared_resources_whitelist_target}__compile_resources"
+ }
+
+ _compile_resources_target = "${_template_name}__compile_resources"
+ _compile_resources_rtxt_out =
+ "${target_gen_dir}/${_compile_resources_target}_R.txt"
+ _compile_resources_emit_ids_out =
+ "${target_gen_dir}/${_compile_resources_target}.resource_ids"
+ compile_resources(_compile_resources_target) {
+ forward_variables_from(invoker,
+ [
+ "aapt_locale_whitelist",
+ "app_as_shared_lib",
+ "no_xml_namespaces",
+ "package_name",
+ "package_name_to_id_mapping",
+ "png_to_webp",
+ "resource_blacklist_exceptions",
+ "resource_blacklist_regex",
+ "resource_ids_provider_dep",
+ "resources_config_path",
+ "shared_resources",
+ "shared_resources_whitelist_locales",
+ "support_zh_hk",
+ ])
+ android_manifest = _android_manifest
+ version_code = _version_code
+ version_name = _version_name
+
+ if (defined(invoker.post_process_package_resources_script)) {
+ post_process_script = invoker.post_process_package_resources_script
+ }
+ srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+ r_text_out_path = _compile_resources_rtxt_out
+ emit_ids_out_path = _compile_resources_emit_ids_out
+ proguard_file = _generated_proguard_config
+ if (_enable_main_dex_list) {
+ proguard_file_main_dex = _generated_proguard_main_dex_config
+ }
+
+ build_config = _build_config
+ deps = _deps + [
+ ":$_merge_manifest_target",
+ ":$_build_config_target",
+ _android_sdk_dep,
+ ]
+
+ if (defined(invoker.apk_under_test)) {
+ # Set the arsc package name to match the apk_under_test package name
+ # so that test resources can reference under_test resources via
+ # @type/name syntax.
+ arsc_package_name =
+ "@FileArg($_rebased_build_config:resources:arsc_package_name)"
+
+ # Passing in the --emit-ids mapping will cause aapt2 to assign resource
+ # IDs that do not conflict with those from apk_under_test.
+ assert(!defined(resource_ids_provider_dep))
+ resource_ids_provider_dep = invoker.apk_under_test
+
+ deps += [ "${invoker.apk_under_test}__compile_resources" ]
+ include_resource =
+ get_label_info(invoker.apk_under_test, "target_out_dir") + "/" +
+ get_label_info(invoker.apk_under_test, "name") + ".ap_"
+ }
+
+ if (_is_bundle_module) {
+ proto_format = true
+ output = _proto_resources_path
+
+ if (defined(invoker.base_module_target)) {
+ deps += [ "${invoker.base_module_target}__compile_arsc_resources" ]
+ include_resource =
+ get_label_info(invoker.base_module_target, "target_out_dir") +
+ "/" + get_label_info(invoker.base_module_target, "name") + ".ap_"
+ }
+ } else {
+ output = _packaged_resources_path
+ }
+
+ if (_optimize_resources) {
+ optimized_resources_path = _optimized_resources_path
+ }
+
+ if (defined(invoker.shared_resources_whitelist_target)) {
+ # Used to ensure that the WebView resources are properly shared
+ # (i.e. are non-final and with package ID 0).
+ shared_resources_whitelist = _whitelist_r_txt_path
+ deps += [ _whitelist_deps ]
+ }
+ }
+ if (defined(_resource_sizes_arsc_path)) {
+ _copy_arsc_target = "${_template_name}__copy_arsc"
+ copy(_copy_arsc_target) {
+ deps = [
+ ":$_compile_resources_target",
+ ]
+
+ # resource_sizes.py doesn't care if it gets the optimized .arsc.
+ sources = [
+ _packaged_resources_path,
+ ]
+ outputs = [
+ _resource_sizes_arsc_path,
+ ]
+ }
+ _final_deps += [ ":$_copy_arsc_target" ]
+ }
+
+ if (!_is_bundle_module) {
+ # Output the R.txt file to a more easily discoverable location for
+ # archiving. This is necessary when stripping resource names so that we
+ # have an archive of resource names to ids for shipped apks (for
+ # debugging purposes). We copy the file rather than change the location
+ # of the original because other targets rely on the location of the R.txt
+ # file.
+ _copy_rtxt_target = "${_template_name}__copy_rtxt"
+ copy(_copy_rtxt_target) {
+ deps = [
+ ":$_compile_resources_target",
+ ]
+ sources = [
+ _compile_resources_rtxt_out,
+ ]
+ outputs = [
+ _final_rtxt_path,
+ ]
+ }
+ _final_deps += [ ":$_copy_rtxt_target" ]
+ }
+
+ if (_is_base_module && _is_bundle_module) {
+ # Bundle modules have to reference resources from the base module.
+ # However, to compile the bundle module's resources we have to give it an
+ # arsc resource to link against (aapt2 fails with proto resources). Thus,
+ # add an arsc resource compilation step to make the bundle module's link
+ # step work.
+ compile_resources("${_template_name}__compile_arsc_resources") {
+ forward_variables_from(invoker,
+ [
+ "support_zh_hk",
+ "aapt_locale_whitelist",
+ "resource_blacklist_regex",
+ "resource_blacklist_exceptions",
+ "png_to_webp",
+ "no_xml_namespaces",
+ ])
+ android_manifest = _android_manifest
+ version_code = _version_code
+ version_name = _version_name
+
+ proto_format = false
+ output = _packaged_resources_path
+
+ build_config = _build_config
+ deps = _deps + [
+ ":$_merge_manifest_target",
+ ":$_build_config_target",
+ _android_sdk_dep,
+ ]
+ }
+ }
+
+ _srcjar_deps += [ ":$_compile_resources_target" ]
+
+ if (_native_libs_deps != [] || _secondary_abi_native_libs_deps != []) {
+ _enable_chromium_linker_tests = false
+ if (defined(invoker.enable_chromium_linker_tests)) {
+ _enable_chromium_linker_tests = invoker.enable_chromium_linker_tests
+ }
+ _ordered_libraries_json =
+ "$target_gen_dir/$target_name.ordered_libraries.json"
+ _rebased_ordered_libraries_json =
+ rebase_path(_ordered_libraries_json, root_build_dir)
+ _ordered_libraries_target = "${_template_name}__write_ordered_libraries"
+
+ # TODO(agrieve): Make GN write runtime deps in dependency order so as to
+ # not need this manual sorting step.
+ action_with_pydeps(_ordered_libraries_target) {
+ script = "//build/android/gyp/write_ordered_libraries.py"
+ deps = [
+ ":$_build_config_target",
+ ":${_template_name}__runtime_deps",
+ ":${_template_name}__secondary_abi__runtime_deps",
+ ]
+ if (_native_libs_deps != []) {
+ _deps_file_to_use = _runtime_deps_file
+ } else {
+ _deps_file_to_use = _secondary_abi_runtime_deps_file
+ }
+ inputs = [
+ _deps_file_to_use,
+ ]
+ outputs = [
+ _ordered_libraries_json,
+ ]
+ _rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
+ args = [
+ "--readelf=$_rebased_android_readelf",
+ "--output=$_rebased_ordered_libraries_json",
+ "--runtime-deps=" + rebase_path(_deps_file_to_use, root_build_dir),
+ ]
+ if (defined(invoker.dont_load_shared_libraries)) {
+ args += [ "--exclude-shared-libraries=" +
+ invoker.dont_load_shared_libraries ]
+ }
+ }
+
+ java_cpp_template("${_template_name}__native_libraries_srcjar") {
+ package_path = "org/chromium/base/library_loader"
+ sources = [
+ "//base/android/java/templates/NativeLibraries.template",
+ ]
+ inputs = [
+ _ordered_libraries_json,
+ ]
+ deps = [
+ ":${_ordered_libraries_target}",
+ ]
+ if (_native_lib_version_rule != "") {
+ deps += [ _native_lib_version_rule ]
+ }
+
+ defines = [
+ "NATIVE_LIBRARIES_LIST=" +
+ "@FileArg($_rebased_ordered_libraries_json:java_libraries_list)",
+ "NATIVE_LIBRARIES_VERSION_NUMBER=$_native_lib_version_arg",
+ ]
+ if (current_cpu == "arm" || current_cpu == "arm64") {
+ defines += [ "ANDROID_APP_CPU_FAMILY_ARM" ]
+ } else if (current_cpu == "x86" || current_cpu == "x64") {
+ defines += [ "ANDROID_APP_CPU_FAMILY_X86" ]
+ } else if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+ defines += [ "ANDROID_APP_CPU_FAMILY_MIPS" ]
+ } else {
+ assert(false, "Unsupported CPU family")
+ }
+ if (_use_chromium_linker) {
+ defines += [ "ENABLE_CHROMIUM_LINKER" ]
+ }
+ if (_load_library_from_apk) {
+ defines += [ "ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE" ]
+ }
+ if (_enable_chromium_linker_tests) {
+ defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ]
+ }
+ }
+ _srcjar_deps += [ ":${_template_name}__native_libraries_srcjar" ]
+ }
+
+ _extra_native_libs = []
+ _extra_native_libs_deps = []
+ assert(_extra_native_libs_deps == []) # Mark as used.
+ if (_native_libs_deps != []) {
+ _extra_native_libs += _sanitizer_runtimes
+ if (_use_chromium_linker) {
+ _extra_native_libs +=
+ [ "$root_out_dir/libchromium_android_linker$shlib_extension" ]
+ _extra_native_libs_deps +=
+ [ "//base/android/linker:chromium_android_linker" ]
+ }
+ }
+
+ if (_generate_buildconfig_java) {
+ generate_build_config_srcjar("${_template_name}__build_config_srcjar") {
+ forward_variables_from(invoker,
+ [
+ "firebase_app_id",
+ "min_sdk_version",
+ ])
+ use_final_fields = true
+ build_config = _build_config
+ enable_multidex = _enable_multidex
+ if (defined(invoker.product_version_resources_dep)) {
+ resources_version_variable =
+ "org.chromium.base.R.string.product_version"
+ }
+ deps = [
+ ":$_build_config_target",
+ ]
+ }
+ _srcjar_deps += [ ":${_template_name}__build_config_srcjar" ]
+ }
+
+ if (_generate_final_jni) {
+ generate_jni_registration("${_template_name}__final_jni") {
+ target = ":$_template_name"
+ if (defined(invoker.jni_registration_header)) {
+ header_output = invoker.jni_registration_header
+ }
+ if (defined(invoker.jni_sources_blacklist)) {
+ sources_blacklist = invoker.jni_sources_blacklist
+ }
+ }
+ _srcjar_deps += [ ":${_template_name}__final_jni" ]
+ }
+
+ _java_target = "${_template_name}__java"
+ java_library_impl(_java_target) {
+ forward_variables_from(invoker,
+ [
+ "alternative_android_sdk_dep",
+ "android_manifest",
+ "android_manifest_dep",
+ "apk_under_test",
+ "base_module_target",
+ "chromium_code",
+ "classpath_deps",
+ "emma_never_instrument",
+ "java_files",
+ "javac_args",
+ "loadable_modules",
+ "native_lib_placeholders",
+ "no_build_hooks",
+ "secondary_abi_loadable_modules",
+ "secondary_native_lib_placeholders",
+ "static_library_dependent_targets",
+ ])
+ if (_is_bundle_module) {
+ type = "android_app_bundle_module"
+ } else {
+ type = "android_apk"
+ }
+ main_target_name = _template_name
+ supports_android = true
+ requires_android = true
+ deps = _deps
+
+ srcjar_deps = _srcjar_deps
+ final_jar_path = _jar_path
+ dex_path = _lib_dex_path
+ final_dex_path = _final_dex_path
+
+ if (_is_bundle_module) {
+ proto_resources_path = _proto_resources_path
+ module_rtxt_path = _compile_resources_rtxt_out
+ } else {
+ apk_path = _final_apk_path
+ incremental_allowed = _incremental_allowed
+ if (_incremental_allowed) {
+ incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk"
+ incremental_install_json_path = _incremental_install_json_path
+ }
+ }
+
+ proguard_enabled = _proguard_enabled
+ if (_proguard_enabled) {
+ proguard_configs = [ _generated_proguard_config ]
+ if (defined(invoker.proguard_configs)) {
+ proguard_configs += invoker.proguard_configs
+ }
+ if (_enable_main_dex_list) {
+ proguard_configs += [ "//build/android/multidex.flags" ]
+ }
+ proguard_mapping_path = _proguard_mapping_path
+ }
+
+ # Don't depend on the runtime_deps target in order to avoid having to
+ # build the native libraries just to create the .build_config file.
+ # The dep is unnecessary since the runtime_deps file is created by gn gen
+ # and the runtime_deps file is added to write_build_config.py's depfile.
+ if (_native_libs_deps != []) {
+ shared_libraries_runtime_deps_file = _runtime_deps_file
+ }
+ if (_secondary_abi_native_libs_deps != []) {
+ secondary_abi_shared_libraries_runtime_deps_file =
+ _secondary_abi_runtime_deps_file
+ }
+
+ extra_shared_libraries = _extra_native_libs
+
+ uncompress_shared_libraries = _uncompress_shared_libraries
+
+ if (defined(_whitelist_r_txt_path) && _is_bundle_module) {
+ # Used to write the file path to the target's .build_config only.
+ base_whitelist_rtxt_path = _whitelist_r_txt_path
+ }
+ }
+
+ # TODO(cjhopman): This is only ever needed to calculate the list of tests to
+ # run. See build/android/pylib/instrumentation/test_jar.py. We should be
+ # able to just do that calculation at build time instead.
+ if (defined(invoker.dist_ijar_path)) {
+ _dist_ijar_path = invoker.dist_ijar_path
+ dist_jar("${_template_name}_dist_ijar") {
+ override_build_config = _build_config
+ output = _dist_ijar_path
+ data = [
+ _dist_ijar_path,
+ ]
+ use_interface_jars = true
+ deps = [
+ ":$_build_config_target",
+ ":$_java_target",
+ ]
+ }
+ }
+
+ # Dex generation for app bundle modules with proguarding enabled takes
+ # place later due to synchronized proguarding. For more details,
+ # read build/android/docs/android_app_bundles.md
+ if (!(_is_bundle_module && _proguard_enabled)) {
+ _final_dex_target_name = "${_template_name}__final_dex"
+ dex(_final_dex_target_name) {
+ forward_variables_from(invoker,
+ [
+ "min_sdk_version",
+ "dexlayout_profile",
+ ])
+ proguard_enabled = _proguard_enabled
+ build_config = _build_config
+ deps = [
+ ":$_build_config_target",
+ ":$_java_target",
+ ]
+ if (_proguard_enabled) {
+ forward_variables_from(invoker, [ "proguard_jar_path" ])
+ deps += _deps + [ ":$_compile_resources_target" ]
+ proguard_mapping_path = _proguard_mapping_path
+ if (!defined(invoker.proguard_jar_path) &&
+ _verify_android_configuration) {
+ proguard_expectations_file =
+ "$_target_src_dir/java/$_template_name.proguard_flags.expected"
+ }
+ } else {
+ input_jars = [ _lib_dex_path ]
+ input_dex_classpath =
+ "${_rebased_build_config}:final_dex:dependency_dex_files"
+ if (_enable_main_dex_list) {
+ input_jar_classpath =
+ "${_rebased_build_config}:deps_info:java_runtime_classpath"
+ }
+ }
+
+ output = _final_dex_path
+ enable_multidex = _enable_multidex
+
+ if (_enable_main_dex_list) {
+ forward_variables_from(invoker, [ "negative_main_dex_globs" ])
+ extra_main_dex_proguard_config = _generated_proguard_main_dex_config
+ deps += [ ":$_compile_resources_target" ]
+ } else if (_enable_multidex) {
+ if (defined(invoker.negative_main_dex_globs)) {
+ not_needed(invoker, [ "negative_main_dex_globs" ])
+ }
+ }
+ }
+ } else {
+ # A small sanity check to help developers with a subtle point!
+ assert(
+ !defined(invoker.proguard_jar_path),
+ "proguard_jar_path should not be used for app bundle modules " +
+ "when proguard is enabled. Pass it to the android_app_bundle() " +
+ "target instead!")
+
+ _final_deps += [ ":$_java_target" ]
+ }
+
+ _extra_native_libs_even_when_incremental = []
+ assert(_extra_native_libs_even_when_incremental == []) # Mark as used.
+ if (_native_libs_deps != []) {
+ _create_stack_script_rule_name = "${_template_name}__stack_script"
+ _final_deps += [ ":${_create_stack_script_rule_name}" ]
+ stack_script(_create_stack_script_rule_name) {
+ stack_target_name = invoker.target_name
+ deps = _native_libs_deps
+ }
+ }
+
+ if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) {
+ _extra_native_libs_even_when_incremental += invoker.loadable_modules
+ }
+
+ _all_native_libs_deps = []
+ if (_native_libs_deps != [] ||
+ _extra_native_libs_even_when_incremental != []) {
+ _native_libs_file_arg_dep = ":$_build_config_target"
+ if (!_is_bundle_module) {
+ _native_libs_file_arg =
+ "@FileArg($_rebased_build_config:native:libraries)"
+ }
+ _all_native_libs_deps += _native_libs_deps + _extra_native_libs_deps +
+ [ _native_libs_file_arg_dep ]
+ }
+
+ if (!_is_bundle_module) {
+ # Generate size-info/*.jar.info files.
+ if (defined(invoker.name)) {
+ # Create size info files for targets that care about size
+ # (have proguard enabled).
+ if (_proguard_enabled) {
+ _size_info_target = "${target_name}__size_info"
+ create_size_info_files(_size_info_target) {
+ name = "${invoker.name}.apk"
+ build_config = _build_config
+ packaged_resources_path = _packaged_resources_path
+ deps = _deps + [
+ ":$_build_config_target",
+ ":$_compile_resources_target",
+ ":$_java_target",
+ ]
+ }
+ _final_deps += [ ":$_size_info_target" ]
+ } else {
+ not_needed(invoker, [ "name" ])
+ }
+ }
+
+ _keystore_path = android_keystore_path
+ _keystore_name = android_keystore_name
+ _keystore_password = android_keystore_password
+
+ if (defined(invoker.keystore_path)) {
+ _keystore_path = invoker.keystore_path
+ _keystore_name = invoker.keystore_name
+ _keystore_password = invoker.keystore_password
+ }
+
+ _create_apk_target = "${_template_name}__create"
+ _final_deps += [ ":$_create_apk_target" ]
+ create_apk("$_create_apk_target") {
+ forward_variables_from(invoker,
+ [
+ "native_lib_placeholders",
+ "public_deps",
+ "secondary_native_lib_placeholders",
+ "shared_resources",
+ "write_asset_list",
+ "uncompress_dex",
+ ])
+ packaged_resources_path = _packaged_resources_path
+ if (_optimize_resources) {
+ optimized_resources_path = _optimized_resources_path
+ }
+
+ apk_path = _final_apk_path
+ assets_build_config = _build_config
+ dex_path = _final_dex_path
+ load_library_from_apk = _load_library_from_apk
+
+ keystore_name = _keystore_name
+ keystore_path = _keystore_path
+ keystore_password = _keystore_password
+
+ incremental_allowed = _incremental_allowed
+ if (_incremental_allowed) {
+ android_manifest = _android_manifest
+ base_path = _base_path
+ }
+
+ # The incremental apk uses neither native libs nor the final dex.
+ incremental_deps = _deps + [
+ ":$_merge_manifest_target",
+ ":$_build_config_target",
+ ":$_compile_resources_target",
+ ]
+
+ # These deps generate the input files consumed when packaging the apk.
+ deps = _deps + [
+ ":$_merge_manifest_target",
+ ":$_build_config_target",
+ ":$_final_dex_target_name",
+ ":$_compile_resources_target",
+ ]
+
+ if (_native_libs_deps != [] ||
+ _extra_native_libs_even_when_incremental != []) {
+ native_libs_filearg = _native_libs_file_arg
+ native_libs = _extra_native_libs
+ native_libs_even_when_incremental =
+ _extra_native_libs_even_when_incremental
+ }
+ deps += _all_native_libs_deps
+ deps += _secondary_abi_native_libs_deps
+ secondary_abi_native_libs_filearg =
+ "@FileArg($_rebased_build_config:native:secondary_abi_libraries)"
+
+ uncompress_shared_libraries = _uncompress_shared_libraries
+ }
+ } else {
+ _final_deps += [
+ ":$_merge_manifest_target",
+ ":$_build_config_target",
+ ":$_compile_resources_target",
+ ] + _all_native_libs_deps + _secondary_abi_native_libs_deps
+ }
+
+ if (_incremental_allowed) {
+ _write_installer_json_rule_name = "${_template_name}__incremental_json"
+ action_with_pydeps(_write_installer_json_rule_name) {
+ script = "//build/android/incremental_install/write_installer_json.py"
+ deps = [
+ ":$_build_config_target",
+ ]
+
+ data = [
+ _incremental_install_json_path,
+ ]
+ inputs = [
+ _build_config,
+ ]
+ outputs = [
+ _incremental_install_json_path,
+ ]
+
+ _rebased_apk_path_no_ext =
+ rebase_path(_final_apk_path_no_ext, root_build_dir)
+ _rebased_incremental_install_json_path =
+ rebase_path(_incremental_install_json_path, root_build_dir)
+ _rebased_lib_dex_path = rebase_path(_lib_dex_path, root_build_dir)
+ _dex_arg_key = "${_rebased_build_config}:final_dex:dependency_dex_files"
+ args = [
+ "--apk-path=${_rebased_apk_path_no_ext}_incremental.apk",
+ "--output-path=$_rebased_incremental_install_json_path",
+ "--dex-file=$_rebased_lib_dex_path",
+ "--dex-file-list=@FileArg($_dex_arg_key)",
+ ]
+ if (_proguard_enabled) {
+ args += [ "--show-proguard-warning" ]
+ }
+ if (_native_libs_deps != []) {
+ args += [ "--native-libs=$_native_libs_file_arg" ]
+ deps += [ _native_libs_file_arg_dep ]
+ }
+ if (_extra_native_libs != []) {
+ _rebased_extra_native_libs =
+ rebase_path(_extra_native_libs, root_build_dir)
+ args += [ "--native-libs=$_rebased_extra_native_libs" ]
+ }
+ if (_load_library_from_apk) {
+ args += [ "--dont-even-try=Incremental builds do not work with load_library_from_apk. Try setting is_component_build=true in your GN args." ]
+ }
+ }
+ _incremental_apk_operations = []
+ }
+
+ _apk_operations = []
+
+ # Generate the apk operations script.
+ if (!_is_bundle_module &&
+ (!defined(invoker.create_apk_script) || invoker.create_apk_script)) {
+ _apk_operations_target_name = "${target_name}__apk_operations"
+ action_with_pydeps(_apk_operations_target_name) {
+ _generated_script = "$root_build_dir/bin/${invoker.target_name}"
+ script = "//build/android/gyp/create_apk_operations_script.py"
+ outputs = [
+ _generated_script,
+ ]
+ if (_proguard_enabled) {
+ # Required by logcat command.
+ data_deps = [
+ "//build/android/stacktrace:java_deobfuscate",
+ ]
+ }
+ args = [
+ "--script-output-path",
+ rebase_path(_generated_script, root_build_dir),
+ "--apk-path",
+ rebase_path(_final_apk_path, root_build_dir),
+ "--target-cpu=$target_cpu",
+ ]
+ if (defined(invoker.command_line_flags_file)) {
+ args += [
+ "--command-line-flags-file",
+ invoker.command_line_flags_file,
+ ]
+ }
+ if (_incremental_allowed) {
+ args += [
+ "--incremental-install-json-path",
+ rebase_path(_incremental_install_json_path, root_build_dir),
+ ]
+ }
+ if (_proguard_enabled) {
+ args += [
+ "--proguard-mapping-path",
+ rebase_path("$_final_apk_path.mapping", root_build_dir),
+ ]
+ }
+ }
+ _apk_operations += [ ":$_apk_operations_target_name" ]
+ if (_incremental_allowed) {
+ _incremental_apk_operations += [ ":$_apk_operations_target_name" ]
+ }
+ }
+
+ group(target_name) {
+ if (_incremental_allowed && incremental_apk_by_default) {
+ deps = [
+ ":${target_name}_incremental",
+ ]
+ assert(_apk_operations != [] || true) # Prevent "unused variable".
+ } else {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ ])
+ public_deps = _final_deps
+
+ # Generate apk related operations at runtime.
+ public_deps += _apk_operations
+ }
+ }
+
+ if (_incremental_allowed) {
+ group("${target_name}_incremental") {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ ])
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+
+ # device/commands is used by the installer script to push files via .zip.
+ data_deps += [ "//build/android/pylib/device/commands" ] +
+ _native_libs_deps + _extra_native_libs_deps
+
+ # Since the _incremental.apk includes neither the .so nor the .dex files
+ # from the actual target, but instead loads them at runtime, we need to
+ # explicitly depend on them here.
+ public_deps = [
+ ":${_java_target}",
+ ":${_template_name}__create_incremental",
+ ":${_write_installer_json_rule_name}",
+ ]
+
+ # Generate incremental apk related operations at runtime.
+ public_deps += _incremental_apk_operations
+ }
+ }
+ }
+
+ # Declare an Android APK target
+ #
+ # This target creates an Android APK containing java code, resources, assets,
+ # and (possibly) native libraries.
+ #
+ # Supports all variables of android_apk_or_module(), plus:
+ # apk_name: Name for final apk.
+ #
+ # Example
+ # android_apk("foo_apk") {
+ # android_manifest = "AndroidManifest.xml"
+ # java_files = [
+ # "android/org/chromium/foo/FooApplication.java",
+ # "android/org/chromium/foo/FooActivity.java",
+ # ]
+ # deps = [
+ # ":foo_support_java"
+ # ":foo_resources"
+ # ]
+ # srcjar_deps = [
+ # ":foo_generated_enum"
+ # ]
+ # shared_libraries = [
+ # ":my_shared_lib",
+ # ]
+ # }
+ template("android_apk") {
+ android_apk_or_module(target_name) {
+ forward_variables_from(invoker,
+ [
+ "aapt_locale_whitelist",
+ "additional_jar_files",
+ "alternative_android_sdk_dep",
+ "android_manifest",
+ "android_manifest_dep",
+ "apk_under_test",
+ "app_as_shared_lib",
+ "build_hooks_android_impl_deps",
+ "chromium_code",
+ "classpath_deps",
+ "command_line_flags_file",
+ "create_apk_script",
+ "data",
+ "data_deps",
+ "deps",
+ "dexlayout_profile",
+ "dist_ijar_path",
+ "dont_load_shared_libraries",
+ "emit_resource_ids",
+ "emma_never_instrument",
+ "enable_chromium_linker_tests",
+ "enable_multidex",
+ "final_apk_path",
+ "firebase_app_id",
+ "generate_buildconfig_java",
+ "generate_final_jni",
+ "input_jars_paths",
+ "java_files",
+ "javac_args",
+ "jni_registration_header",
+ "jni_sources_blacklist",
+ "keystore_name",
+ "keystore_password",
+ "keystore_path",
+ "load_library_from_apk",
+ "loadable_modules",
+ "min_sdk_version",
+ "native_lib_placeholders",
+ "native_lib_version_arg",
+ "native_lib_version_rule",
+ "negative_main_dex_globs",
+ "never_incremental",
+ "no_build_hooks",
+ "no_xml_namespaces",
+ "optimize_resources",
+ "png_to_webp",
+ "post_process_package_resources_script",
+ "product_version_resources_dep",
+ "proguard_configs",
+ "proguard_enabled",
+ "verify_android_configuration",
+ "proguard_jar_path",
+ "resource_blacklist_regex",
+ "resource_blacklist_exceptions",
+ "resource_ids_provider_dep",
+ "resources_config_path",
+ "secondary_abi_loadable_modules",
+ "secondary_abi_shared_libraries",
+ "secondary_native_lib_placeholders",
+ "shared_libraries",
+ "shared_resources",
+ "shared_resources_whitelist_locales",
+ "shared_resources_whitelist_target",
+ "srcjar_deps",
+ "static_library_dependent_targets",
+ "support_zh_hk",
+ "testonly",
+ "uncompress_shared_libraries",
+ "uncompress_dex",
+ "use_chromium_linker",
+ "version_code",
+ "version_name",
+ "write_asset_list",
+ ])
+ is_bundle_module = false
+ if (defined(invoker.apk_name)) {
+ name = invoker.apk_name
+ }
+ }
+ }
+
+ # Declare an Android app bundle module target.
+ #
+ # The module is implemented via android_apk_or_module().
+ #
+ # Supports all variables of android_library(), plus:
+ # module_name: Name of the module.
+ # is_base_module: If defined and true, indicates that this is the bundle's
+ # base module (optional).
+ # base_module_target: Base module target of the bundle this module will be
+ # added to (optional). Can only be specified for non-base modules.
+ # native_switches: Forwarded switches to decide how to assign native
+ # libraries and placeholders (optional). Its members are:
+ # * is_64_bit_browser
+ # * include_32_bit_webview
+ # loadable_modules_if_32_bit: Native libraries to use if the binary ABI is
+ # 32-bit (optional).
+ # loadable_modules_if_64_bit: Native libraries to use if the binary ABI is
+ # 64-bit (optional).
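+ #
+ # Example (a minimal sketch; target, file, and dependency names are
+ # hypothetical):
+ # android_app_bundle_module("foo_base_module") {
+ #   is_base_module = true
+ #   android_manifest = "AndroidManifest.xml"
+ #   java_files = [ "android/org/chromium/foo/FooApplication.java" ]
+ #   deps = [ ":foo_resources" ]
+ # }
+ #
+ # android_app_bundle_module("foo_extra_module") {
+ #   module_name = "extra"
+ #   android_manifest = "extra/AndroidManifest.xml"
+ #   base_module_target = ":foo_base_module"
+ #   native_switches = {
+ #     is_64_bit_browser = false
+ #     include_32_bit_webview = true
+ #   }
+ #   loadable_modules_if_32_bit = [ "$root_out_dir/libfoo.so" ]
+ #   loadable_modules_if_64_bit = [ "$root_out_dir/libfoo64.so" ]
+ # }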
+ template("android_app_bundle_module") {
+ _is_base_module = defined(invoker.is_base_module) && invoker.is_base_module
+
+ if (_is_base_module) {
+ assert(!defined(invoker.base_module_target))
+ } else {
+ assert(!defined(invoker.write_asset_list))
+ assert(!defined(invoker.firebase_app_id))
+ assert(!defined(invoker.app_as_shared_lib))
+ assert(!defined(invoker.shared_resources))
+ assert(!defined(invoker.shared_resources_whitelist_target))
+ assert(!defined(invoker.shared_resources_whitelist_locales))
+ assert(!defined(invoker.build_hooks_android_impl_deps))
+ assert(!defined(invoker.shared_libraries))
+ assert(defined(invoker.base_module_target))
+ }
+
+ # TODO(tiborg): We have several flags that exist as workarounds for the
+ # fact that resources get compiled in the bundle module target, even
+ # though bundle modules have to share certain flags or know information
+ # about the base module. Those flags include version_code, version_name,
+ # and base_module_target. It would be better to move the resource compile
+ # target into the bundle target. Doing so would keep the bundle modules
+ # independent from the bundle and potentially allow reusing the same
+ # bundle modules for multiple bundles.
+ android_apk_or_module(target_name) {
+ forward_variables_from(invoker,
+ [
+ "aapt_locale_whitelist",
+ "additional_jar_files",
+ "alternative_android_sdk_dep",
+ "android_manifest",
+ "android_manifest_dep",
+ "app_as_shared_lib",
+ "base_module_target",
+ "chromium_code",
+ "classpath_deps",
+ "data",
+ "data_deps",
+ "deps",
+ "emma_never_instrument",
+ "enable_chromium_linker_tests",
+ "enable_multidex",
+ "firebase_app_id",
+ "generate_buildconfig_java",
+ "generate_final_jni",
+ "input_jars_paths",
+ "is_base_module",
+ "java_files",
+ "javac_args",
+ "jni_registration_header",
+ "jni_sources_blacklist",
+ "load_library_from_apk",
+ "min_sdk_version",
+ "native_lib_version_arg",
+ "native_lib_version_rule",
+ "negative_main_dex_globs",
+ "no_xml_namespaces",
+ "optimize_resources",
+ "package_name",
+ "package_name_to_id_mapping",
+ "png_to_webp",
+ "product_version_resources_dep",
+ "proguard_configs",
+ "proguard_enabled",
+ "proguard_jar_path",
+ "resource_blacklist_exceptions",
+ "resource_blacklist_regex",
+ "resources_config_path",
+ "secondary_abi_shared_libraries",
+ "shared_libraries",
+ "shared_resources",
+ "shared_resources_whitelist_locales",
+ "shared_resources_whitelist_target",
+ "srcjar_deps",
+ "support_zh_hk",
+ "testonly",
+ "uncompress_shared_libraries",
+ "use_chromium_linker",
+ "verify_android_configuration",
+ "version_code",
+ "version_name",
+ "write_asset_list",
+ ])
+
+ # Specify native libraries and placeholders.
+ if (defined(invoker.native_switches)) {
+ assert(invoker.loadable_modules_if_32_bit != [])
+ assert(invoker.loadable_modules_if_64_bit != [])
+
+ # Decision logic: Assign decision variables:
+ # loadable_modules_to_use: Either |loadable_modules_if_64_bit| or
+ # |loadable_modules_if_32_bit|.
+ # native_is_primary: Whether |loadable_modules_to_use| should be
+ # assigned as primary ABI or secondary ABI.
+ # native_need_placeholder: Whether a placeholder is needed for the
+ # complementary ABI to the library.
+ _native_switches = invoker.native_switches
+ if (_native_switches.is_64_bit_browser) {
+ _loadable_modules_to_use = invoker.loadable_modules_if_64_bit
+ _native_is_primary =
+ !build_apk_secondary_abi || android_64bit_target_cpu
+ _native_need_placeholder =
+ build_apk_secondary_abi && _native_switches.include_32_bit_webview
+ } else {
+ _loadable_modules_to_use = invoker.loadable_modules_if_32_bit
+ _native_is_primary =
+ !build_apk_secondary_abi || !android_64bit_target_cpu
+ _native_need_placeholder =
+ build_apk_secondary_abi && android_64bit_target_cpu
+ }
+
+ # Realization logic: Assign libraries and placeholders.
+ if (_native_is_primary) {
+ loadable_modules = _loadable_modules_to_use
+ if (_native_need_placeholder) {
+ secondary_native_lib_placeholders = [ "libdummy.so" ]
+ }
+ } else {
+ secondary_abi_loadable_modules = _loadable_modules_to_use
+ if (_native_need_placeholder) {
+ native_lib_placeholders = [ "libdummy.so" ]
+ }
+ }
+ } else {
+ assert(!defined(invoker.loadable_modules_if_32_bit))
+ assert(!defined(invoker.loadable_modules_if_64_bit))
+ forward_variables_from(invoker,
+ [
+ "loadable_modules",
+ "native_lib_placeholders",
+ "secondary_abi_loadable_modules",
+ "secondary_native_lib_placeholders",
+ ])
+ }
+
+ is_bundle_module = true
+ generate_buildconfig_java = _is_base_module
+ no_build_hooks = !_is_base_module
+ if (defined(invoker.module_name)) {
+ name = invoker.module_name
+ }
+ }
+ }
+
+ # Declare an Android instrumentation test apk
+ #
+ # This target creates an Android instrumentation test apk.
+ #
+ # Supports all variables of android_apk(), plus:
+ # apk_under_test: The apk being tested (optional).
+ #
+ # Example
+ # instrumentation_test_apk("foo_test_apk") {
+ # android_manifest = "AndroidManifest.xml"
+ # apk_name = "FooTest"
+ # apk_under_test = "Foo"
+ # java_files = [
+ # "android/org/chromium/foo/FooTestCase.java",
+ # "android/org/chromium/foo/FooExampleTest.java",
+ # ]
+ # deps = [
+ # ":foo_test_support_java"
+ # ]
+ # }
+ template("instrumentation_test_apk") {
+ assert(defined(invoker.apk_name))
+ testonly = true
+ _incremental_allowed =
+ !defined(invoker.never_incremental) || !invoker.never_incremental
+ _apk_target_name = "${target_name}__apk"
+ _test_runner_target_name = "${target_name}__test_runner_script"
+ _dist_ijar_path =
+ "$root_build_dir/test.lib.java/" + invoker.apk_name + ".jar"
+ if (_incremental_allowed) {
+ _incremental_test_runner_target_name =
+ "${_test_runner_target_name}_incremental"
+ _incremental_test_name = "${invoker.target_name}_incremental"
+ }
+
+ if (incremental_apk_by_default && _incremental_allowed) {
+ _incremental_test_runner_target_name = _test_runner_target_name
+ _incremental_test_name = invoker.target_name
+ }
+
+ if (!incremental_apk_by_default ||
+ (incremental_apk_by_default && !_incremental_allowed)) {
+ test_runner_script(_test_runner_target_name) {
+ forward_variables_from(invoker,
+ [
+ "additional_apks",
+ "apk_under_test",
+ "data",
+ "data_deps",
+ "deps",
+ "ignore_all_data_deps",
+ "proguard_enabled",
+ "public_deps",
+ ])
+ test_name = invoker.target_name
+ test_type = "instrumentation"
+ apk_target = ":$_apk_target_name"
+ test_jar = _dist_ijar_path
+ }
+ }
+ if (_incremental_allowed) {
+ test_runner_script(_incremental_test_runner_target_name) {
+ forward_variables_from(invoker,
+ [
+ "additional_apks",
+ "apk_under_test",
+ "data",
+ "data_deps",
+ "deps",
+ "ignore_all_data_deps",
+ "public_deps",
+ ])
+ test_name = _incremental_test_name
+ test_type = "instrumentation"
+ apk_target = ":$_apk_target_name"
+ test_jar = _dist_ijar_path
+ incremental_install = true
+ }
+ }
+
+ android_apk(_apk_target_name) {
+ deps = []
+ data_deps = []
+ forward_variables_from(invoker, "*")
+ deps += [ "//testing/android/broker:broker_java" ]
+ data_deps += [
+ "//build/android/pylib/device/commands",
+ "//tools/android/forwarder2",
+ "//tools/android/md5sum",
+ ]
+ if (defined(invoker.apk_under_test)) {
+ data_deps += [ invoker.apk_under_test ]
+ }
+ if (defined(invoker.additional_apks)) {
+ data_deps += invoker.additional_apks
+ }
+ if (defined(invoker.apk_under_test)) {
+ # Prevent a build_hooks_android_impl from existing in both the test apk
+ # and the apk_under_test.
+ no_build_hooks = true
+ }
+
+ if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+ # When ProGuard is on, we use ProGuard to combine the under test java
+ # code and the test java code. This is to allow us to apply all ProGuard
+ # optimizations that we ship with, but not have them break tests. The
+ # apk under test will still have the same resources, assets, and
+ # manifest, all of which are the ones used in the tests.
+ if (!defined(invoker.proguard_configs)) {
+ proguard_configs = []
+ }
+ proguard_configs += [ "//testing/android/proguard_for_test.flags" ]
+ if (defined(final_apk_path)) {
+ _final_apk_path = final_apk_path
+ } else {
+ _final_apk_path = "$root_build_dir/apks/${apk_name}.apk"
+ }
+ data = [
+ "$_final_apk_path.mapping",
+ ]
+ data_deps += [ "//build/android/stacktrace:java_deobfuscate" ]
+ }
+
+ dist_ijar_path = _dist_ijar_path
+ create_apk_script = false
+ }
+
+ group(target_name) {
+ if (incremental_apk_by_default && _incremental_allowed) {
+ public_deps = [
+ ":${target_name}_incremental",
+ ]
+ } else {
+ public_deps = [
+ ":$_apk_target_name",
+ ":$_test_runner_target_name",
+
+ # Required by test runner to enumerate test list.
+ ":${_apk_target_name}_dist_ijar",
+ ]
+ if (defined(invoker.apk_under_test)) {
+ public_deps += [ invoker.apk_under_test ]
+ }
+ }
+
+ # Ensure unstripped libraries are included in runtime deps so that
+ # symbolization can be done.
+ deps = [
+ ":${_apk_target_name}__runtime_deps",
+ ":${_apk_target_name}__secondary_abi__runtime_deps",
+ ]
+ if (defined(invoker.apk_under_test)) {
+ _under_test_label =
+ get_label_info(invoker.apk_under_test, "label_no_toolchain")
+ deps += [
+ "${_under_test_label}__runtime_deps",
+ "${_under_test_label}__secondary_abi__runtime_deps",
+ ]
+ }
+ }
+
+ if (_incremental_allowed) {
+ group("${target_name}_incremental") {
+ public_deps = [
+ ":$_incremental_test_runner_target_name",
+ ":${_apk_target_name}_dist_ijar",
+ ":${_apk_target_name}_incremental",
+ ]
+ if (defined(invoker.apk_under_test)) {
+ public_deps += [ "${invoker.apk_under_test}_incremental" ]
+ }
+ }
+ }
+ }
+
+ # Declare an Android gtest apk
+ #
+ # This target creates an Android apk for running gtest-based unittests.
+ #
+ # Variables
+ # deps: Specifies the dependencies of this target. These will be passed to
+ # the underlying android_apk invocation and should include the java and
+ # resource dependencies of the apk.
+ # shared_library: shared_library target that contains the unit tests.
+ # apk_name: The name of the produced apk. If unspecified, the name of the
+ # shared_library target is used.
+ # use_default_launcher: Whether the default activity (NativeUnitTestActivity)
+ # should be used for launching tests.
+ # use_native_activity: Test implements ANativeActivity_onCreate().
+ #
+ # Example
+ # unittest_apk("foo_unittests_apk") {
+ # deps = [ ":foo_java", ":foo_resources" ]
+ # shared_library = ":foo_unittests"
+ # }
+ template("unittest_apk") {
+ _use_native_activity =
+ defined(invoker.use_native_activity) && invoker.use_native_activity
+ _android_manifest = "$target_gen_dir/$target_name/AndroidManifest.xml"
+ assert(invoker.shared_library != "")
+
+ # This trivial assert is needed in case android_manifest is defined,
+ # as otherwise _use_native_activity and _android_manifest would not be used.
+ assert(_use_native_activity != "" && _android_manifest != "")
+
+ if (!defined(invoker.android_manifest)) {
+ jinja_template("${target_name}_manifest") {
+ _native_library_name = get_label_info(invoker.shared_library, "name")
+ input = "//testing/android/native_test/java/AndroidManifest.xml.jinja2"
+ output = _android_manifest
+ variables = [
+ "is_component_build=${is_component_build}",
+ "native_library_name=${_native_library_name}",
+ "use_native_activity=${_use_native_activity}",
+ ]
+ }
+ }
+
+ android_apk(target_name) {
+ data_deps = []
+ forward_variables_from(invoker, "*")
+ testonly = true
+ create_apk_script = false
+
+ assert(!defined(invoker.proguard_enabled) || !invoker.proguard_enabled ||
+ invoker.proguard_configs != [])
+
+ if (!defined(apk_name)) {
+ apk_name = get_label_info(invoker.shared_library, "name")
+ }
+
+ if (!defined(android_manifest)) {
+ android_manifest_dep = ":${target_name}_manifest"
+ android_manifest = _android_manifest
+ }
+
+ final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
+
+ if (!defined(use_default_launcher) || use_default_launcher) {
+ deps += [ "//testing/android/native_test:native_test_java" ]
+ }
+ shared_libraries = [ invoker.shared_library ]
+ deps += [
+ ":${target_name}__runtime_deps",
+ ":${target_name}__secondary_abi__runtime_deps",
+ "//base:base_java",
+ "//testing/android/reporter:reporter_java",
+ ]
+ data_deps += [
+ "//build/android/pylib/device/commands",
+ "//tools/android/md5sum",
+ ]
+ if (host_os == "linux") {
+ data_deps += [ "//tools/android/forwarder2" ]
+ }
+ }
+ }
+
+ # Generate .java files from .aidl files.
+ #
+ # This target will store the .java files in a srcjar and should be included in
+ # an android_library or android_apk's srcjar_deps.
+ #
+ # Variables
+ # sources: Paths to .aidl files to compile.
+ # import_include: List of paths to directories containing .java files
+ # imported by the .aidl files.
+ # interface_file: Preprocessed aidl file to import.
+ #
+ # Example
+ # android_aidl("foo_aidl") {
+ # import_include = "java/src"
+ # sources = [
+ # "java/src/com/foo/bar/FooBarService.aidl",
+ # "java/src/com/foo/bar/FooBarServiceCallback.aidl",
+ # ]
+ # }
+ template("android_aidl") {
+ action_with_pydeps(target_name) {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+
+ script = "//build/android/gyp/aidl.py"
+ sources = invoker.sources
+
+ _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+ _aidl_path = "${android_sdk_build_tools}/aidl"
+ _framework_aidl = "$android_sdk/framework.aidl"
+ _imports = [ _framework_aidl ]
+ if (defined(invoker.interface_file)) {
+ assert(invoker.interface_file != "")
+ _imports += [ invoker.interface_file ]
+ }
+
+ inputs = [ _aidl_path ] + _imports
+
+ outputs = [
+ _srcjar_path,
+ ]
+ _rebased_imports = rebase_path(_imports, root_build_dir)
+ args = [
+ "--aidl-path",
+ rebase_path(_aidl_path, root_build_dir),
+ "--imports=$_rebased_imports",
+ "--srcjar",
+ rebase_path(_srcjar_path, root_build_dir),
+ ]
+ if (defined(invoker.import_include) && invoker.import_include != []) {
+ # TODO(cjhopman): aidl supports creating a depfile. We should be able to
+ # switch to constructing a depfile for the overall action from that
+ # instead of having all the .java files in the include paths as inputs.
+ _rebased_import_paths = []
+ foreach(_import_path, invoker.import_include) {
+ _rebased_import_path = []
+ _rebased_import_path = [ rebase_path(_import_path, root_build_dir) ]
+ _rebased_import_paths += _rebased_import_path
+ _java_files_build_rel = []
+ _java_files_build_rel =
+ exec_script("//build/android/gyp/find.py",
+ [ "--pattern=*.java" ] + _rebased_import_path,
+ "list lines")
+ inputs += rebase_path(_java_files_build_rel, ".", root_build_dir)
+ }
+ args += [ "--includes=$_rebased_import_paths" ]
+ }
+ args += rebase_path(sources, root_build_dir)
+ }
+ }
+
+ # Compile a protocol buffer to java.
+ #
+ # This generates java files from protocol buffers and creates an Android library
+ # containing the classes.
+ #
+ # Variables
+ # sources (required)
+ # Paths to .proto files to compile.
+ #
+ # proto_path (required)
+ # Root directory of .proto files.
+ #
+ # generate_nano (optional, default false)
+ # Whether to generate nano protos. If false, this will use the lite proto generator.
+ # Nano protos are deprecated, so please use lite protos for new proto libraries.
+ #
+ # Example:
+ # proto_java_library("foo_proto_java") {
+ # proto_path = "src/foo"
+ # sources = [ "$proto_path/foo.proto" ]
+ # }
+ template("proto_java_library") {
+ set_sources_assignment_filter([])
+ forward_variables_from(invoker, [ "testonly" ])
+ _generate_nano =
+ defined(invoker.generate_nano) && invoker.generate_nano == true
+
+ if (_generate_nano) {
+ # Use the legacy Android nano proto generator.
+ _protoc_dep =
+ "//third_party/android_protobuf:android_protoc($host_toolchain)"
+ _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+ _protoc_bin = "$_protoc_out_dir/android_protoc"
+ _proto_runtime = "//third_party/android_protobuf:protobuf_nano_javalib"
+ } else {
+ # Use the regular proto library to generate lite protos.
+ _protoc_dep = "//third_party/protobuf:protoc($host_toolchain)"
+ _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+ _protoc_bin = "$_protoc_out_dir/protoc"
+ _proto_runtime =
+ "//third_party/android_deps:com_google_protobuf_protobuf_lite_java"
+ _protoc_javalite_plugin_dir = "//third_party/protoc_javalite/"
+ }
+ _proto_path = invoker.proto_path
+ _template_name = target_name
+
+ action_with_pydeps("${_template_name}__protoc_java") {
+ _srcjar_path = "$target_gen_dir/$target_name.srcjar"
+ script = "//build/protoc_java.py"
+
+ deps = [
+ _protoc_dep,
+ ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ sources = invoker.sources
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ _srcjar_path,
+ ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--protoc",
+ rebase_path(_protoc_bin, root_build_dir),
+ "--proto-path",
+ rebase_path(_proto_path, root_build_dir),
+ "--srcjar",
+ rebase_path(_srcjar_path, root_build_dir),
+ ] + rebase_path(sources, root_build_dir)
+ if (_generate_nano) {
+ args += [ "--nano" ]
+ } else {
+ args += [
+ "--protoc-javalite-plugin-dir",
+ rebase_path(_protoc_javalite_plugin_dir, root_build_dir),
+ ]
+ }
+ }
+
+ android_library(target_name) {
+ chromium_code = false
+ java_files = []
+ srcjar_deps = [ ":${_template_name}__protoc_java" ]
+ deps = [
+ _proto_runtime,
+ ]
+ }
+ }
+
+ # Declare an Android library target for a prebuilt AAR.
+ #
+ # This target creates an Android library containing java code and Android
+ # resources. For libraries without resources, it will not generate
+ # corresponding android_resources targets.
+ #
+ # To avoid slowing down "gn gen", an associated .info file must be committed
+ # along with the .aar file. In order to create this file, define the target
+ # and then run once with the gn arg "update_android_aar_prebuilts = true".
+ #
+ # Variables
+ # aar_path: Path to the AAR.
+ # info_path: Path to the .aar.info file (generated via
+ # update_android_aar_prebuilts GN arg).
+ # proguard_configs: List of proguard configs to use in final apk step for
+ # any apk that depends on this library.
+ # ignore_aidl: Whether to ignore .aidl files found with the .aar.
+ # ignore_assets: Whether to ignore assets found in the .aar.
+ # ignore_native_libraries: Whether to ignore .so files found in the .aar.
+ # See also extract_native_libraries.
+ # extract_native_libraries: Whether to extract .so files found in the .aar.
+ # If the .aar contains .so files, either extract_native_libraries or
+ # ignore_native_libraries must be set.
+ # split_compat_class_names: Names of the classes that will have their
+ # bytecode rewritten to inject the call to SplitCompat.install().
+ # Used to make dependencies compatible with SplitCompat so that
+ # resources brought in by the modules can be accessed immediately.
+ # create_srcjar: If false, does not create an R.java file.
+ # TODO(jbudorick@): remove this argument after crbug.com/522043 is fixed.
+ # requires_android: Whether this target can only be used for compiling
+ # Android related targets.
+ #
+ # Example
+ # android_aar_prebuilt("foo_java") {
+ # aar_path = "foo.aar"
+ # }
+ template("android_aar_prebuilt") {
+ _info_path = "$target_name.info"
+ if (defined(invoker.info_path)) {
+ _info_path = invoker.info_path
+ }
+ _output_path = "${target_gen_dir}/${target_name}"
+ _unpack_target_name = "${target_name}__unpack_aar"
+ _ignore_aidl = defined(invoker.ignore_aidl) && invoker.ignore_aidl
+ _ignore_assets = defined(invoker.ignore_assets) && invoker.ignore_assets
+ _ignore_native_libraries = defined(invoker.ignore_native_libraries) &&
+ invoker.ignore_native_libraries
+ _extract_native_libraries = defined(invoker.extract_native_libraries) &&
+ invoker.extract_native_libraries
+
+ # Scan the AAR file and determine the resources and jar files.
+ # Some libraries might not have resources; others might have two jars.
+ if (update_android_aar_prebuilts) {
+ print("Writing " + rebase_path(_info_path, "//"))
+ exec_script("//build/android/gyp/aar.py",
+ [
+ "list",
+ rebase_path(invoker.aar_path, root_build_dir),
+ "--output",
+ rebase_path(_info_path, root_build_dir),
+ ])
+ }
+
+ # If "gn gen" is failing on the following line, you need to generate an
+ # .info file for your new target by running:
+ # gn gen --args='target_os="android" update_android_aar_prebuilts=true' out/tmp
+ # rm -r out/tmp
+ _scanned_files = read_file(_info_path, "scope")
+
+ assert(_ignore_aidl || _scanned_files.aidl == [],
+ "android_aar_prebuilt() aidl not yet supported." +
+ " Implement or use ignore_aidl = true." +
+ " http://crbug.com/644439")
+ assert(_ignore_assets || _scanned_files.assets == [],
+ "android_aar_prebuilt() assets not yet supported." +
+ " Implement or use ignore_assets = true." +
+ " http://crbug.com/643966")
+ assert(
+ !_scanned_files.has_native_libraries ||
+ (_ignore_native_libraries || _extract_native_libraries),
+ "android_aar_prebuilt() contains .so files." +
+ " Please set ignore_native_libraries or extract_native_libraries.")
+ assert(
+ !(_ignore_native_libraries && _extract_native_libraries),
+ "ignore_native_libraries and extract_native_libraries cannot both be set.")
+ assert(!_scanned_files.has_native_libraries ||
+ _scanned_files.native_libraries != [])
+ assert(_scanned_files.has_classes_jar || _scanned_files.subjars == [])
+
+ action_with_pydeps(_unpack_target_name) {
+ script = "//build/android/gyp/aar.py" # Unzips the AAR
+ args = [
+ "extract",
+ rebase_path(invoker.aar_path, root_build_dir),
+ "--output-dir",
+ rebase_path(_output_path, root_build_dir),
+ "--assert-info-file",
+ rebase_path(_info_path, root_build_dir),
+ ]
+ inputs = [
+ invoker.aar_path,
+ ]
+ outputs = [
+ "${_output_path}/AndroidManifest.xml",
+ ]
+
+ if (_scanned_files.has_r_text_file) {
+ # Certain packages, in particular Play Services, have no R.txt even
+ # though its presence is mandated by the AAR spec. Such packages cause
+ # spurious rebuilds if this output is specified unconditionally.
+ outputs += [ "${_output_path}/R.txt" ]
+ }
+
+ if (_scanned_files.resources != []) {
+ outputs += get_path_info(
+ rebase_path(_scanned_files.resources, "", _output_path),
+ "abspath")
+ }
+ if (_scanned_files.has_classes_jar) {
+ outputs += [ "${_output_path}/classes.jar" ]
+ }
+ outputs +=
+ get_path_info(rebase_path(_scanned_files.subjars, "", _output_path),
+ "abspath")
+ if (_scanned_files.has_proguard_flags) {
+ outputs += [ "${_output_path}/proguard.txt" ]
+ }
+
+ if (_extract_native_libraries && _scanned_files.has_native_libraries) {
+ outputs += get_path_info(
+ rebase_path(_scanned_files.native_libraries, "", _output_path),
+ "abspath")
+ }
+ }
+
+ _strip_resources =
+ defined(invoker.strip_resources) && invoker.strip_resources
+ _has_unignored_resources =
+ !_strip_resources &&
+ (_scanned_files.resources != [] || _scanned_files.has_r_text_file)
+
+ # Create the android_resources target for resources.
+ if (_has_unignored_resources || !_scanned_files.is_manifest_empty) {
+ _res_target_name = "${target_name}__res"
+ android_resources(_res_target_name) {
+ forward_variables_from(invoker,
+ [
+ "create_srcjar",
+ "deps",
+ "testonly",
+ "strip_drawables",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_unpack_target_name" ]
+ android_manifest_dep = ":$_unpack_target_name"
+ android_manifest = "${_output_path}/AndroidManifest.xml"
+ resource_dirs = []
+ generated_resource_dirs = []
+ if (!_strip_resources && _scanned_files.resources != []) {
+ generated_resource_dirs += [ "${_output_path}/res" ]
+ }
+ generated_resource_files = []
+ if (!_strip_resources) {
+ generated_resource_files =
+ rebase_path(_scanned_files.resources, "", _output_path)
+ }
+ if (!_strip_resources && _scanned_files.has_r_text_file) {
+ r_text_file = "${_output_path}/R.txt"
+ }
+ v14_skip = true
+ }
+ } else if (defined(invoker.strip_drawables)) {
+ not_needed(invoker, [ "strip_drawables" ])
+ }
+
+ # Create android_java_prebuilt target for extra jars within jars/.
+ _subjar_targets = []
+ foreach(_tuple, _scanned_files.subjar_tuples) {
+ _current_target = "${target_name}__subjar_${_tuple[0]}"
+ _subjar_targets += [ ":$_current_target" ]
+ java_prebuilt(_current_target) {
+ forward_variables_from(invoker,
+ [
+ "jar_excluded_patterns",
+ "jar_included_patterns",
+ "requires_android",
+ ])
+ deps = [
+ ":$_unpack_target_name",
+ ]
+ if (!defined(requires_android)) {
+ requires_android = true
+ }
+ supports_android = true
+ jar_path = "$_output_path/${_tuple[1]}"
+ _base_output_name = get_path_info(jar_path, "name")
+ output_name = "${invoker.target_name}-$_base_output_name"
+ }
+ }
+
+ # Create android_java_prebuilt target for classes.jar.
+ if (_scanned_files.has_classes_jar) {
+ _jar_target_name = "${target_name}__classes"
+ java_prebuilt(_jar_target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "input_jars_paths",
+ "jar_excluded_patterns",
+ "jar_included_patterns",
+ "proguard_configs",
+ "requires_android",
+ "split_compat_class_names",
+ "testonly",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += _subjar_targets + [ ":$_unpack_target_name" ]
+ if (defined(_res_target_name)) {
+ deps += [ ":$_res_target_name" ]
+ }
+ if (!defined(requires_android)) {
+ requires_android = true
+ }
+ supports_android = true
+ jar_path = "$_output_path/classes.jar"
+ output_name = invoker.target_name
+
+ if (_scanned_files.has_proguard_flags) {
+ if (!defined(proguard_configs)) {
+ proguard_configs = []
+ }
+ proguard_configs += [ "$_output_path/proguard.txt" ]
+ }
+ }
+ }
+
+ java_group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ public_deps = [
+ ":$_unpack_target_name",
+ ]
+ deps = []
+ if (defined(_jar_target_name)) {
+ deps += [ ":$_jar_target_name" ]
+
+ # Although subjars are meant to be private, we add them as deps here
+ # because in practice they seem to contain classes required to be in the
+ # classpath.
+ deps += _subjar_targets
+ }
+ if (defined(_res_target_name)) {
+ deps += [ ":$_res_target_name" ]
+ }
+ }
+ }
+
+ # Create an Android application bundle from one base android_apk target,
+# and zero or more associated android_apk targets.
+ #
+ # Variables:
+ # base_module_target: Name of the android_app_bundle_module target
+ # corresponding to the base module for this application bundle. The
+ # bundle file will include the same content in its base module, though in
+ # a slightly different format.
+ #
+ # bundle_base_path: Optional. If set, the bundle will be output to this
+ # directory. Defaults to "$root_build_dir/apks".
+ #
+ # bundle_name: Optional. If set, the bundle will be output to the
+ # filename "${bundle_name}.aab".
+ #
+ # extra_modules: Optional list of scopes, one per extra module used by
+ # this bundle. Each scope must have a 'name' field that specifies the
+ # module name (which cannot be 'base', since this is reserved for the
+#     base module), and a 'module_target' field that specifies the
+#     corresponding module target that the module is built from.
+#     A scope may also have a boolean 'proguard_async' field: set it to
+#     true for asynchronous modules and to false (or leave it undefined)
+#     otherwise, as sketched in the example below.
+ # Async modules are those that are proguarded in a separate build step.
+ # This ensures that changes to these modules do not change the base
+ # module.
+ #
+ # enable_language_splits: Optional. If true, enable APK splits based
+ # on languages.
+ #
+ # sign_bundle: Optional. If true, sign the bundle. Default is false
+ # because signing is very slow, and there is no reason to do it
+ # unless one wants to upload the bundle to the Play Store (e.g.
+ # for official builds).
+ #
+ # keystore_path: optional keystore path, used only when generating APKs.
+ # keystore_name: optional keystore name, used only when generating APKs.
+ # keystore_password: optional keystore password, used only when
+ # generating APKs.
+ #
+#   command_line_flags_file: Optional. If provided, name of the on-device
+ # file that will be used to store command-line arguments. The default
+ # is 'command_line_flags_file', but this is typically redefined to
+ # something more specific for certain bundles (e.g. the Chromium based
+ # APKs use 'chrome-command-line', the WebView one uses
+ # 'webview-command-line').
+ #
+ # proguard_enabled: Optional. True if proguarding is enabled for this
+ # bundle. Default is to enable this only for release builds. Note that
+ # this will always perform synchronized proguarding.
+ #
+ # proguard_jar_path: Optional. Path to custom proguard jar used for
+ # proguarding.
+ #
+#   enable_multidex: Optional. Enable multidexing of optimized module jars
+ # when using synchronized proguarding. Only applies to base module.
+ #
+ # proguard_android_sdk_dep: Optional. android_system_java_prebuilt() target
+ # used as a library jar for synchronized proguarding.
+ #
+ # compress_shared_libraries: Optional. Whether to compress shared libraries
+ # such that they are extracted upon install. Libraries prefixed with
+ # "crazy." are never compressed.
+ #
+ # system_image_locale_whitelist: List of locales that should be included
+ # on system APKs generated from this bundle.
+ #
+ # Example:
+ # android_app_bundle("chrome_public_bundle") {
+ # base_module_target = "//chrome/android:chrome_public_apk"
+ # extra_modules = [
+ # { # NOTE: Scopes require one field per line, and no comma separators.
+ # name = "my_module"
+ # module_target = ":my_module"
+ # },
+ # ]
+ # }
+ #
+ template("android_app_bundle") {
+ _bundle_base_path = "$root_build_dir/apks"
+ if (defined(invoker.bundle_base_path)) {
+ _bundle_base_path = invoker.bundle_base_path
+ }
+
+ _bundle_name = target_name
+ if (defined(invoker.bundle_name)) {
+ _bundle_name = invoker.bundle_name
+ }
+ _bundle_path = "$_bundle_base_path/${_bundle_name}.aab"
+ _rebased_bundle_path = rebase_path(_bundle_path, root_build_dir)
+
+ _base_target_name = get_label_info(invoker.base_module_target, "name")
+ _base_target_gen_dir =
+ get_label_info(invoker.base_module_target, "target_gen_dir")
+ _base_module_build_config =
+ "$_base_target_gen_dir/${_base_target_name}.build_config"
+ _base_module_build_config_target =
+ "${invoker.base_module_target}$build_config_target_suffix"
+ _rebased_base_module_build_config =
+ rebase_path(_base_module_build_config, root_build_dir)
+
+ _sync_modules = [
+ {
+ name = "base"
+ module_target = invoker.base_module_target
+ build_config = _base_module_build_config
+ build_config_target = _base_module_build_config_target
+ },
+ ]
+
+ _async_modules = []
+
+ _proguard_enabled =
+ defined(invoker.proguard_enabled) && invoker.proguard_enabled
+ _enable_multidex =
+ !defined(invoker.enable_multidex) || invoker.enable_multidex
+
+ if (!_proguard_enabled && defined(invoker.min_sdk_version)) {
+ not_needed(invoker, [ "min_sdk_version" ])
+ }
+
+ # Prevent "unused variable".
+ not_needed([ "_enable_multidex" ])
+
+ assert(_proguard_enabled || !defined(invoker.enable_multidex),
+ "Bundle only adds dexing step if proguarding is enabled.")
+
+ if (defined(invoker.extra_modules)) {
+ _module_count = 0
+ not_needed([ "_module_count" ])
+
+ # Define unique package for each async proguarding run.
+ _async_package_number = 1
+
+ not_needed([ "_async_package_number" ])
+
+ foreach(_module, invoker.extra_modules) {
+ _module_count += 1
+ assert(defined(_module.name),
+ "Missing 'name' field for extra module #${_module_count}.")
+ assert(_module.name != "base",
+ "Module name 'base' is reserved for the main bundle module")
+ assert(
+ defined(_module.module_target),
+ "Missing 'module_target' field for extra module ${_module.name}.")
+ _module_target = _module.module_target
+ _module_target_name = get_label_info(_module_target, "name")
+ _module_target_gen_dir =
+ get_label_info(_module_target, "target_gen_dir")
+ _module.build_config =
+ "$_module_target_gen_dir/${_module_target_name}.build_config"
+ _module.build_config_target =
+ "$_module_target$build_config_target_suffix"
+
+ if (defined(_module.proguard_async) && _module.proguard_async) {
+ if (_proguard_enabled) {
+ # Use asynchronous proguarding for async modules.
+ # TODO(crbug.com/938635): Combine async module mapping paths with the sync one.
+ _async_proguard_mapping_path =
+ "${_bundle_path}_${_module.name}.mapping"
+
+ _dex_zip = "${target_out_dir}/${target_name}/${target_name}_${_module.name}_dex.zip"
+ _module.dex_path = _dex_zip
+
+ # Give unique name to each async dex target using module name.
+ _async_dex_target = "${target_name}_${_module.name}_dex"
+
+ dex(_async_dex_target) {
+ enable_multidex = _enable_multidex
+ proguard_enabled = true
+ proguard_mapping_path = _async_proguard_mapping_path
+ forward_variables_from(invoker,
+ [
+ "proguard_jar_path",
+ "min_sdk_version",
+ ])
+ build_config = _module.build_config
+ repackage_classes = "ap${_async_package_number}"
+
+ deps = [
+ _module.module_target,
+ ]
+
+ output = _dex_zip
+ }
+ _module.async_dex_target = _async_dex_target
+ _async_package_number += 1
+ }
+
+ _async_modules += [ _module ]
+ } else {
+ _sync_modules += [ _module ]
+ }
+ }
+ }
+
+ # Make build config, which is required for synchronized proguarding.
+ _sync_module_targets = []
+ foreach(_module, _sync_modules) {
+ _sync_module_targets += [ _module.module_target ]
+ }
+ _build_config = "$target_gen_dir/${target_name}.build_config"
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ _build_config_target = "$target_name$build_config_target_suffix"
+ if (defined(invoker.proguard_android_sdk_dep)) {
+ proguard_android_sdk_dep_ = invoker.proguard_android_sdk_dep
+ } else {
+ proguard_android_sdk_dep_ = "//third_party/android_sdk:android_sdk_java"
+ }
+ write_build_config(_build_config_target) {
+ # We don't want async modules to be proguarded synchronously, so we leave
+ # them out of possible_config_deps.
+ type = "android_app_bundle"
+ possible_config_deps =
+ _sync_module_targets + [ proguard_android_sdk_dep_ ]
+ build_config = _build_config
+ proguard_enabled = _proguard_enabled
+ }
+
+ if (_proguard_enabled) {
+ _proguard_mapping_path = "${_bundle_path}.mapping"
+ _unsplit_dex_zip =
+ "${target_gen_dir}/${target_name}/${target_name}__unsplit_dex.zip"
+ _unsplit_dex_target = "${target_name}__unsplit_dex"
+ dex(_unsplit_dex_target) {
+ enable_multidex = _enable_multidex
+ proguard_enabled = true
+ proguard_mapping_path = _proguard_mapping_path
+ forward_variables_from(invoker,
+ [
+ "proguard_jar_path",
+ "min_sdk_version",
+ ])
+ build_config = _build_config
+
+ deps = _sync_module_targets + [ ":$_build_config_target" ]
+ output = _unsplit_dex_zip
+ }
+
+ _dexsplitter_target = "${_unsplit_dex_target}__dexsplitter"
+ dexsplitter(_dexsplitter_target) {
+ input_dex_zip = _unsplit_dex_zip
+ proguard_mapping = _proguard_mapping_path
+ all_modules = _sync_modules
+ deps = [
+ ":${_unsplit_dex_target}",
+ ]
+ }
+ }
+
+ # Merge async and sync module scopes.
+ _all_modules = _sync_modules + _async_modules
+
+ _all_create_module_targets = []
+ _all_module_zip_paths = []
+ _all_module_build_configs = []
+ foreach(_module, _all_modules) {
+ _module_target = _module.module_target
+ _module_build_config = _module.build_config
+ _module_build_config_target = _module.build_config_target
+
+ if (!_proguard_enabled) {
+ _dex_target_for_module = "${_module_target}__final_dex"
+ } else if (defined(_module.dex_path)) {
+ _dex_target_for_module = ":${_module.async_dex_target}"
+ } else {
+ _dex_target_for_module = ":$_dexsplitter_target"
+ }
+
+ # Generate one module .zip file per bundle module.
+ #
+ # Important: the bundle tool uses the module's zip filename as
+    # the internal module name inside the final bundle; in other words,
+ # this file *must* be named ${_module.name}.zip
+ _create_module_target = "${target_name}__${_module.name}__create"
+ _module_zip_path = "$target_gen_dir/$target_name/${_module.name}.zip"
+
+ create_android_app_bundle_module(_create_module_target) {
+ build_config = _module_build_config
+ module_zip_path = _module_zip_path
+
+      # If the module is async, use its dex_path directly rather than the
+      # build config FileArg.
+ if (defined(_module.dex_path)) {
+ dex_path = _module.dex_path
+ }
+
+ deps = [
+ _dex_target_for_module,
+ _module_build_config_target,
+ _module_target,
+ ]
+ }
+
+ _all_create_module_targets += [
+ ":$_create_module_target",
+ _module_build_config_target,
+ "${_module_target}__compile_resources",
+ ]
+ _all_module_zip_paths += [ _module_zip_path ]
+ _all_module_build_configs += [ _module_build_config ]
+ }
+
+ _all_rebased_module_zip_paths =
+ rebase_path(_all_module_zip_paths, root_build_dir)
+
+ _sign_bundle = defined(invoker.sign_bundle) && invoker.sign_bundle
+
+ _enable_language_splits = defined(invoker.enable_language_splits) &&
+ invoker.enable_language_splits
+
+ _split_dimensions = []
+ if (_enable_language_splits) {
+ _split_dimensions += [ "language" ]
+ }
+
+ _keystore_path = android_keystore_path
+ _keystore_password = android_keystore_password
+ _keystore_name = android_keystore_name
+
+ if (defined(invoker.keystore_path)) {
+ _keystore_path = invoker.keystore_path
+ _keystore_password = invoker.keystore_password
+ _keystore_name = invoker.keystore_name
+ }
+
+ _rebased_keystore_path = rebase_path(_keystore_path, root_build_dir)
+
+ if (_sign_bundle) {
+ # For now, the same keys are used to sign the bundle and the set of
+ # generated APKs. In the future, signing the bundle may require a
+ # different set of keys.
+ _bundle_keystore_name = _keystore_name
+ }
+
+ _bundle_target_name = "${target_name}__bundle"
+ action_with_pydeps(_bundle_target_name) {
+ script = "//build/android/gyp/create_app_bundle.py"
+ inputs = _all_module_zip_paths + _all_module_build_configs
+ outputs = [
+ _bundle_path,
+ ]
+ data = [
+ _bundle_path,
+ ]
+ deps = _all_create_module_targets + [ ":$_build_config_target" ]
+ args = [
+ "--out-bundle=$_rebased_bundle_path",
+ "--rtxt-out-path=$_rebased_bundle_path.R.txt",
+ "--module-zips=$_all_rebased_module_zip_paths",
+ ]
+ if (_sign_bundle) {
+ args += [
+ "--keystore-path",
+ _rebased_keystore_path,
+ "--keystore-password",
+ _keystore_password,
+ "--key-name",
+ _bundle_keystore_name,
+ ]
+ }
+ if (_split_dimensions != []) {
+ args += [ "--split-dimensions=$_split_dimensions" ]
+ }
+ if (defined(invoker.compress_shared_libraries) &&
+ invoker.compress_shared_libraries) {
+ args += [ "--compress-shared-libraries" ]
+ }
+
+ if (_enable_language_splits) {
+ args += [
+ "--base-whitelist-rtxt-path=@FileArg(" + "${_rebased_base_module_build_config}:deps_info:base_whitelist_rtxt_path)",
+ "--base-module-rtxt-path=@FileArg(" + "${_rebased_base_module_build_config}:deps_info:module_rtxt_path)",
+ ]
+ }
+
+ foreach(build_config, _all_module_build_configs) {
+ _rebased_build_config = rebase_path(build_config, root_build_dir)
+ args += [
+ "--uncompressed-assets=@FileArg(" +
+ "$_rebased_build_config:uncompressed_assets)",
+ "--rtxt-in-paths=@FileArg(" +
+ "$_rebased_build_config:deps_info:module_rtxt_path)",
+ ]
+ }
+ }
+
+ # Create size info files for targets that care about size
+ # (have proguard enabled).
+ if (_proguard_enabled) {
+ # Merge all module targets to obtain size info files for all targets.
+ _all_module_targets = _sync_module_targets
+ foreach(_async_module, _async_modules) {
+ _all_module_targets += [ _async_module.module_target ]
+ }
+
+ _size_info_target = "${target_name}__size_info"
+ create_size_info_files(_size_info_target) {
+ name = "$_bundle_name.aab"
+ deps = _all_module_targets + [ ":$_build_config_target" ]
+ module_build_configs = _all_module_build_configs
+ }
+ }
+
+ # Generate a wrapper script for the bundle.
+ _android_aapt2_path = android_sdk_tools_bundle_aapt2
+
+ _bundle_apks_path = "$_bundle_base_path/$_bundle_name.apks"
+ _bundle_wrapper_script_dir = "$root_build_dir/bin"
+ _bundle_wrapper_script_path = "$_bundle_wrapper_script_dir/$target_name"
+
+ action_with_pydeps("${target_name}__wrapper_script") {
+ script = "//build/android/gyp/create_bundle_wrapper_script.py"
+ inputs = [
+ _base_module_build_config,
+ ]
+ outputs = [
+ _bundle_wrapper_script_path,
+ ]
+
+ # Telemetry for bundles uses the wrapper script for installation.
+ data = [
+ _bundle_wrapper_script_path,
+ ]
+
+ deps = [
+ _base_module_build_config_target,
+ ]
+ args = [
+ "--script-output-path",
+ rebase_path(_bundle_wrapper_script_path, root_build_dir),
+ "--package-name=@FileArg(" +
+ "$_rebased_base_module_build_config:deps_info:package_name)",
+ "--aapt2",
+ rebase_path(_android_aapt2_path, root_build_dir),
+ "--bundle-path",
+ _rebased_bundle_path,
+ "--bundle-apks-path",
+ rebase_path(_bundle_apks_path, root_build_dir),
+ "--target-cpu=$target_cpu",
+ "--keystore-path",
+ _rebased_keystore_path,
+ "--keystore-password",
+ _keystore_password,
+ "--key-name",
+ _keystore_name,
+ ]
+ if (defined(invoker.system_image_locale_whitelist)) {
+ args += [
+ "--system-image-locales=${invoker.system_image_locale_whitelist}",
+ ]
+ }
+ if (defined(invoker.command_line_flags_file)) {
+ args += [
+ "--command-line-flags-file",
+ invoker.command_line_flags_file,
+ ]
+ }
+
+ # TODO(crbug.com/938635): Combine async module mapping paths with the sync one.
+ if (_proguard_enabled) {
+ args += [
+ "--proguard-mapping-path",
+ rebase_path(_proguard_mapping_path, root_build_dir),
+ ]
+ }
+ }
+
+ group(target_name) {
+ public_deps = [
+ ":${target_name}__bundle",
+ ":${target_name}__wrapper_script",
+ ]
+ if (defined(_size_info_target)) {
+ public_deps += [ ":$_size_info_target" ]
+ }
+ }
+ }
+
+ # Create an .apks file from an .aab file. The .apks file will contain the
+ # minimal set of .apk files needed for tracking binary size.
+ # The file will be created at "$bundle_path_without_extension.minimal.apks".
+ #
+ # Variables:
+ # bundle_path: Path to the input .aab file.
+ #
+ # Example:
+ # create_app_bundle_minimal_apks("minimal_apks") {
+ # deps = [
+ # ":bundle_target",
+ # ]
+ # bundle_path = "$root_build_dir/apks/Bundle.aab"
+ # }
+ template("create_app_bundle_minimal_apks") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ script = "//build/android/gyp/create_app_bundle_minimal_apks.py"
+ _dir = get_path_info(invoker.bundle_path, "dir")
+ _name = get_path_info(invoker.bundle_path, "name")
+ _output_path = "$_dir/$_name.minimal.apks"
+ outputs = [
+ _output_path,
+ ]
+ inputs = [
+ invoker.bundle_path,
+ ]
+ args = [
+ "--bundle",
+ rebase_path(invoker.bundle_path, root_build_dir),
+ "--output",
+ rebase_path(_output_path, root_build_dir),
+ "--aapt2-path",
+ rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir),
+ "--keystore-path",
+ rebase_path(android_keystore_path, root_build_dir),
+ "--keystore-name",
+ android_keystore_name,
+ "--keystore-password",
+ android_keystore_password,
+ ]
+ }
+ }
+}
+
+# Generate an Android resources target that contains localized strings
+# describing the current locale used by the Android framework to display
+# UI strings. These are used by
+# org.chromium.chrome.browser.ChromeLocalizationUtils.
+#
+# Variables:
+# ui_locales: List of Chromium locale names to generate resources for.
+#
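+# Example (an illustrative sketch; the target and locale names are
+# assumptions):
+#   generate_ui_locale_resources("ui_locale_resources") {
+#     ui_locales = [ "en-US", "fr" ]
+#   }
+#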
+template("generate_ui_locale_resources") {
+ _generating_target_name = "${target_name}__generate"
+ _rebased_output_zip_path = rebase_path(target_gen_dir, root_gen_dir)
+ _output_zip = "${root_out_dir}/resource_zips/${_rebased_output_zip_path}/" +
+ "${target_name}.zip"
+
+ _locales = invoker.ui_locales
+ _depfile = "$target_gen_dir/$target_name.d"
+
+ action(_generating_target_name) {
+ script = "//build/android/gyp/create_ui_locale_resources.py"
+ depfile = _depfile
+ outputs = [
+ _output_zip,
+ ]
+ args = [
+ "--locale-list=$_locales",
+ "--depfile",
+ rebase_path(_depfile, root_build_dir),
+ "--output-zip",
+ rebase_path(_output_zip, root_build_dir),
+ ]
+ }
+
+ android_generated_resources(target_name) {
+ generating_target_name = ":$_generating_target_name"
+ generated_resources_zip = _output_zip
+ }
+}
diff --git a/deps/v8/build/config/android/sdk.gni b/deps/v8/build/config/android/sdk.gni
new file mode 100644
index 0000000000..1f1ebc5d3f
--- /dev/null
+++ b/deps/v8/build/config/android/sdk.gni
@@ -0,0 +1,10 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The default SDK release used by public builds. Value may differ in
+# internal builds.
+default_android_sdk_release = "p"
+
+# SDK releases against which public builds are supported.
+public_sdk_releases = [ "p" ]
diff --git a/deps/v8/build/config/arm.gni b/deps/v8/build/config/arm.gni
new file mode 100644
index 0000000000..29434eba92
--- /dev/null
+++ b/deps/v8/build/config/arm.gni
@@ -0,0 +1,127 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "arm" contexts, where
+# ARM code is being compiled. But they can also be relevant in other
+# contexts, when the code changes its behavior based on the CPU it
+# generates code for.
+if (current_cpu == "arm" || v8_current_cpu == "arm") {
+ declare_args() {
+ # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+ # platforms.
+ arm_version = 7
+
+ # The ARM architecture. This will be a string like "armv6" or "armv7-a".
+ # An empty string means to use the default for the arm_version.
+ arm_arch = ""
+
+ # The ARM floating point hardware. This will be a string like "neon" or
+ # "vfpv3". An empty string means to use the default for the arm_version.
+ arm_fpu = ""
+
+ # The ARM floating point mode. This is either the string "hard", "soft", or
+ # "softfp". An empty string means to use the default one for the
+ # arm_version.
+ arm_float_abi = ""
+
+ # The ARM variant-specific tuning mode. This will be a string like "armv6"
+ # or "cortex-a15". An empty string means to use the default for the
+ # arm_version.
+ arm_tune = ""
+
+ # Whether to use the neon FPU instruction set or not.
+ arm_use_neon = ""
+
+ # Whether to enable optional NEON code paths.
+ arm_optionally_use_neon = false
+
+    # Thumb is a reduced instruction set available on some ARM processors
+    # that provides increased code density.
+ arm_use_thumb = true
+ }
+
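+  # Example args.gn overrides (an illustrative sketch, not recommended
+  # defaults):
+  #   arm_version = 8
+  #   arm_float_abi = "hard"
+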
+ assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+ arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+ if (arm_use_neon == "") {
+ if (current_os == "linux" && target_cpu != v8_target_cpu) {
+ # Don't use neon on V8 simulator builds as a default.
+ arm_use_neon = false
+ } else {
+ arm_use_neon = true
+ }
+ }
+
+ if (arm_version == 6) {
+ if (arm_arch == "") {
+ arm_arch = "armv6"
+ }
+ if (arm_tune != "") {
+ arm_tune = ""
+ }
+ if (arm_float_abi == "") {
+ arm_float_abi = "softfp"
+ }
+ if (arm_fpu == "") {
+ arm_fpu = "vfp"
+ }
+ arm_use_thumb = false
+ arm_use_neon = false
+ } else if (arm_version == 7) {
+ if (arm_arch == "") {
+ arm_arch = "armv7-a"
+ }
+ if (arm_tune == "") {
+ arm_tune = "generic-armv7-a"
+ }
+
+ if (arm_float_abi == "") {
+ if (current_os == "android" || target_os == "android") {
+ arm_float_abi = "softfp"
+ } else if (current_os == "linux" && target_cpu != v8_target_cpu) {
+ # Default to the same as Android for V8 simulator builds.
+ arm_float_abi = "softfp"
+ } else {
+ arm_float_abi = "hard"
+ }
+ }
+
+ if (arm_fpu == "") {
+ if (arm_use_neon) {
+ arm_fpu = "neon"
+ } else {
+ arm_fpu = "vfpv3-d16"
+ }
+ }
+ } else if (arm_version == 8) {
+ if (arm_arch == "") {
+ arm_arch = "armv8-a"
+ }
+ if (arm_tune == "") {
+ arm_tune = "generic-armv8-a"
+ }
+
+ if (arm_float_abi == "") {
+ if (current_os == "android" || target_os == "android") {
+ arm_float_abi = "softfp"
+ } else {
+ arm_float_abi = "hard"
+ }
+ }
+
+ if (arm_fpu == "") {
+ if (arm_use_neon) {
+ arm_fpu = "neon"
+ } else {
+ arm_fpu = "vfpv3-d16"
+ }
+ }
+ }
+} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
+ # arm64 supports only "hard".
+ arm_float_abi = "hard"
+ arm_use_neon = true
+}
diff --git a/deps/v8/build/config/c++/BUILD.gn b/deps/v8/build/config/c++/BUILD.gn
new file mode 100644
index 0000000000..226e89dc4a
--- /dev/null
+++ b/deps/v8/build/config/c++/BUILD.gn
@@ -0,0 +1,117 @@
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//buildtools/deps_revisions.gni")
+
+assert(use_custom_libcxx, "should only be used if use_custom_libcxx is set")
+
+declare_args() {
+ # lldb pretty printing only works when libc++ is built in the __1 (or __ndk1)
+ # namespaces. For pretty printing to work out-of-the-box on Mac (where lldb
+ # is primarily used), this flag is set to false to build with the __1
+ # namespace (to maintain ABI compatibility, this implies building without
+  # _LIBCPP_ABI_UNSTABLE). This is not necessary on non-component builds,
+  # where we leave the ABI version set to __1 since libc++ symbols are not
+  # exported.
+ # TODO(thomasanderson): Set this to true by default once rL352899 is available
+ # in MacOS's lldb.
+ libcxx_abi_unstable = !((is_mac || is_ios) && is_debug && is_component_build)
+}
+
+# TODO(xiaohuic): https://crbug.com/917533 Crashes on internal ChromeOS build.
+# Do this unconditionally once the underlying problem is fixed.
+if (is_chromeos && is_chrome_branded) {
+ libcxx_abi_unstable = false
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is specific to libc++. Please see that target for advice on what should
+# go in :runtime_library vs. :compiler.
+config("runtime_library") {
+ cflags = []
+ cflags_cc = []
+ defines = []
+ ldflags = []
+ libs = []
+
+ if (libcxx_abi_unstable) {
+ defines += [ "_LIBCPP_ABI_UNSTABLE" ]
+ }
+
+ if (is_component_build) {
+ # In component builds, symbols from libc++.so are exported for all DSOs to
+ # use. If the system libc++ gets loaded (indirectly through a system
+ # library), then it will conflict with our libc++.so. Add a custom ABI
+ # version if we're building with _LIBCPP_ABI_UNSTABLE to avoid conflicts.
+ #
+ # Windows doesn't need to set _LIBCPP_ABI_VERSION since there's no system
+ # C++ library we could conflict with.
+ if (libcxx_abi_unstable && !is_win) {
+ defines += [ "_LIBCPP_ABI_VERSION=Cr" ]
+ }
+ } else {
+ # Don't leak any symbols on a static build.
+ defines += [ "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS" ]
+ if (!export_libcxxabi_from_executables && !is_win) {
+ defines += [ "_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS" ]
+ }
+ }
+
+ defines += [ "_LIBCPP_ENABLE_NODISCARD" ]
+
+ if (is_win) {
+ # Intentionally not using libc++abi on Windows because libc++abi only
+    # implements the Itanium C++ ABI, and not the Microsoft ABI, which we
+    # must use on Windows to interoperate correctly with COM, among other
+    # things.
+ assert(!export_libcxxabi_from_executables,
+ "Don't use libcxxabi on Windows.")
+
+ cflags_cc +=
+ [ "-I" + rebase_path("$libcxx_prefix/include", root_build_dir) ]
+
+ # Prevent libc++ from embedding linker flags to try to automatically link
+ # against its runtime library. This is unnecessary with our build system,
+ # and can also result in build failures if libc++'s name for a library
+ # does not match ours.
+ defines += [ "_LIBCPP_NO_AUTO_LINK" ]
+
+ configs = [ "//tools/win/DebugVisualizers:libc++" ]
+ } else {
+ cflags_cc += [
+ "-nostdinc++",
+ "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
+ "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
+ ]
+
+ defines += [ "CR_LIBCXX_REVISION=$libcxx_svn_revision" ]
+
+ # Make sure we don't link against the system libstdc++ or libc++.
+ if (is_clang) {
+ # //build/config/android:runtime_library adds -nostdlib, which suppresses
+ # linking against all system libraries. -nostdlib++ would be redundant,
+ # and would generate an unused warning in this case.
+ if (!is_android) {
+ ldflags += [ "-nostdlib++" ]
+ }
+ } else {
+ # Gcc has a built-in abs() definition with default visibility.
+ # If it was not disabled, it would conflict with libc++'s abs()
+ # with hidden visibility.
+ cflags += [ "-fno-builtin-abs" ]
+
+ ldflags += [ "-nodefaultlibs" ]
+
+ # Unfortunately, there's no way to disable linking against just libc++
+      # (gcc doesn't have -nostdlib++:
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83931); -nodefaultlibs
+ # removes all of the default libraries, so add back the ones that we need.
+ libs += [
+ "c",
+ "gcc_s",
+ "m",
+ "rt",
+ ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/c++/c++.gni b/deps/v8/build/config/c++/c++.gni
new file mode 100644
index 0000000000..4deaf03575
--- /dev/null
+++ b/deps/v8/build/config/c++/c++.gni
@@ -0,0 +1,64 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+ # Use in-tree libc++ (buildtools/third_party/libc++ and
+ # buildtools/third_party/libc++abi) instead of the system C++ library for C++
+ # standard library support.
+ # Don't check in changes that set this to false for more platforms; doing so
+ # is not supported.
+ use_custom_libcxx =
+ is_fuchsia || is_android || is_mac || is_ios ||
+ (is_linux &&
+ (!is_chromeos || default_toolchain != "//build/toolchain/cros:target"))
+
+ # Use libc++ instead of stdlibc++ when using the host_cpu toolchain, even if
+ # use_custom_libcxx is false. This is useful for cross-compiles where a custom
+ # toolchain for the target_cpu has been set as the default toolchain, but
+ # use_custom_libcxx should still be true when building for the host. The
+ # expected usage is to set use_custom_libcxx=false and
+ # use_custom_libcxx_for_host=true in the passed in buildargs.
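+  # For example, in args.gn (an illustrative sketch):
+  #   use_custom_libcxx = false
+  #   use_custom_libcxx_for_host = true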
+ use_custom_libcxx_for_host = false
+}
+
+use_custom_libcxx =
+ use_custom_libcxx || (use_custom_libcxx_for_host && current_cpu == host_cpu)
+use_custom_libcxx = use_custom_libcxx && !is_nacl
+
+declare_args() {
+ # WARNING: Setting this to a non-default value is highly discouraged.
+ # If true, libc++ will be built as a shared library; otherwise libc++ will be
+ # linked statically. Setting this to something other than the default is
+ # unsupported and can be broken by libc++ rolls. Note that if this is set to
+ # true, you must also set libcxx_abi_unstable=false, which is bad for
+ # performance and memory use.
+ libcxx_is_shared = use_custom_libcxx && is_component_build
+}
+
+# libc++abi needs to be exported from executables to be picked up by shared
+# libraries on certain instrumented builds.
+export_libcxxabi_from_executables =
+ use_custom_libcxx && !is_win && !is_component_build &&
+ (is_asan || is_ubsan_vptr)
+
+# On Android, many shared libraries get loaded from the context of a JRE. In
+# this case, there's no "main executable" to export libc++abi from. We could
+# export libc++abi from each "toplevel" shared library instead, but that would
+# require adding an explicit dependency for each one, and might introduce
+# subtle, hard-to-fix problems down the line if the dependency is missing.
+#
+# export_libcxxabi_from_executables was added to avoid having an RPATH set in
+# static sanitizer builds just for executables to find libc++. But on Android,
+# the Bionic dynamic loader doesn't even look at RPATH; instead, LD_LIBRARY_PATH
+# is set for tests. Because of this, we make libc++ a shared library on android
+# since it should get loaded properly.
+if (is_android && export_libcxxabi_from_executables) {
+ export_libcxxabi_from_executables = false
+ libcxx_is_shared = true
+}
+
+libcxx_prefix = "//buildtools/third_party/libc++/trunk"
+libcxxabi_prefix = "//buildtools/third_party/libc++abi/trunk"
diff --git a/deps/v8/build/config/chrome_build.gni b/deps/v8/build/config/chrome_build.gni
new file mode 100644
index 0000000000..4bb4a043cb
--- /dev/null
+++ b/deps/v8/build/config/chrome_build.gni
@@ -0,0 +1,26 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Select the desired branding flavor. False means normal Chromium branding,
+ # true means official Google Chrome branding (requires extra Google-internal
+ # resources).
+ is_chrome_branded = false
+
+  # Break chrome.dll into multiple pieces based on process type. Only available
+ # on Windows.
+ is_multi_dll_chrome = is_win && !is_component_build
+
+ # Turn this on to generate order files. See
+ # https://chromium.googlesource.com/chromium/src/+/master/docs/win_order_files.md
+ generate_order_files = false
+}
+
+# Refers to the subdirectory for branding in various places including
+# chrome/app/theme.
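+# For example, a path might be formed as (an illustrative sketch; the file
+# name is an assumption):
+#   "//chrome/app/theme/$branding_path_component/product_logo.png"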
+if (is_chrome_branded) {
+ branding_path_component = "google_chrome"
+} else {
+ branding_path_component = "chromium"
+}
diff --git a/deps/v8/build/config/chromecast/BUILD.gn b/deps/v8/build/config/chromecast/BUILD.gn
new file mode 100644
index 0000000000..c8b2989c42
--- /dev/null
+++ b/deps/v8/build/config/chromecast/BUILD.gn
@@ -0,0 +1,85 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+
+assert(is_chromecast)
+
+config("static_config") {
+ if (!is_clang) {
+ ldflags = [
+ # Don't allow visible symbols from libraries that contain
+ # assembly code with symbols that aren't hidden properly.
+ # http://b/26390825
+ "-Wl,--exclude-libs=libffmpeg.a",
+ ]
+
+ if (!is_android) {
+ ldflags += [
+ # We want to statically link libstdc++/libgcc on Linux.
+ # (On Android, libstdc++ and libgcc aren't used.)
+ "-static-libstdc++",
+ "-static-libgcc",
+ ]
+ }
+ }
+}
+
+config("ldconfig") {
+ visibility = [ ":*" ]
+
+ # Chromecast executables depend on several shared libraries in
+ # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
+ # This is explicitly disabled in Chrome for security reasons (see comments in
+  # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEMs may
+ # override the default libraries shipped in the Cast receiver package.
+ ldflags = [
+ "-Wl,-rpath=/oem_cast_shlib",
+ "-Wl,-rpath=\$ORIGIN/lib",
+ "-Wl,-rpath=\$ORIGIN",
+ ]
+
+ # Binaries which don't live in the same directory as Chrome component
+ # libraries may still depend on them. Explicitly add the component library
+ # directory to the rpath for the component build.
+ if (is_component_build) {
+ ldflags += [ "-Wl,-rpath=/system/chrome" ]
+ }
+}
+
+config("executable_config") {
+ configs = [ ":ldconfig" ]
+
+ if (!is_clang && current_cpu == "arm") {
+ ldflags = [
+      # Export libstdc++ and libgcc symbols to force shlibs to refer to these
+      # symbols from the executable.
+      "-Wl,--export-dynamic",
+
+      "-lm", # libstdc++ requires math.h
+
+      # In case we redefined libstdc++ symbols (e.g. tc_malloc)
+ "-Wl,--allow-multiple-definition",
+
+ "-Wl,--whole-archive",
+ "-l:libstdc++.a",
+ "-l:libgcc.a",
+ "-Wl,--no-whole-archive",
+ ]
+
+ # Despite including libstdc++/libgcc archives, we still need to specify
+ # static linking for them in order to prevent the executable from having a
+ # dynamic dependency on them.
+ configs += [ ":static_config" ]
+ }
+}
+
+# Shared libraries should not have RPATH or RUNPATH set. This allows the
+# shared libs to inherit RPATH from the parent executable that is loading
+# the shared library. (See internal b/37514052 for more details.)
+config("shared_library_config") {
+ if (current_cpu == "arm") {
+ configs = [ ":static_config" ]
+ }
+}
diff --git a/deps/v8/build/config/chromecast_build.gni b/deps/v8/build/config/chromecast_build.gni
new file mode 100644
index 0000000000..f93f40b1a1
--- /dev/null
+++ b/deps/v8/build/config/chromecast_build.gni
@@ -0,0 +1,79 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The args declared in this file should be referenced by components outside of
+# //chromecast. Args needed only in //chromecast should be declared in
+# //chromecast/chromecast.gni.
+declare_args() {
+ # Set this true for a Chromecast build. Chromecast builds are supported on
+ # Linux and Android.
+ is_chromecast = false
+
+ # If true, IS_CAST_DEBUG_BUILD() will evaluate to 1 in version.h. Otherwise,
+ # it will evaluate to 0. Overriding this when is_debug=false is useful for
+ # doing engineering builds.
+ cast_is_debug = is_debug
+
+ # chromecast_branding is used to include or exclude Google-branded components.
+ # Set it to "public" for a Chromium build.
+ chromecast_branding = "public"
+
+ # Set this true for an audio-only Chromecast build.
+ is_cast_audio_only = false
+
+ # If true, use cast CMA backend instead of default chromium media pipeline.
+ # TODO(sanfin): Remove this flag when all builds enable CMA.
+ is_cast_using_cma_backend = true
+}
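+
+# Example args.gn for an audio-only Cast build (an illustrative sketch):
+#   is_chromecast = true
+#   is_cast_audio_only = true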
+
+# Note(slan): This arg depends on the value of is_chromecast, and thus must be
+# declared in a separate block. These blocks can be combined when/if
+# crbug.com/542846 is resolved.
+declare_args() {
+  # True if the Chromecast build targets linux desktop. This type of build
+  # is useful for testing and development, but currently supports only a subset
+  # of Cast functionality. Though this defaults to true for x86 Linux devices,
+  # it should be overridden manually for an embedded x86 build.
+ # TODO(slan): Remove instances of this when x86 is a fully supported platform.
+ is_cast_desktop_build = is_chromecast && target_os == "linux" &&
+ (target_cpu == "x86" || target_cpu == "x64")
+}
+
+declare_args() {
+ # True to enable the cast renderer. It is enabled by default for non-android
+ # builds.
+ enable_cast_renderer =
+ is_chromecast && is_cast_using_cma_backend && !is_android
+}
+
+# Configures media options for cast. See media/media_options.gni
+cast_mojo_media_services = []
+cast_mojo_media_host = "none"
+
+if (enable_cast_renderer) {
+ cast_mojo_media_services = [
+ "cdm",
+ "renderer",
+ ]
+ cast_mojo_media_host = "browser"
+} else if (is_android) {
+ cast_mojo_media_services = [
+ "cdm",
+ "audio_decoder",
+ ]
+ if (is_cast_audio_only) {
+ cast_mojo_media_host = "browser"
+ } else {
+ cast_mojo_media_services += [ "video_decoder" ]
+ cast_mojo_media_host = "gpu"
+ }
+}
+
+# Assert that Chromecast is being built for a supported platform.
+assert(is_linux || is_android || is_fuchsia || !is_chromecast,
+ "Chromecast builds are not supported on $target_os")
+
+# Assert that is_cast_audio_only and is_cast_desktop_build are both false on a
+# non-Chromecast build.
+assert(is_chromecast || (!is_cast_audio_only && !is_cast_desktop_build))
diff --git a/deps/v8/build/config/chromeos/rules.gni b/deps/v8/build/config/chromeos/rules.gni
new file mode 100644
index 0000000000..0132d02cf4
--- /dev/null
+++ b/deps/v8/build/config/chromeos/rules.gni
@@ -0,0 +1,235 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_chromeos)
+
+declare_args() {
+ cros_board = ""
+ cros_sdk_version = ""
+}
+declare_args() {
+ # Determines if we're running tests on VMs or on devices.
+ # TODO(crbug.com/866062): Is there a better way to do this?
+ cros_is_vm = cros_board == "amd64-generic"
+}
+
+# Ensure that if one is set, the other is as well.
+assert(cros_board == "" == (cros_sdk_version == ""))
+
+# The build is using the Simple Chrome "cros chrome-sdk" to target real hardware
+# or a VM, not linux-chromeos.
+# NOTE: Most test targets in Chrome expect to run under linux-chromeos, so some
+# have compile-time asserts that intentionally fail when this build flag is set.
+# Build and run the tests for linux-chromeos instead.
+# https://chromium.googlesource.com/chromium/src/+/master/docs/chromeos_build_instructions.md
+# https://chromium.googlesource.com/chromiumos/docs/+/master/simple_chrome_workflow.md
+is_cros_chrome_sdk = cros_board != ""
+
+# Creates a script at $generated_script that can be used to launch a cros VM
+# and optionally run a test within it.
+# Args:
+# test_exe: Name of test binary located in the out dir. This will get copied
+# to the VM and executed there.
+# tast_attr_expr: Tast expression to pass to local_test_runner on the VM.
+# tast_tests: List of Tast tests to run on the VM. Note that when this is
+# specified, the target name used to invoke this template will be
+# designated as the "name" of this test and will primarly used for test
+# results tracking and displaying (eg: flakiness dashboard).
+# generated_script: Path to place the generated script.
+# deploy_chrome: If true, deploys a locally built chrome located in the root
+# build dir to the VM after launching it.
+# runtime_deps_file: Path to file listing runtime deps for the test. If set,
+# all files listed will be copied to the VM before testing.
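+#
+# Example (an illustrative sketch; the target, binary, and paths are
+# assumptions):
+#   generate_runner_script("base_unittests_cros_runner") {
+#     testonly = true
+#     test_exe = "base_unittests"
+#     generated_script = "$root_build_dir/bin/run_base_unittests"
+#     runtime_deps_file = "$root_out_dir/base_unittests.runtime_deps"
+#   }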
+template("generate_runner_script") {
+ forward_variables_from(invoker,
+ [
+ "deploy_chrome",
+ "generated_script",
+ "runtime_deps_file",
+ "tast_attr_expr",
+ "tast_tests",
+ "testonly",
+ "test_exe",
+ ])
+ if (!defined(deploy_chrome)) {
+ deploy_chrome = false
+ }
+
+ # These are the only 2 conditions when ${_cache_path_prefix} is used, so
+ # initialize it in a conditional.
+ if (deploy_chrome || cros_is_vm) {
+ _cache_path_prefix = "//build/cros_cache/chrome-sdk/tarballs/${cros_board}+${cros_sdk_version}"
+ }
+
+ assert(defined(generated_script),
+ "Must specify where to place generated test launcher script via " +
+ "'generated_script'")
+ is_tast = defined(tast_attr_expr) || defined(tast_tests)
+ assert(!(is_tast && defined(test_exe)),
+ "Tast tests are invoked from binaries shipped with the VM image. " +
+ "There should be no locally built binary needed.")
+
+ action(target_name) {
+ if (defined(runtime_deps_file)) {
+ write_runtime_deps = runtime_deps_file
+ }
+ script = "//build/chromeos/create_test_runner_script.py"
+
+ outputs = [
+ generated_script,
+ ]
+
+ deps = [
+ "//testing/buildbot/filters:chromeos_filters",
+ ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ data = [
+ # We use android test-runner's results libs to construct gtest output
+ # json.
+ "//build/android/pylib/__init__.py",
+ "//build/android/pylib/base/",
+ "//build/android/pylib/results/",
+ generated_script,
+ "//build/chromeos/",
+ "//build/cros_cache/chrome-sdk/misc/",
+
+ # We use luci-py's subprocess42 to launch test processes.
+ "//tools/swarming_client/utils/",
+
+ # The LKGM file controls what version of the VM image to download. Add it
+ # as data here so that changes to it will trigger analyze.
+ "//chromeos/CHROMEOS_LKGM",
+ "//third_party/chromite/",
+ ]
+
+ # Add the VM/QEMU-launching bits if needed.
+ if (cros_is_vm) {
+ assert(defined(_cache_path_prefix))
+ _vm_image_path = "${_cache_path_prefix}+chromiumos_qemu_image.tar.xz/"
+ _qemu_dir = "${_cache_path_prefix}+app-emulation/"
+ _firmware_dir = "${_cache_path_prefix}+sys-firmware/"
+ data += [
+ _firmware_dir,
+ _vm_image_path,
+ _qemu_dir,
+ ]
+ }
+ if (is_tast) {
+ data += [
+ "${_cache_path_prefix}+chromeos-base/tast-cmd/",
+ "${_cache_path_prefix}+chromeos-base/tast-remote-tests-cros/",
+ ]
+ }
+ if (defined(invoker.data)) {
+      data += invoker.data
+ }
+
+ if (defined(invoker.data_deps)) {
+ data_deps = invoker.data_deps
+ }
+
+ # Required arguments used at build time by the runner script generator.
+ args = [
+ "--script-output-path",
+ rebase_path(generated_script, root_build_dir),
+ "--cros-cache",
+ rebase_path("//build/cros_cache/", root_build_dir),
+ "--board",
+ cros_board,
+ "--output-directory",
+ rebase_path(root_out_dir, root_build_dir),
+ ]
+
+ if (cros_is_vm) {
+ args += [ "--use-vm" ]
+ }
+
+ if (deploy_chrome) {
+ args += [ "--deploy-chrome" ]
+
+ # To deploy chrome to the VM, it needs to be stripped down to fit into
+      # the VM. This is done with binutils from the toolchain, so add the
+ # toolchain to the data.
+ assert(defined(_cache_path_prefix))
+ data += [
+ "${_cache_path_prefix}+environment_chromeos-base_chromeos-chrome.tar.xz",
+ "${_cache_path_prefix}+target_toolchain/",
+ ]
+ }
+
+ # When --test-exe is specified, test_runner.py will push the exe to the VM
+    # and execute it. Otherwise it wraps a host-side command and just takes
+    # care of launching & tearing down the VM.
+ if (defined(test_exe)) {
+ args += [
+ "--test-exe",
+ test_exe,
+ ]
+ if (defined(runtime_deps_file)) {
+ args += [
+ "--runtime-deps-path",
+ rebase_path(runtime_deps_file, root_build_dir),
+ ]
+ }
+ } else if (is_tast) {
+ # When --tast-tests is specified, test_runner.py will call
+ # local_test_runner on the VM to run the set of tests.
+ args += [
+ "--suite-name",
+ target_name,
+ ]
+ if (defined(tast_attr_expr)) {
+ args += [
+ "--tast-attr-expr",
+ tast_attr_expr,
+ ]
+ } else {
+ foreach(test, tast_tests) {
+ args += [
+ "--tast-tests",
+ test,
+ ]
+ }
+ }
+ }
+ }
+}
+
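+# Convenience wrapper around generate_runner_script() for running Tast tests.
+# Example (an illustrative sketch; the test name is an assumption):
+#   tast_test("example_tast_tests") {
+#     tast_tests = [ "example.Pass" ]
+#   }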
+template("tast_test") {
+ forward_variables_from(invoker, "*")
+
+ # Default the expression to match any chrome-related test.
+ if (!defined(tast_attr_expr) && !defined(tast_tests)) {
+ tast_attr_expr = "!disabled && !\"group:*\" && !informational" +
+ " && (\"dep:chrome\" || \"dep:chrome_login\")"
+ } else {
+ assert(defined(tast_attr_expr) != defined(tast_tests),
+ "Specify one of tast_tests or tast_attr_expr.")
+ }
+
+ # Append any disabled tests to the expression.
+ if (defined(tast_disabled_tests)) {
+ assert(defined(tast_attr_expr),
+ "tast_attr_expr must be used when specifying tast_disabled_tests.")
+ foreach(test, tast_disabled_tests) {
+ tast_attr_expr += " && !\"name:${test}\""
+ }
+ }
+ if (defined(tast_attr_expr)) {
+ tast_attr_expr = "( " + tast_attr_expr + " )"
+ }
+ generate_runner_script(target_name) {
+ testonly = true
+ generated_script = "$root_build_dir/bin/run_${target_name}"
+ runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps"
+ deploy_chrome = true
+ data_deps = [
+ "//:chromiumos_preflight", # Builds the browser.
+ "//chromeos:cros_chrome_deploy", # Adds additional browser run-time deps.
+ ]
+ }
+}
diff --git a/deps/v8/build/config/clang/BUILD.gn b/deps/v8/build/config/clang/BUILD.gn
new file mode 100644
index 0000000000..960726e1e5
--- /dev/null
+++ b/deps/v8/build/config/clang/BUILD.gn
@@ -0,0 +1,42 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
+config("find_bad_constructs") {
+ if (clang_use_chrome_plugins) {
+ cflags = []
+
+ # The plugin is built directly into clang, so there's no need to load it
+ # dynamically.
+ cflags += [
+ "-Xclang",
+ "-add-plugin",
+ "-Xclang",
+ "find-bad-constructs",
+ ]
+
+ if (is_linux || is_android || is_fuchsia) {
+ cflags += [
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "check-ipc",
+ ]
+ }
+ }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so such targets may want to remove this config.
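+# A target can drop them with, for example (an illustrative sketch):
+#   configs -= [ "//build/config/clang:extra_warnings" ]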
+config("extra_warnings") {
+ cflags = [
+ "-Wheader-hygiene",
+
+ # Warns when a const char[] is converted to bool.
+ "-Wstring-conversion",
+
+ "-Wtautological-overlap-compare",
+ ]
+}
diff --git a/deps/v8/build/config/clang/clang.gni b/deps/v8/build/config/clang/clang.gni
new file mode 100644
index 0000000000..2c2d76fd4b
--- /dev/null
+++ b/deps/v8/build/config/clang/clang.gni
@@ -0,0 +1,15 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+default_clang_base_path = "//third_party/llvm-build/Release+Asserts"
+
+declare_args() {
+ # Indicates if the build should use the Chrome-specific plugins for enforcing
+ # coding guidelines, etc. Only used when compiling with Clang.
+ clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang
+
+ clang_base_path = default_clang_base_path
+}
diff --git a/deps/v8/build/config/compiler/BUILD.gn b/deps/v8/build/config/compiler/BUILD.gn
new file mode 100644
index 0000000000..4b24c762b6
--- /dev/null
+++ b/deps/v8/build/config/compiler/BUILD.gn
@@ -0,0 +1,2423 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/host_byteorder.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/ui.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+ import("//build/config/arm.gni")
+}
+if (current_cpu == "mipsel" || current_cpu == "mips64el" ||
+ current_cpu == "mips" || current_cpu == "mips64") {
+ import("//build/config/mips.gni")
+}
+if (is_mac) {
+ import("//build/config/mac/symbols.gni")
+}
+if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+}
+if (is_nacl) {
+ # To keep NaCl variables out of builds that don't include NaCl, all
+ # variables defined in nacl/config.gni referenced here should be protected by
+ # is_nacl conditions.
+ import("//build/config/nacl/config.gni")
+}
+
+declare_args() {
+ # Default to warnings as errors for default workflow, where we catch
+ # warnings with known toolchains. Allow overriding this e.g. for Chromium
+ # builds on Linux that could use a different version of the compiler.
+  # With GCC, warnings in non-Chromium code are never treated as errors.
+ treat_warnings_as_errors = true
+
+ # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such optimization.
+ android_full_debug = false
+
+ # Whether to use the binary binutils checked into third_party/binutils.
+ # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+ # only two architectures that are currently checked in). Turn this off when
+ # you are using a custom toolchain and need to control -B in cflags.
+ linux_use_bundled_binutils =
+ linux_use_bundled_binutils_override && is_linux &&
+ (current_cpu == "x64" || current_cpu == "x86")
+ binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+ root_build_dir)
+
+ # Compile in such a way as to make it possible for the profiler to unwind full
+  # stack frames. Setting this flag has a larger effect on the performance of
+  # the generated code than just setting profiling, but gives the profiler more
+ # information to analyze.
+ # Requires profiling to be set to true.
+ enable_full_stack_frames_for_profiling = false
+
+ # When we are going to use gold we need to find it.
+ # This is initialized below, after use_gold might have been overridden.
+ gold_path = false
+
+ if (is_win) {
+ # Whether the VS xtree header has been patched to disable warning 4702. If
+ # it has, then we don't need to disable 4702 (unreachable code warning).
+ # The patch is preapplied to the internal toolchain and hence all bots.
+ msvs_xtree_patched = false
+ }
+
+ # Enable fatal linker warnings. Building Chromium with certain versions
+  # of binutils can cause linker warnings.
+ # See: https://bugs.chromium.org/p/chromium/issues/detail?id=457359
+ fatal_linker_warnings = true
+
+ # Build with C++ RTTI enabled. Chromium builds without RTTI by default,
+ # but some sanitizers are known to require it, like CFI diagnostics
+ # and UBsan variants.
+ use_rtti = use_cfi_diag || is_ubsan_vptr || is_ubsan_security
+
+ # AFDO (Automatic Feedback Directed Optimizer) is a form of profile-guided
+  # optimization that GCC supports. It is used by ChromeOS in its official
+ # builds. To use it, set auto_profile_path to the path to a file containing
+ # the needed gcov profiling data.
+ auto_profile_path = ""
+
+ # Allow projects that wish to stay on C++11 to override Chromium's default.
+ use_cxx11 = false
+
+ # Path to an AFDO profile to use while building with clang, if any. Empty
+ # implies none.
+ clang_sample_profile_path = ""
+
+ # Some configurations have default sample profiles. If this is true and
+ # clang_sample_profile_path is empty, we'll fall back to the default.
+ #
+ # We currently only have default profiles for Chromium in-tree, so we disable
+ # this by default for all downstream projects, since these profiles are likely
+ # nonsensical for said projects.
+ clang_use_default_sample_profile = build_with_chromium && is_official_build &&
+ (is_android || is_desktop_linux)
+
+ # Turn this on to have the compiler output extra timing information.
+ compiler_timing = false
+
+ # Set to true to pass --no-rosegment to lld. This is a workaround
+  # for a known issue in Valgrind,
+ # https://bugs.kde.org/show_bug.cgi?id=384727
+ ro_segment_workaround_for_valgrind = false
+
+ # Turn this on to use ghash feature of lld for faster debug link on Windows.
+ # http://blog.llvm.org/2018/01/improving-link-time-on-windows-with.html
+ use_ghash = true
+
+ # Whether to enable ThinLTO optimizations. Turning ThinLTO optimizations on
+ # can substantially increase link time and binary size, but they generally
+ # also make binaries a fair bit faster.
+ #
+ # TODO(gbiv): We disable optimizations by default on most platforms because
+ # the space overhead is too great. We should use some mixture of profiles and
+ # optimization settings to better tune the size increase.
+ thin_lto_enable_optimizations =
+ (is_chromeos || is_android || is_win) && is_official_build
+
+ # By default only the binaries in official builds get build IDs.
+ force_local_build_id = false
+}
+
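+# Example args.gn overrides (an illustrative sketch, not defaults):
+#   treat_warnings_as_errors = false
+#   compiler_timing = true
+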
+declare_args() {
+ # C++11 may not be an option if Android test infrastructure is used.
+ use_cxx11_on_android = use_cxx11
+}
+
+declare_args() {
+ # Set to true to use icf, Identical Code Folding.
+ #
+ # icf=all is broken in older golds, see
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=17704
+ # See also https://crbug.com/663886
+ # `linux_use_bundled_binutils` is to avoid breaking Linux distros which may
+ # still have a buggy gold.
+ # chromeos binutils has been patched with the fix, so always use icf there.
+ # The bug only affects x86 and x64, so we can still use ICF when targeting
+ # other architectures.
+ #
+ # lld doesn't have the bug.
+ use_icf = (is_posix || is_fuchsia) && !is_debug && !using_sanitizer &&
+ !use_clang_coverage && !(is_android && use_order_profiling) &&
+ (use_lld ||
+ (use_gold &&
+ ((!is_android && linux_use_bundled_binutils) || is_chromeos ||
+ !(current_cpu == "x86" || current_cpu == "x64"))))
+}
+
+# Apply the default logic for these values if they were not set explicitly.
+if (gold_path == false) {
+ if (use_gold) {
+ gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+ root_build_dir)
+ } else {
+ gold_path = ""
+ }
+}
+
+if (use_debug_fission == "default") {
+ use_debug_fission =
+ is_debug && !is_android && !is_fuchsia && !is_ios && !is_mac && !is_win &&
+ (use_gold || use_lld) && cc_wrapper == ""
+}
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+ include_dirs = [
+ "//",
+ root_gen_dir,
+ ]
+}
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning related stuff in the "warnings" config.
+
+config("compiler") {
+ asmflags = []
+ cflags = []
+ cflags_c = []
+ cflags_cc = []
+ cflags_objc = []
+ cflags_objcc = []
+ ldflags = []
+ defines = []
+ configs = []
+
+ # System-specific flags. If your compiler flags apply to one of the
+ # categories here, add it to the associated file to keep this shared config
+ # smaller.
+ if (is_win) {
+ configs += [ "//build/config/win:compiler" ]
+ } else if (is_android) {
+ configs += [ "//build/config/android:compiler" ]
+ } else if (is_linux) {
+ configs += [ "//build/config/linux:compiler" ]
+ } else if (is_nacl) {
+ configs += [ "//build/config/nacl:compiler" ]
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:compiler" ]
+ } else if (is_ios) {
+ configs += [ "//build/config/ios:compiler" ]
+ } else if (is_fuchsia) {
+ configs += [ "//build/config/fuchsia:compiler" ]
+ } else if (current_os == "aix") {
+ configs += [ "//build/config/aix:compiler" ]
+ }
+
+ configs += [
+ # See the definitions below.
+ ":clang_revision",
+ ":compiler_cpu_abi",
+ ":compiler_codegen",
+ ":compiler_deterministic",
+ ]
+
+ # In general, Windows is totally different, but all the other builds share
+ # some common GCC configuration.
+ if (!is_win) {
+ # Common POSIX compiler flags setup.
+ # --------------------------------
+ cflags += [ "-fno-strict-aliasing" ] # See http://crbug.com/32204
+
+ # Stack protection.
+ if (is_mac) {
+ # The strong variant of the stack protector significantly increases
+ # binary size, so only enable it in debug mode.
+ if (is_debug) {
+ cflags += [ "-fstack-protector-strong" ]
+ } else {
+ cflags += [ "-fstack-protector" ]
+ }
+ } else if ((is_posix && !is_chromeos && !is_nacl) || is_fuchsia) {
+ # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it.
+ # See also https://crbug.com/533294
+ cflags += [ "--param=ssp-buffer-size=4" ]
+
+ # The x86 toolchain currently has problems with stack-protector.
+ if (is_android && current_cpu == "x86") {
+ cflags += [ "-fno-stack-protector" ]
+ } else if (current_os != "aix") {
+ # Not available on aix.
+ cflags += [ "-fstack-protector" ]
+ }
+ }
+
+ # Linker warnings.
+ if (fatal_linker_warnings && !(is_chromeos && current_cpu == "arm") &&
+ !(is_android && use_order_profiling) && !is_mac && !is_ios &&
+ current_os != "aix") {
+ # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+ # TODO(lizeb,pasko): Fix link errors when linking with order_profiling=1
+ # crbug.com/485542
+ ldflags += [ "-Wl,--fatal-warnings" ]
+ }
+ }
+
+ if (is_clang && is_debug) {
+ # Allow comparing the address of references and 'this' against 0
+ # in debug builds. Technically, these can never be null in
+ # well-defined C/C++ and Clang can optimize such checks away in
+ # release builds, but they may be used in asserts in debug builds.
+ cflags_cc += [
+ "-Wno-undefined-bool-conversion",
+ "-Wno-tautological-undefined-compare",
+ ]
+ }
+
+ # Non-Mac Posix and Fuchsia compiler flags setup.
+ # -----------------------------------
+ if ((is_posix && !(is_mac || is_ios)) || is_fuchsia) {
+ if (enable_profiling) {
+ if (!is_debug) {
+ cflags += [ "-g" ]
+
+ if (enable_full_stack_frames_for_profiling) {
+ cflags += [
+ "-fno-inline",
+ "-fno-optimize-sibling-calls",
+ ]
+ }
+ }
+ }
+
+ if (is_official_build || force_local_build_id) {
+ # Explicitly pass --build-id to ld. Compilers used to always pass this
+ # implicitly but don't any more (in particular clang when built without
+ # ENABLE_LINKER_BUILD_ID=ON). The crash infrastructure does need a build
+ # id, so explicitly enable it in official builds. It's not needed in
+ # unofficial builds and computing it does slow down the link, so go with
+ # faster links in unofficial builds.
+ ldflags += [ "-Wl,--build-id=sha1" ]
+ }
+
+ if (!is_android) {
+ defines += [
+ # _FILE_OFFSET_BITS=64 should not be set on Android in order to maintain
+ # the behavior of the Android NDK from earlier versions.
+ # See https://android-developers.googleblog.com/2017/09/introducing-android-native-development.html
+ "_FILE_OFFSET_BITS=64",
+ "_LARGEFILE_SOURCE",
+ "_LARGEFILE64_SOURCE",
+ ]
+ }
+
+ if (!is_nacl) {
+ if (exclude_unwind_tables) {
+ cflags += [
+ "-fno-unwind-tables",
+ "-fno-asynchronous-unwind-tables",
+ ]
+ defines += [ "NO_UNWIND_TABLES" ]
+ } else {
+ cflags += [ "-funwind-tables" ]
+ }
+ }
+ }
+
+ # Linux/Android/Fuchsia common flags setup.
+ # ---------------------------------
+ if (is_linux || is_android || is_fuchsia) {
+ asmflags += [ "-fPIC" ]
+ cflags += [ "-fPIC" ]
+ ldflags += [ "-fPIC" ]
+
+ if (!is_clang) {
+ # Use pipes for communicating between sub-processes. Faster.
+ # (This flag doesn't do anything with Clang.)
+ cflags += [ "-pipe" ]
+ }
+
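+ # Harden the binary: mark the stack non-executable and make the GOT
+ # read-only after relocation (partial RELRO; together with the -z,now added
+ # below for non-component builds, this becomes full RELRO).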
+ ldflags += [
+ "-Wl,-z,noexecstack",
+ "-Wl,-z,relro",
+ ]
+
+ if (!is_component_build) {
+ ldflags += [ "-Wl,-z,now" ]
+ }
+
+ # Compiler instrumentation can introduce dependencies in DSOs to symbols in
+ # the executable they are loaded into, leaving those symbols unresolved at
+ # link time.
+ if (!using_sanitizer) {
+ ldflags += [
+ "-Wl,-z,defs",
+ "-Wl,--as-needed",
+ ]
+ }
+ }
+
+ # Linux/Android-specific compiler flags setup.
+ # --------------------------------------------
+ if (is_android && is_clang) {
+ _rebased_android_toolchain_root =
+ rebase_path(android_toolchain_root, root_build_dir)
+
+ # Let clang find the linker in the NDK.
+ ldflags += [ "--gcc-toolchain=$_rebased_android_toolchain_root" ]
+ }
+
+ if (((is_posix || is_fuchsia) && use_lld) ||
+ (target_os == "chromeos" && is_android)) {
+ # NOTE: Some Chrome OS builds globally disable LLD, but they also build some
+ # targets against Android toolchains which should use LLD. Therefore we
+ # explicitly select LLD in these cases.
+ #
+ # TODO(https://crbug.com/837095): This should be cleaned up if/when LLD can
+ # work properly for Chrome OS builds.
+ ldflags += [ "-fuse-ld=lld" ]
+ if (current_cpu == "arm64") {
+ # Reduce the page size from 65536 in order to reduce binary size slightly
+ # by shrinking the alignment gap between segments. This also causes all
+ # segments to be mapped adjacently, which breakpad relies on.
+ ldflags += [ "-Wl,-z,max-page-size=4096" ]
+ }
+ } else if (use_gold) {
+ ldflags += [ "-fuse-ld=gold" ]
+ if (!is_android) {
+ # On Android, this isn't needed. gcc in the NDK knows to look next to
+ # it with -fuse-ld=gold, and clang gets a --gcc-toolchain flag passed
+ # above.
+ ldflags += [ "-B$gold_path" ]
+
+ if (linux_use_bundled_binutils) {
+ ldflags += [
+ # Experimentation found that using four linking threads
+ # saved ~20% of link time.
+ # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+ # Only apply this to the target linker, since the host
+ # linker might not be gold, but isn't used much anyway.
+ "-Wl,--threads",
+ "-Wl,--thread-count=4",
+ ]
+ }
+ }
+
+ # TODO(thestig): Make this flag work with GN.
+ #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
+ # ldflags += [
+ # "-Wl,--detect-odr-violations",
+ # ]
+ #}
+ } else if (linux_use_bundled_binutils) {
+ # Gold is the default linker for the bundled binutils, so we explicitly
+ # enable the bfd linker when use_gold is not set.
+ ldflags += [ "-fuse-ld=bfd" ]
+ }
+
+ if (use_icf) {
+ ldflags += [ "-Wl,--icf=all" ]
+ }
+
+ if (linux_use_bundled_binutils) {
+ cflags += [ "-B$binutils_path" ]
+ }
+
+ if (is_linux) {
+ cflags += [ "-pthread" ]
+ # Do not use the -pthread ldflag here since it becomes a no-op
+ # when using -nodefaultlibs, which would cause an unused argument
+ # error. "-lpthread" is added in //build/config:default_libs.
+ }
+
+ # Clang-specific compiler flags setup.
+ # ------------------------------------
+ if (is_clang) {
+ cflags += [ "-fcolor-diagnostics" ]
+
+ # Enable -fmerge-all-constants. This used to be the default in clang
+ # for over a decade. It makes clang non-conforming, but is fairly safe
+ # in practice and saves some binary size. We might want to consider
+ # disabling this (https://bugs.llvm.org/show_bug.cgi?id=18538#c13),
+ # but for now it looks like our build might rely on it
+ # (https://crbug.com/829795).
+ cflags += [ "-fmerge-all-constants" ]
+ }
+
+ if (use_lld) {
+ # TODO(thakis): Make the driver pass --color-diagnostics to the linker
+ # if -fcolor-diagnostics is passed to it, and pass -fcolor-diagnostics
+ # in ldflags instead.
+ if (is_win) {
+ # On Windows, we call the linker directly, instead of calling it through
+ # the driver.
+ ldflags += [ "--color-diagnostics" ]
+ } else {
+ ldflags += [ "-Wl,--color-diagnostics" ]
+ }
+ }
+
+ if (is_clang && !is_nacl && !use_xcode_clang) {
+ cflags += [ "-fcrash-diagnostics-dir=" +
+ rebase_path("//tools/clang/crashreports", root_build_dir) ]
+
+ cflags += [
+ # TODO(hans): Remove this once Clang generates better optimized debug info
+ # by default. https://crbug.com/765793
+ "-Xclang",
+ "-mllvm",
+ "-Xclang",
+ "-instcombine-lower-dbg-declare=0",
+ ]
+ }
+
+ # C11/C++11 compiler flags setup.
+ # ---------------------------
+ if (is_linux || is_android || (is_nacl && is_clang) || current_os == "aix") {
+ if (target_os == "android") {
+ cxx11_override = use_cxx11_on_android
+ } else {
+ cxx11_override = use_cxx11
+ }
+
+ if (is_clang) {
+ standard_prefix = "c"
+
+ # Since we build with -std=c* and not -std=gnu*, _GNU_SOURCE will not be
+ # defined by the compiler. However, lots of code relies on the
+ # non-standard features that _GNU_SOURCE enables, so define it manually.
+ defines += [ "_GNU_SOURCE" ]
+
+ if (is_nacl) {
+ # Undefine __STRICT_ANSI__ to get non-standard features which would
+ # otherwise not be enabled by NaCl's sysroots.
+ cflags += [ "-U__STRICT_ANSI__" ]
+ }
+ } else {
+ # Gcc does not support ##__VA_ARGS__ when in standards-conforming mode,
+ # but we use this feature in several places in Chromium.
+ # TODO(thomasanderson): Replace usages of ##__VA_ARGS__ with the
+ # standard-compliant __VA_OPT__ added by C++20, and switch the gcc build
+ # to -std=c*.
+ standard_prefix = "gnu"
+ }
+
+ cflags_c += [ "-std=${standard_prefix}11" ]
+ if (cxx11_override) {
+ # Override Chromium's default for projects that wish to stay on C++11.
+ cflags_cc += [ "-std=${standard_prefix}++11" ]
+ } else {
+ cflags_cc += [ "-std=${standard_prefix}++14" ]
+ }
+ } else if (!is_win && !is_nacl) {
+ if (target_os == "android") {
+ cxx11_override = use_cxx11_on_android
+ } else {
+ cxx11_override = use_cxx11
+ }
+
+ # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either gnu11/gnu++11
+ # or c11/c++11; we technically don't need this toolchain any more, but there
+ # are still a few buildbots using it, so until those are turned off
+ # we need the !is_nacl clause and the (is_nacl && is_clang) clause, above.
+ cflags_c += [ "-std=c11" ]
+ if (cxx11_override) {
+ cflags_cc += [ "-std=c++11" ]
+ } else {
+ cflags_cc += [ "-std=c++14" ]
+ }
+ }
+
+ if (is_mac) {
+ # The system libc++ on Mac doesn't have aligned allocation in C++17.
+ defines += [ "_LIBCPP_HAS_NO_ALIGNED_ALLOCATION" ]
+ cflags_cc += [ "-stdlib=libc++" ]
+ ldflags += [ "-stdlib=libc++" ]
+ }
+
+ # Add flags for link-time optimization. These flags enable
+ # optimizations/transformations that require whole-program visibility at link
+ # time, so they need to be applied to all translation units, and we may end up
+ # with miscompiles if only part of the program is compiled with LTO flags. For
+ # that reason, we cannot allow targets to enable or disable these flags, for
+ # example by disabling the optimize configuration.
+ # TODO(pcc): Make this conditional on is_official_build rather than on gn
+ # flags for specific features.
+ if (!is_debug && use_thin_lto && is_a_target_toolchain) {
+ assert(use_lld || target_os == "chromeos",
+ "The gold plugin is only supported on Chrome OS")
+
+ cflags += [ "-flto=thin" ]
+
+ if (target_os != "chromeos") {
+ cflags += [ "-fsplit-lto-unit" ]
+ }
+
+ if (thin_lto_enable_optimizations) {
+ lto_opt_level = 2
+ } else {
+ lto_opt_level = 0
+ }
+
+ if (is_win) {
+ # This is a straight translation of the non-Windows flags below,
+ # except we do not use the ThinLTO cache, which leaks temporary
+ # files on Windows (https://crbug.com/871962).
+ ldflags += [
+ "/opt:lldlto=" + lto_opt_level,
+ "/opt:lldltojobs=8",
+
+ # Experimentally determined to yield a reasonable trade-off between
+ # build time, run-time performance, and binary size.
+ "-mllvm:-import-instr-limit=10",
+ ]
+ } else {
+ ldflags += [ "-flto=thin" ]
+
+ # Limit the parallelism to avoid too aggressive competition between
+ # linker jobs. This is still suboptimal compared to a dynamic resource
+ # allocation scheme, but it should be good enough.
+ if (use_lld) {
+ ldflags += [ "-Wl,--thinlto-jobs=8" ]
+
+ # Disable caching on Chrome OS temporarily (crbug.com/889967)
+ if (!is_chromeos) {
+ # Limit the size of the ThinLTO cache to the lesser of 10% of
+ # available disk space, 10GB and 100000 files.
+ cache_policy =
+ "cache_size=10%:cache_size_bytes=10g:cache_size_files=100000"
+ ldflags += [
+ "-Wl,--thinlto-cache-dir=" +
+ rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+ "-Wl,--thinlto-cache-policy,$cache_policy",
+ ]
+ }
+ } else {
+ ldflags += [ "-Wl,-plugin-opt,jobs=8" ]
+ }
+
+ if (use_lld) {
+ ldflags += [ "-Wl,--lto-O" + lto_opt_level ]
+ if (thin_lto_enable_optimizations) {
+ if (is_android) {
+ # TODO(gbiv): We ideally shouldn't need to specify this; ThinLTO
+ # should be able to better manage binary size increases on its own.
+ ldflags += [
+ "-Wl,-mllvm",
+ "-Wl,-import-instr-limit=5",
+ ]
+ }
+ }
+ } else {
+ not_needed([ "lto_opt_level" ])
+ }
+ }
+
+ # TODO(pcc): Re-enable this flag on Android. This will require libc++ to be
+ # built with ThinLTO (see https://crbug.com/767901) as well as the GVR shim.
+ if (!is_android) {
+ cflags += [ "-fwhole-program-vtables" ]
+ if (!is_win) {
+ ldflags += [ "-fwhole-program-vtables" ]
+ }
+ }
+
+ # Work-around for http://openradar.appspot.com/20356002
+ if (is_mac) {
+ ldflags += [ "-Wl,-all_load" ]
+ }
+
+ # This flag causes LTO to create an .ARM.attributes section with the correct
+ # architecture. This is necessary because LLD will refuse to link a program
+ # unless the architecture revision in .ARM.attributes is sufficiently new.
+ # TODO(pcc): The contents of .ARM.attributes should be based on the
+ # -march flag passed at compile time (see llvm.org/pr36291).
+ if (current_cpu == "arm") {
+ ldflags += [ "-march=$arm_arch" ]
+ }
+ }
+
+ if (compiler_timing) {
+ if (is_clang) {
+ if (is_win) {
+ cflags += [ "/clang:-ftime-report" ]
+ } else {
+ cflags += [ "-ftime-report" ]
+ }
+ } else if (is_win) {
+ cflags += [
+ # "Documented" here:
+ # http://aras-p.info/blog/2017/10/23/Best-unknown-MSVC-flag-d2cgsummary/
+ "/d2cgsummary",
+ ]
+ }
+ }
+
+ # Pass a flag to LLD to work around an issue in Valgrind related to the
+ # location of debug symbols. This is also enabled for Android builds to
+ # allow debuggerd to properly symbolize stack crashes on that platform
+ # (http://crbug.com/919499).
+ if (use_lld && (ro_segment_workaround_for_valgrind || is_android)) {
+ ldflags += [ "-Wl,--no-rosegment" ]
+ }
+
+ # This flag enforces that member pointer base types are complete. It helps
+ # prevent us from running into problems in the Microsoft C++ ABI (see
+ # https://crbug.com/847724).
+ if (is_clang && !is_nacl && target_os != "chromeos" && !use_xcode_clang &&
+ (is_win || use_custom_libcxx)) {
+ cflags += [ "-fcomplete-member-pointers" ]
+ }
+
+ # Pass the same C/C++ flags to the objective C/C++ compiler.
+ cflags_objc += cflags_c
+ cflags_objcc += cflags_cc
+
+ # Assign any flags set for the C compiler to asmflags so that they are sent
+ # to the assembler. The Windows assembler takes different types of flags
+ # so only do so for posix platforms.
+ if (is_posix || is_fuchsia) {
+ asmflags += cflags
+ asmflags += cflags_c
+ }
+}
+
+# This provides the basic options to select the target CPU and ABI.
+# It is factored out of "compiler" so that special cases can use this
+# without using everything that "compiler" brings in. Options that
+# tweak code generation for a particular CPU do not belong here!
+# See "compiler_codegen", below.
+config("compiler_cpu_abi") {
+ cflags = []
+ ldflags = []
+ defines = []
+
+ if ((is_posix && !(is_mac || is_ios)) || is_fuchsia) {
+ # CPU architecture. We may or may not be doing a cross compile now, so for
+ # simplicity we always explicitly set the architecture.
+ if (current_cpu == "x64") {
+ cflags += [
+ "-m64",
+ "-march=x86-64",
+ ]
+ ldflags += [ "-m64" ]
+ } else if (current_cpu == "x86") {
+ cflags += [ "-m32" ]
+ ldflags += [ "-m32" ]
+ if (!is_nacl) {
+ cflags += [
+ "-msse2",
+ "-mfpmath=sse",
+ "-mmmx",
+ ]
+ }
+ } else if (current_cpu == "arm") {
+ if (is_clang && !is_android && !is_nacl) {
+ cflags += [ "--target=arm-linux-gnueabihf" ]
+ ldflags += [ "--target=arm-linux-gnueabihf" ]
+ }
+ if (!is_nacl) {
+ cflags += [
+ "-march=$arm_arch",
+ "-mfloat-abi=$arm_float_abi",
+ ]
+ }
+ if (arm_tune != "") {
+ cflags += [ "-mtune=$arm_tune" ]
+ }
+ } else if (current_cpu == "arm64") {
+ if (is_clang && !is_android && !is_nacl && !is_fuchsia) {
+ cflags += [ "--target=aarch64-linux-gnu" ]
+ ldflags += [ "--target=aarch64-linux-gnu" ]
+ }
+ } else if (current_cpu == "mipsel" && !is_nacl) {
+ ldflags += [ "-Wl,--hash-style=sysv" ]
+ if (custom_toolchain == "") {
+ if (is_clang) {
+ if (is_android) {
+ cflags += [ "--target=mipsel-linux-android" ]
+ ldflags += [ "--target=mipsel-linux-android" ]
+ } else {
+ cflags += [ "--target=mipsel-linux-gnu" ]
+ ldflags += [ "--target=mipsel-linux-gnu" ]
+ }
+ } else {
+ cflags += [ "-EL" ]
+ ldflags += [ "-EL" ]
+ }
+ }
+
+ if (mips_arch_variant == "r6") {
+ cflags += [ "-mno-odd-spreg" ]
+ ldflags += [ "-mips32r6" ]
+ if (is_clang) {
+ cflags += [
+ "-march=mipsel",
+ "-mcpu=mips32r6",
+ ]
+ } else {
+ cflags += [
+ "-mips32r6",
+ "-Wa,-mips32r6",
+ ]
+ if (is_android) {
+ ldflags += [ "-Wl,-melf32ltsmip" ]
+ }
+ }
+ if (mips_use_msa == true) {
+ cflags += [
+ "-mmsa",
+ "-mfp64",
+ ]
+ }
+ } else if (mips_arch_variant == "r2") {
+ ldflags += [ "-mips32r2" ]
+ if (is_clang) {
+ cflags += [
+ "-march=mipsel",
+ "-mcpu=mips32r2",
+ ]
+ } else {
+ cflags += [
+ "-mips32r2",
+ "-Wa,-mips32r2",
+ ]
+ if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+ cflags += [ "-m$mips_fpu_mode" ]
+ }
+ }
+ } else if (mips_arch_variant == "r1") {
+ ldflags += [ "-mips32" ]
+ if (is_clang) {
+ cflags += [
+ "-march=mipsel",
+ "-mcpu=mips32",
+ ]
+ } else {
+ cflags += [
+ "-mips32",
+ "-Wa,-mips32",
+ ]
+ }
+ } else if (mips_arch_variant == "loongson3") {
+ defines += [ "_MIPS_ARCH_LOONGSON" ]
+ cflags += [
+ "-march=loongson3a",
+ "-mno-branch-likely",
+ "-Wa,-march=loongson3a",
+ ]
+ }
+
+ if (mips_dsp_rev == 1) {
+ cflags += [ "-mdsp" ]
+ } else if (mips_dsp_rev == 2) {
+ cflags += [ "-mdspr2" ]
+ }
+
+ cflags += [ "-m${mips_float_abi}-float" ]
+ } else if (current_cpu == "mips" && !is_nacl) {
+ ldflags += [ "-Wl,--hash-style=sysv" ]
+ if (custom_toolchain == "") {
+ if (is_clang) {
+ cflags += [ "--target=mips-linux-gnu" ]
+ ldflags += [ "--target=mips-linux-gnu" ]
+ } else {
+ cflags += [ "-EB" ]
+ ldflags += [ "-EB" ]
+ }
+ }
+
+ if (mips_arch_variant == "r6") {
+ cflags += [
+ "-mips32r6",
+ "-Wa,-mips32r6",
+ ]
+ if (mips_use_msa == true) {
+ cflags += [
+ "-mmsa",
+ "-mfp64",
+ ]
+ }
+ } else if (mips_arch_variant == "r2") {
+ cflags += [
+ "-mips32r2",
+ "-Wa,-mips32r2",
+ ]
+ if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+ cflags += [ "-m$mips_fpu_mode" ]
+ }
+ } else if (mips_arch_variant == "r1") {
+ cflags += [
+ "-mips32",
+ "-Wa,-mips32",
+ ]
+ }
+
+ if (mips_dsp_rev == 1) {
+ cflags += [ "-mdsp" ]
+ } else if (mips_dsp_rev == 2) {
+ cflags += [ "-mdspr2" ]
+ }
+
+ cflags += [ "-m${mips_float_abi}-float" ]
+ } else if (current_cpu == "mips64el") {
+ cflags += [ "-D__SANE_USERSPACE_TYPES__" ]
+ ldflags += [ "-Wl,--hash-style=sysv" ]
+ if (custom_toolchain == "") {
+ if (is_clang) {
+ if (is_android) {
+ cflags += [ "--target=mips64el-linux-android" ]
+ ldflags += [ "--target=mips64el-linux-android" ]
+ } else {
+ cflags += [ "--target=mips64el-linux-gnuabi64" ]
+ ldflags += [ "--target=mips64el-linux-gnuabi64" ]
+ }
+ } else {
+ cflags += [
+ "-EL",
+ "-mabi=64",
+ ]
+ ldflags += [
+ "-EL",
+ "-mabi=64",
+ ]
+ }
+ }
+
+ if (mips_arch_variant == "r6") {
+ if (is_clang) {
+ cflags += [
+ "-march=mips64el",
+ "-mcpu=mips64r6",
+ ]
+ } else {
+ cflags += [
+ "-mips64r6",
+ "-Wa,-mips64r6",
+ ]
+ ldflags += [ "-mips64r6" ]
+ }
+ if (mips_use_msa == true) {
+ cflags += [
+ "-mmsa",
+ "-mfp64",
+ ]
+ }
+ } else if (mips_arch_variant == "r2") {
+ ldflags += [ "-mips64r2" ]
+ if (is_clang) {
+ cflags += [
+ "-march=mips64el",
+ "-mcpu=mips64r2",
+ ]
+ } else {
+ cflags += [
+ "-mips64r2",
+ "-Wa,-mips64r2",
+ ]
+ }
+ } else if (mips_arch_variant == "loongson3") {
+ defines += [ "_MIPS_ARCH_LOONGSON" ]
+ cflags += [
+ "-march=loongson3a",
+ "-mno-branch-likely",
+ "-Wa,-march=loongson3a",
+ ]
+ }
+ } else if (current_cpu == "mips64") {
+ ldflags += [ "-Wl,--hash-style=sysv" ]
+ if (custom_toolchain == "") {
+ if (is_clang) {
+ cflags += [ "--target=mips64-linux-gnuabi64" ]
+ ldflags += [ "--target=mips64-linux-gnuabi64" ]
+ } else {
+ cflags += [
+ "-EB",
+ "-mabi=64",
+ ]
+ ldflags += [
+ "-EB",
+ "-mabi=64",
+ ]
+ }
+ }
+
+ if (mips_arch_variant == "r6") {
+ cflags += [
+ "-mips64r6",
+ "-Wa,-mips64r6",
+ ]
+ ldflags += [ "-mips64r6" ]
+
+ if (mips_use_msa == true) {
+ cflags += [
+ "-mmsa",
+ "-mfp64",
+ ]
+ }
+ } else if (mips_arch_variant == "r2") {
+ cflags += [
+ "-mips64r2",
+ "-Wa,-mips64r2",
+ ]
+ ldflags += [ "-mips64r2" ]
+ }
+ } else if (current_cpu == "pnacl" && is_nacl_nonsfi) {
+ if (target_cpu == "x86" || target_cpu == "x64") {
+ cflags += [
+ "-arch",
+ "x86-32-nonsfi",
+ "--pnacl-bias=x86-32-nonsfi",
+ "--target=i686-unknown-nacl",
+ ]
+ ldflags += [
+ "-arch",
+ "x86-32-nonsfi",
+ "--target=i686-unknown-nacl",
+ ]
+ } else if (target_cpu == "arm") {
+ cflags += [
+ "-arch",
+ "arm-nonsfi",
+ "-mfloat-abi=hard",
+ "--pnacl-bias=arm-nonsfi",
+ "--target=armv7-unknown-nacl-gnueabihf",
+ ]
+ ldflags += [
+ "-arch",
+ "arm-nonsfi",
+ "--target=armv7-unknown-nacl-gnueabihf",
+ ]
+ }
+ } else if (current_cpu == "ppc64") {
+ if (current_os == "aix") {
+ cflags += [ "-maix64" ]
+ ldflags += [ "-maix64" ]
+ } else {
+ cflags += [ "-m64" ]
+ ldflags += [ "-m64" ]
+ }
+ } else if (current_cpu == "s390x") {
+ cflags += [ "-m64" ]
+ ldflags += [ "-m64" ]
+ }
+ }
+
+ asmflags = cflags
+}
+
+# This provides options to tweak code generation that are necessary
+# for particular Chromium code or for working around particular
+# compiler bugs (or the combination of the two).
+config("compiler_codegen") {
+ configs = []
+ cflags = []
+ ldflags = []
+
+ if (is_nacl) {
+ configs += [ "//build/config/nacl:compiler_codegen" ]
+ } else if (is_posix && !is_mac && !is_ios) {
+ if (current_cpu == "x86") {
+ if (is_clang) {
+ cflags += [
+ # Without this, building libyuv trips issues in clang's register
+ # allocator; see llvm.org/PR15798 / crbug.com/233709.
+ "-momit-leaf-frame-pointer",
+ ]
+ }
+ }
+ }
+
+ if (current_cpu == "arm64" && is_android) {
+ # On arm64 disable outlining for Android. See crbug.com/931297 for more
+ # information.
+ cflags += [ "-mno-outline" ]
+
+ # This can be removed once https://bugs.llvm.org/show_bug.cgi?id=40348
+ # has been resolved, and -mno-outline is obeyed by the linker during
+ # ThinLTO.
+ ldflags += [ "-Wl,-mllvm,-enable-machine-outliner=never" ]
+ }
+
+ asmflags = cflags
+}
+
+# This provides options that make the build deterministic, so that the same
+# revision produces the same output, independent of the name of the build
+# directory and of the computer the build is done on.
+# The relative path from build dir to source dir makes it into the build
+# outputs, so it's recommended that you use a build dir two levels deep
+# (e.g. "out/Release") so that you get the same "../.." path as all the bots
+# in your build outputs.
+config("compiler_deterministic") {
+ cflags = []
+ ldflags = []
+
+ # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for
+ # deterministic build. See https://crbug.com/314403
+ if (!is_official_build) {
+ if (is_win && !is_clang) {
+ cflags += [
+ "/wd4117", # Trying to define or undefine a predefined macro.
+ "/D__DATE__=",
+ "/D__TIME__=",
+ "/D__TIMESTAMP__=",
+ ]
+ } else {
+ cflags += [
+ "-Wno-builtin-macro-redefined",
+ "-D__DATE__=",
+ "-D__TIME__=",
+ "-D__TIMESTAMP__=",
+ ]
+ }
+ }
+
+ # Makes builds independent of absolute file path.
+ if (symbol_level != 0 && is_clang && strip_absolute_paths_from_debug_symbols) {
+ # If a debug option is given, clang includes $cwd in the debug info by
+ # default. For such builds, this flag generates reproducible obj files even
+ # if we use different build directories like "out/feature_a" and
+ # "out/feature_b", as long as we build the same files with the same compile
+ # flags.
+ # Other paths are already relative, so there is no need to normalize them.
+ cflags += [
+ "-Xclang",
+ "-fdebug-compilation-dir",
+ "-Xclang",
+ ".",
+ ]
+
+ if (is_win && use_lld) {
+ if (symbol_level == 2 || (is_clang && using_sanitizer)) {
+ # Absolutize source file path for PDB. Pass the real build directory
+ # if the pdb contains source-level debug information.
+ ldflags += [ "/PDBSourcePath:" + rebase_path(root_build_dir) ]
+ } else {
+ # On Windows, (non-sanitizer) symbol_level 1 builds don't contain
+ # debug information in obj files; the linker just creates enough
+ # debug info at link time to produce symbolized stacks (without line
+ # numbers). In that case, there's no downside in using a fake fixed
+ # base directory for paths in the pdb. This makes the pdb output
+ # fully deterministic and independent of the build directory.
+ assert(symbol_level == 1 && !(is_clang && using_sanitizer))
+ ldflags += [ "/PDBSourcePath:o:\fake\prefix" ]
+ }
+ }
+ }
+
+ # Tells the compiler not to use absolute paths when passing the default
+ # paths to the tools it invokes. We don't want this because we don't
+ # really need it and it can mess up the goma cache entries.
+ if (is_clang && !is_nacl) {
+ cflags += [ "-no-canonical-prefixes" ]
+ }
+}
+
+config("clang_revision") {
+ if (is_clang && clang_base_path == default_clang_base_path) {
+ update_args = [
+ "--print-revision",
+ "--verify-version=$clang_version",
+ ]
+ if (llvm_force_head_revision) {
+ update_args += [ "--llvm-force-head-revision" ]
+ }
+ clang_revision = exec_script("//tools/clang/scripts/update.py",
+ update_args,
+ "trim string")
+
+ # This is here so that all files get recompiled after a clang roll and
+ # when turning clang on or off. (Defines are passed via the command line,
+ # and build systems rebuild things when their command line changes.)
+ # Nothing should ever read this define.
+ defines = [ "CR_CLANG_REVISION=\"$clang_revision\"" ]
+ }
+}
+
+config("compiler_arm_fpu") {
+ if (current_cpu == "arm" && !is_ios && !is_nacl) {
+ cflags = [ "-mfpu=$arm_fpu" ]
+ asmflags = cflags
+ }
+}
+
+config("compiler_arm_thumb") {
+ if (current_cpu == "arm" && arm_use_thumb && is_posix &&
+ !(is_mac || is_ios || is_nacl)) {
+ cflags = [ "-mthumb" ]
+ }
+}
+
+config("compiler_arm") {
+ if (current_cpu == "arm" && is_chromeos) {
+ # arm is normally the default mode for clang, but on chromeos a wrapper
+ # is used to pass -mthumb, and therefore changes the default.
+ cflags = [ "-marm" ]
+ }
+}
+
+config("assembler_debug_dir") {
+ # TODO(thakis): Once openmax_dl no longer uses -fno-integrated-as, move
+ # this behind the -fdebug-compilation-dir cflags in the "compiler" config.
+ if (symbol_level != 0 && is_clang && strip_absolute_paths_from_debug_symbols) {
+ if (!is_win) {
+ # We don't use clang -cc1as on Windows (yet? https://crbug.com/762167)
+ asmflags = [ "-Wa,-fdebug-compilation-dir,." ]
+ }
+ }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider
+# whether a target might choose to use a different runtime library (ignore for
+# a moment whether this is possible or reasonable on your system). If such a
+# target would want to change or remove your option, put it in the
+# runtime_library config. If a target wants the option regardless, put it in
+# the compiler config.
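+
+# For example (a hypothetical target, shown only as a sketch), code that wants
+# a different runtime could swap the config out:
+#
+# executable("custom_runtime_tool") {
+# configs -= [ "//build/config/compiler:runtime_library" ]
+# configs += [ "//my/config:my_runtime_library" ]
+# }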
+
+config("runtime_library") {
+ configs = []
+
+ # The order of this config is important: it must appear before
+ # android:runtime_library. This is to ensure libc++ appears before
+ # libandroid_support in the -isystem include order. Otherwise, there will be
+ # build errors related to symbols declared in math.h.
+ if (use_custom_libcxx) {
+ configs += [ "//build/config/c++:runtime_library" ]
+ }
+
+ # TODO(crbug.com/830987): Come up with a better name for the POSIX + Fuchsia
+ # configuration.
+ if (is_posix || is_fuchsia) {
+ configs += [ "//build/config/posix:runtime_library" ]
+ }
+
+ # System-specific flags. If your compiler flags apply to one of the
+ # categories here, add it to the associated file to keep this shared config
+ # smaller.
+ if (is_win) {
+ configs += [ "//build/config/win:runtime_library" ]
+ } else if (is_linux) {
+ configs += [ "//build/config/linux:runtime_library" ]
+ } else if (is_ios) {
+ configs += [ "//build/config/ios:runtime_library" ]
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:runtime_library" ]
+ } else if (is_android) {
+ configs += [ "//build/config/android:runtime_library" ]
+ }
+
+ if (is_component_build) {
+ defines = [ "COMPONENT_BUILD" ]
+ }
+}
+
+# default_warnings ------------------------------------------------------------
+#
+# Collects all warning flags that are used by default. This is used as a
+# subconfig of both chromium_code and no_chromium_code. This way these
+# flags are guaranteed to appear on the compile command line after -Wall.
+config("default_warnings") {
+ cflags = []
+ cflags_cc = []
+ ldflags = []
+
+ if (is_win) {
+ if (treat_warnings_as_errors) {
+ cflags += [ "/WX" ]
+ }
+ if (fatal_linker_warnings) {
+ ldflags += [ "/WX" ]
+ }
+
+ cflags += [
+ # Warnings permanently disabled:
+
+ # C4091: 'typedef ': ignored on left of 'X' when no variable is
+ # declared.
+ # This happens in a number of Windows headers. Dumb.
+ "/wd4091",
+
+ # C4127: conditional expression is constant
+ # This warning can in theory catch dead code and other problems, but
+ # triggers in far too many desirable cases where the conditional
+ # expression is either set by macros or corresponds to some legitimate
+ # compile-time constant expression (due to constant template args,
+ # conditionals comparing the sizes of different types, etc.). Some of
+ # these can be worked around, but it's not worth it.
+ "/wd4127",
+
+ # C4251: 'identifier' : class 'type' needs to have dll-interface to be
+ # used by clients of class 'type2'
+ # This is necessary for the shared library build.
+ "/wd4251",
+
+ # C4275: non dll-interface class used as base for dll-interface class
+ # This points out a potential (but rare) problem with referencing static
+ # fields of a non-exported base, through the base's non-exported inline
+ # functions, or directly. The warning is subtle enough that people just
+ # suppressed it when they saw it, so it's not worth it.
+ "/wd4275",
+
+ # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
+ # TODO(brucedawson): fix warnings, crbug.com/554200
+ "/wd4312",
+
+ # C4324 warns when padding is added to fulfill alignas requirements,
+ # but can trigger in benign cases that are difficult to individually
+ # suppress.
+ "/wd4324",
+
+ # C4351: new behavior: elements of array 'array' will be default
+ # initialized
+ # This is a silly "warning" that basically just alerts you that the
+ # compiler is going to actually follow the language spec like it's
+ # supposed to, instead of not following it like old buggy versions did.
+ # There's absolutely no reason to turn this on.
+ "/wd4351",
+
+ # C4355: 'this': used in base member initializer list
+ # It's commonly useful to pass |this| to objects in a class' initializer
+ # list. While this warning can catch real bugs, most of the time the
+ # constructors in question don't attempt to call methods on the passed-in
+ # pointer (until later), and annotating every legit usage of this is
+ # simply more hassle than the warning is worth.
+ "/wd4355",
+
+ # C4503: 'identifier': decorated name length exceeded, name was
+ # truncated
+ # This only means that some long error messages might have truncated
+ # identifiers in the presence of lots of templates. It has no effect on
+ # program correctness and there's no real reason to waste time trying to
+ # prevent it.
+ "/wd4503",
+
+ # Warning C4589 says: "Constructor of abstract class ignores
+ # initializer for virtual base class." Disable this warning because it
+ # is flaky in VS 2015 RTM. It triggers on compiler generated
+ # copy-constructors in some cases.
+ "/wd4589",
+
+ # C4611: interaction between 'function' and C++ object destruction is
+ # non-portable
+ # This warning is unavoidable when using e.g. setjmp/longjmp. MSDN
+ # suggests using exceptions instead of setjmp/longjmp for C++, but
+ # Chromium code compiles without exception support. We therefore have to
+ # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
+ # have to turn off this warning (and be careful about how object
+ # destruction happens in such cases).
+ "/wd4611",
+
+ # Warnings to evaluate and possibly fix/reenable later:
+
+ "/wd4100", # Unreferenced formal function parameter.
+ "/wd4121", # Alignment of a member was sensitive to packing.
+ "/wd4244", # Conversion: possible loss of data.
+ "/wd4505", # Unreferenced local function has been removed.
+ "/wd4510", # Default constructor could not be generated.
+ "/wd4512", # Assignment operator could not be generated.
+ "/wd4610", # Class can never be instantiated, constructor required.
+ "/wd4838", # Narrowing conversion. Doesn't seem to be very useful.
+ "/wd4995", # 'X': name was marked as #pragma deprecated
+ "/wd4996", # Deprecated function warning.
+
+ # These are variable shadowing warnings that are new in VS2015. We
+ # should work through these at some point -- they may be removed from
+ # the RTM release in the /W4 set.
+ "/wd4456",
+ "/wd4457",
+ "/wd4458",
+ "/wd4459",
+
+ # All of our compilers support the extensions below.
+ "/wd4200", # nonstandard extension used: zero-sized array in struct/union
+ "/wd4201", # nonstandard extension used: nameless struct/union
+ "/wd4204", # nonstandard extension used : non-constant aggregate
+ # initializer
+
+ "/wd4221", # nonstandard extension used : 'identifier' : cannot be
+ # initialized using address of automatic variable
+
+ # http://crbug.com/588506 - Conversion suppressions waiting on Clang
+ # -Wconversion.
+ "/wd4245", # 'conversion' : conversion from 'type1' to 'type2',
+ # signed/unsigned mismatch
+
+ "/wd4267", # 'var' : conversion from 'size_t' to 'type', possible loss of
+ # data
+
+ "/wd4305", # 'identifier' : truncation from 'type1' to 'type2'
+ "/wd4389", # 'operator' : signed/unsigned mismatch
+
+ # http://crbug.com/346399 - Unreachable code suppression waiting on Clang
+ # -Wunreachable-code.
+ "/wd4702", # unreachable code
+
+ # http://crbug.com/848979 - MSVC is more conservative than Clang with
+ # regards to variables initialized and consumed in different branches.
+ "/wd4701", # Potentially uninitialized local variable 'name' used
+ "/wd4703", # Potentially uninitialized local pointer variable 'name' used
+
+ # http://crbug.com/848979 - Remaining Clang permitted warnings.
+ "/wd4661", # 'identifier' : no suitable definition provided for explicit
+ # template instantiation request
+
+ "/wd4706", # assignment within conditional expression
+ # MSVC is stricter and requires a boolean expression.
+
+ "/wd4715", # 'function' : not all control paths return a value'
+ # MSVC does not analyze switch (enum) for completeness.
+ ]
+
+ cflags_cc += [
+ # Allow "noexcept" annotations even though we compile with exceptions
+ # disabled.
+ "/wd4577",
+ ]
+
+ if (current_cpu == "x86") {
+ cflags += [
+ # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to
+ # 4267. Example: short TruncTest(size_t x) { return x; }
+ # Since we disable 4244 we need to disable 4267 during migration.
+ # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
+ "/wd4267",
+ ]
+ }
+
+ # The VS xtree header file needs to be patched, or 4702 (unreachable code
+ # warning) is reported if _HAS_EXCEPTIONS=0. Disable the warning if xtree
+ # is not patched.
+ if (!msvs_xtree_patched &&
+ exec_script("../../win_is_xtree_patched.py", [], "value") == 0) {
+ cflags += [ "/wd4702" ] # Unreachable code.
+ }
+ } else {
+ if ((is_mac || is_ios) && !is_nacl) {
+ # When compiling Objective-C, warns if a method is used whose
+ # availability is newer than the deployment target.
+ cflags += [ "-Wunguarded-availability" ]
+ }
+
+ if (is_ios) {
+ # When compiling Objective-C, warns if a selector named via @selector has
+ # not been defined in any visible interface.
+ cflags += [ "-Wundeclared-selector" ]
+ }
+
+ # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+ # warning).
+ if (current_cpu == "arm" && !is_clang) {
+ cflags += [ "-Wno-psabi" ]
+ }
+
+ if (!is_clang) {
+ cflags_cc += [
+ # See comment for -Wno-c++11-narrowing.
+ "-Wno-narrowing",
+ ]
+
+ # -Wunused-local-typedefs is broken in gcc,
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63872
+ cflags += [ "-Wno-unused-local-typedefs" ]
+
+ # Don't warn about "maybe" uninitialized. Clang doesn't include this
+ # in -Wall but gcc does, and it gives false positives.
+ cflags += [ "-Wno-maybe-uninitialized" ]
+ cflags += [ "-Wno-deprecated-declarations" ]
+
+ # -Wcomment gives too many false positives when a backslash-ended
+ # comment line is followed by a new line of comments.
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61638
+ cflags += [ "-Wno-comments" ]
+ }
+ }
+
+ # Common Clang and GCC warning setup.
+ if (!is_win || is_clang) {
+ cflags += [
+ # Disables.
+ "-Wno-missing-field-initializers", # "struct foo f = {0};"
+ "-Wno-unused-parameter", # Unused function parameters.
+ ]
+ }
+
+ if (is_clang) {
+ cflags += [
+ # TODO(thakis): Consider -Wloop-analysis (turns on
+ # -Wrange-loop-analysis too).
+
+ # This warns on using ints as initializers for floats in
+ # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+ # which happens in several places in chrome code. Not sure if
+ # this is worth fixing.
+ "-Wno-c++11-narrowing",
+
+ # TODO(thakis): This used to be implied by -Wno-unused-function,
+ # which we no longer use. Check if it makes sense to remove
+ # this as well. http://crbug.com/316352
+ "-Wno-unneeded-internal-declaration",
+ ]
+
+ # use_xcode_clang only refers to the iOS toolchain; host binaries always
+ # use Chromium's clang.
+ if (!is_nacl) {
+ cflags += [
+ # TODO(thakis): https://crbug.com/604888
+ "-Wno-undefined-var-template",
+ ]
+
+ if (is_win) {
+ # TODO(thakis): https://crbug.com/617318
+ # Currently goma cannot handle case sensitivity on Windows well.
+ cflags += [ "-Wno-nonportable-include-path" ]
+ }
+
+ if (current_toolchain == host_toolchain || !use_xcode_clang) {
+ # Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not
+ # recognize.
+ cflags += [
+ # Ignore warnings about MSVC optimization pragmas.
+ # TODO(thakis): Only for no_chromium_code? http://crbug.com/912662
+ "-Wno-ignored-pragma-optimize",
+ ]
+ if (is_fuchsia) {
+ cflags += [
+ # TODO(hans): https://crbug.com/890307
+ "-Wno-defaulted-function-deleted",
+ ]
+ }
+ }
+ }
+ }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+ if (is_win) {
+ cflags = [ "/W4" ] # Warning level 4.
+
+ if (is_clang) {
+ # Opt in to additional [[nodiscard]] on standard library methods.
+ defines = [ "_HAS_NODISCARD" ]
+ }
+ } else {
+ cflags = [ "-Wall" ]
+ if (treat_warnings_as_errors) {
+ cflags += [ "-Werror" ]
+
+ # The compiler driver can sometimes (rarely) emit warnings before calling
+ # the actual linker. Make sure these warnings are treated as errors as
+ # well.
+ ldflags = [ "-Werror" ]
+ }
+ if (is_clang) {
+ # Enable extra warnings for chromium_code when we control the compiler.
+ cflags += [ "-Wextra" ]
+ }
+
+ # In Chromium code, we define __STDC_foo_MACROS in order to get the
+ # C99 macros on Mac and Linux.
+ defines = [
+ "__STDC_CONSTANT_MACROS",
+ "__STDC_FORMAT_MACROS",
+ ]
+
+ if (!is_debug && !using_sanitizer && current_cpu != "s390x" &&
+ current_cpu != "s390" && current_cpu != "ppc64" &&
+ current_cpu != "mips" && current_cpu != "mips64") {
+ # Non-chromium code is not guaranteed to compile cleanly with
+ # _FORTIFY_SOURCE. Also, fortified builds may fail when optimizations are
+ # disabled, so only do this for Release builds.
+ defines += [ "_FORTIFY_SOURCE=2" ]
+ }
+
+ if (is_mac) {
+ cflags_objc = [ "-Wobjc-missing-property-synthesis" ]
+ cflags_objcc = [ "-Wobjc-missing-property-synthesis" ]
+ }
+ }
+
+ if (is_clang) {
+ cflags += [
+ # Warn on missing break statements at the end of switch cases.
+ # For intentional fallthrough, use FALLTHROUGH; from
+ # base/compiler_specific.h
+ "-Wimplicit-fallthrough",
+
+ # Thread safety analysis. See base/thread_annotations.h and
+ # https://clang.llvm.org/docs/ThreadSafetyAnalysis.html
+ "-Wthread-safety",
+ ]
+
+ # TODO(thakis): Enable this for more platforms, https://crbug.com/926235
+ # ChromeOS: http://crbug.com/940863
+ # Chromecast: http://crbug.com/942554
+ has_dchecks = is_debug || dcheck_always_on
+ if (!has_dchecks && is_ios && use_xcode_clang) {
+ # TODO(thakis): Remove this branch once Xcode's clang has clang r356148.
+ cflags_c = [ "-Wextra-semi" ]
+ cflags_cc = [ "-Wextra-semi" ]
+ } else if (!has_dchecks && is_chromeos && is_chrome_branded) {
+ # Temporarily disable -Wextra-semi for Chrome on Chrome OS.
+ } else if (is_chromecast && chromecast_branding != "public") {
+ # Temporarily disable -Wextra-semi for Chromecast.
+ } else {
+ cflags += [ "-Wextra-semi" ]
+ }
+ }
+
+ configs = [ ":default_warnings" ]
+}
+
+config("no_chromium_code") {
+ cflags = []
+ cflags_cc = []
+ defines = []
+
+ if (is_win) {
+ cflags += [
+ "/W3", # Warning level 3.
+ "/wd4800", # Disable warning when forcing value to bool.
+ "/wd4267", # TODO(jschuh): size_t to int.
+ "/wd4996", # Deprecated function warning.
+ ]
+ defines += [
+ "_CRT_NONSTDC_NO_WARNINGS",
+ "_CRT_NONSTDC_NO_DEPRECATE",
+ ]
+ } else {
+ # GCC may emit unsuppressible warnings, so don't add -Werror for
+ # no_chromium_code. crbug.com/589724
+ if (treat_warnings_as_errors && is_clang) {
+ cflags += [ "-Werror" ]
+ ldflags = [ "-Werror" ]
+ }
+ if (is_clang && !is_nacl) {
+ # TODO(thakis): Remove !is_nacl once
+ # https://codereview.webrtc.org/1552863002/ made its way into chromium.
+ cflags += [ "-Wall" ]
+ }
+ }
+
+ if (is_clang) {
+ cflags += [
+ # Lots of third-party libraries have unused variables. Instead of
+ # suppressing them individually, we just blanket suppress them here.
+ "-Wno-unused-variable",
+ ]
+ }
+
+ configs = [ ":default_warnings" ]
+}
+
+# noshadowing -----------------------------------------------------------------
+#
+# Allows turning -Wshadow on.
+
+config("noshadowing") {
+ # This flag has to be disabled for nacl because the nacl compiler is too
+ # strict about shadowing.
+ if (is_clang && !is_nacl) {
+ cflags = [ "-Wshadow" ]
+ }
+}
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+ if (is_win) {
+ cflags_cc = [ "/GR" ]
+ } else {
+ cflags_cc = [ "-frtti" ]
+ }
+}
+
+config("no_rtti") {
+ # Some sanitizer configs may require RTTI to be left enabled globally
+ if (!use_rtti) {
+ if (is_win) {
+ cflags_cc = [ "/GR-" ]
+ } else {
+ cflags_cc = [ "-fno-rtti" ]
+ cflags_objcc = cflags_cc
+ }
+ }
+}
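+
+# A sketch of how a target can opt back into RTTI (the target name is
+# hypothetical; this assumes the build applies ":no_rtti" by default):
+#
+# source_set("needs_rtti") {
+# configs -= [ "//build/config/compiler:no_rtti" ]
+# configs += [ "//build/config/compiler:rtti" ]
+# }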
+
+# export_dynamic ---------------------------------------------------------------
+#
+# Ensures all exported symbols are added to the dynamic symbol table. This is
+# necessary to expose Chrome's custom operator new() and operator delete() (and
+# other memory-related symbols) to libraries. Otherwise, they might
+# (de)allocate memory on a different heap, which would spell trouble if pointers
+# to heap-allocated memory are passed over shared library boundaries.
+config("export_dynamic") {
+ if (is_desktop_linux || export_libcxxabi_from_executables) {
+ ldflags = [ "-rdynamic" ]
+ }
+}
+
+# thin_archive -----------------------------------------------------------------
+#
+# Enables thin archives on posix, and on windows when the lld linker is used.
+# Regular archives directly include the object files used to generate them.
+# Thin archives merely reference the object files.
+# This makes building them faster since it requires less disk IO, but is
+# inappropriate if you wish to redistribute your static library.
+# This config is added to the global config, so thin archives should already be
+# enabled. If you want to make a distributable static library, you need to do 2
+# things:
+# 1. Set complete_static_lib so that all dependencies of the library make it
+# into the library. See `gn help complete_static_lib` for details.
+# 2. Remove the thin_archive config, so that the .a file actually contains all
+# .o files, instead of just references to .o files in the build directory
+# (see the sketch below).
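+#
+# A sketch combining both steps (the target name is hypothetical):
+#
+# static_library("redistributable_lib") {
+# complete_static_lib = true
+# configs -= [ "//build/config/compiler:thin_archive" ]
+# sources = [ "lib.cc" ]
+# }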
+config("thin_archive") {
+ # Mac and iOS use the mac-specific "libtool" command, not ar, which doesn't
+ # have a "thin archive" mode (it does accept -T, but it means truncating
+ # archive names to 16 characters, which is not what we want).
+ if ((is_posix && !is_nacl && !is_mac && !is_ios) || is_fuchsia) {
+ arflags = [ "-T" ]
+ } else if (is_win && use_lld) {
+ arflags = [ "/llvmlibthin" ]
+ }
+}
+
+# exceptions -------------------------------------------------------------------
+#
+# Allows turning Exceptions on or off.
+# Note: exceptions are disallowed in Google code.
+
+config("exceptions") {
+ if (is_win) {
+ # Enables exceptions in the STL.
+ if (!use_custom_libcxx) {
+ defines = [ "_HAS_EXCEPTIONS=1" ]
+ }
+ cflags_cc = [ "/EHsc" ]
+ } else {
+ cflags_cc = [ "-fexceptions" ]
+ cflags_objcc = cflags_cc
+ }
+}
+
+config("no_exceptions") {
+ if (is_win) {
+ # Disables exceptions in the STL.
+ # libc++ uses the __has_feature macro to control whether to use exceptions,
+ # so defining this macro is unnecessary. Defining _HAS_EXCEPTIONS to 0 also
+ # breaks libc++ because it depends on MSVC headers that only provide certain
+ # declarations if _HAS_EXCEPTIONS is 1. Those MSVC headers do not use
+ # exceptions, despite being conditional on _HAS_EXCEPTIONS.
+ if (!use_custom_libcxx) {
+ defines = [ "_HAS_EXCEPTIONS=0" ]
+ }
+ } else {
+ cflags_cc = [ "-fno-exceptions" ]
+ cflags_objcc = cflags_cc
+ }
+}
+
+# Warnings ---------------------------------------------------------------------
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+config("wexit_time_destructors") {
+ if (is_clang) {
+ cflags = [ "-Wexit-time-destructors" ]
+ }
+}
+
+# On Windows, when compiling for x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# This config is just an alias to no_shorten_64_warnings and will
+# suppress a superset of warning 4267 and any 64-bit -> 32-bit implicit
+# conversions. Having both for a time means not having to go through and
+# update all references to no_size_t_to_int_warning throughout the codebase
+# atomically.
+#
+# Any new warning suppressions should use the no_shorten_64_warnings
+# config below and not this.
+#
+# TODO(jschuh): crbug.com/167187 fix this and delete this config.
+config("no_size_t_to_int_warning") {
+ configs = [ ":no_shorten_64_warnings" ]
+}
+
+# As part of re-enabling -Wconversion (see issue 588506) some code
+# will continue to generate warnings.
+# The first warning to be enabled will be -Wshorten-64-to-32.
+#
+# Code that currently generates warnings for this can include this
+# config to disable them.
+config("no_shorten_64_warnings") {
+ if (current_cpu == "x64" || current_cpu == "arm64") {
+ if (is_clang) {
+ cflags = [ "-Wno-shorten-64-to-32" ]
+ } else {
+ if (is_win) {
+ # MSVC does not have an explicit warning equivalent to
+ # -Wshorten-64-to-32 but 4267 warns for size_t -> int
+ # on 64-bit builds, so it is the closest.
+ cflags = [ "/wd4267" ]
+ }
+ }
+ }
+}
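+
+# A usage sketch (the target name is hypothetical): code that still has
+# 64-bit -> 32-bit conversions can opt out of the warning while it is being
+# cleaned up:
+#
+# source_set("legacy_conversions") {
+# configs += [ "//build/config/compiler:no_shorten_64_warnings" ]
+# }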
+
+# Some code presumes that pointers to structures/objects are compatible
+# regardless of whether what they point to is already known to be valid.
+# gcc 4.9 and earlier had no way of suppressing this warning without
+# suppressing the rest of them. Here we centralize the identification of
+# the gcc 4.9 toolchains.
+config("no_incompatible_pointer_warnings") {
+ cflags = []
+ if (is_clang) {
+ cflags += [ "-Wno-incompatible-pointer-types" ]
+ } else if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+ cflags += [ "-w" ]
+ } else if (is_chromeos && current_cpu == "arm") {
+ cflags += [ "-w" ]
+ }
+}
+
+# Optimization -----------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_optimization" config on targets by
+# default. It will be equivalent to either "optimize" (release) or
+# "no_optimize" (debug) optimization configs.
+#
+# You can override the optimization level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+# configs -= [ "//build/config/compiler:default_optimization" ]
+# configs += [ "//build/config/compiler:optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+ common_optimize_on_cflags = [
+ "/Ob2", # Both explicit and auto inlining.
+ "/Oy-", # Disable omitting frame pointers, must be after /O2.
+ "/Zc:inline", # Remove unreferenced COMDAT (faster links).
+ ]
+ if (!is_asan) {
+ common_optimize_on_cflags += [
+ # Put data in separate COMDATs. This allows the linker
+ # to put bit-identical constants at the same address even if
+ # they're unrelated constants, which saves binary size.
+ # This optimization can't be used when ASan is enabled because
+ # it is not compatible with the ASan ODR checker.
+ "/Gw",
+ ]
+ }
+ common_optimize_on_ldflags = []
+
+ # /OPT:ICF is not desirable in Debug builds, since code-folding can result in
+ # misleading symbols in stack traces. It is also incompatible with
+ # incremental linking, which we enable for both Debug and component builds.
+ if (!is_debug && !is_component_build) {
+ common_optimize_on_ldflags += [ "/OPT:ICF" ] # Redundant COMDAT folding.
+ }
+
+ if (is_official_build) {
+ common_optimize_on_ldflags += [ "/OPT:REF" ] # Remove unreferenced data.
+
+ # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+ if (!use_lld && !is_clang) {
+ common_optimize_on_ldflags += [
+ # Set the number of LTCG code-gen threads to eight. The default is four.
+ # This gives a 5-10% link speedup.
+ "/cgthreads:8",
+ ]
+ if (use_incremental_wpo) {
+ # Incremental Link-time code generation.
+ common_optimize_on_ldflags += [ "/LTCG:INCREMENTAL" ]
+ } else {
+ common_optimize_on_ldflags += [ "/LTCG" ] # Link-time code generation.
+ }
+ if (full_wpo_on_official) {
+ if (use_incremental_wpo) {
+ arflags = [ "/LTCG:INCREMENTAL" ]
+ } else {
+ arflags = [ "/LTCG" ]
+ }
+ }
+ }
+ }
+} else {
+ common_optimize_on_cflags = []
+ common_optimize_on_ldflags = []
+
+ if (is_android) {
+ # TODO(jdduke) Re-enable on mips after resolving linking
+ # issues with libc++ (crbug.com/456380).
+ if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+ common_optimize_on_ldflags += [
+ # Warn in case of text relocations.
+ "-Wl,--warn-shared-textrel",
+ ]
+ }
+ }
+
+ if (is_mac || is_ios) {
+ if (symbol_level == 2) {
+ # Mac dead code stripping requires symbols.
+ common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+ }
+ } else if (current_os != "aix") {
+ # Non-Mac Posix flags.
+ # Aix does not support these.
+
+ common_optimize_on_cflags += [
+ # Don't emit the GCC version ident directives, they just end up in the
+ # .comment section taking up binary size.
+ "-fno-ident",
+
+ # Put data and code in their own sections, so that unused symbols
+ # can be removed at link time with --gc-sections.
+ "-fdata-sections",
+ "-ffunction-sections",
+ ]
+
+ common_optimize_on_ldflags += [
+ # Specifically tell the linker to perform optimizations.
+ # See http://lwn.net/Articles/192624/ .
+ # -O2 enables string tail merge optimization in gold and lld.
+ "-Wl,-O2",
+ "-Wl,--gc-sections",
+ ]
+ }
+}
+
+config("default_stack_frames") {
+ if (is_posix || is_fuchsia) {
+ if (enable_frame_pointers) {
+ cflags = [ "-fno-omit-frame-pointer" ]
+ } else {
+ cflags = [ "-fomit-frame-pointer" ]
+ }
+ }
+ # On Windows, the flag that enables frame pointers, "/Oy-", must always come
+ # after the optimization flag (e.g. "/O2"). The optimization flag is set by
+ # one of the "optimize" configs; see the rest of this file. The order in
+ # which cflags are applied is well-defined by the GN spec, but there is no
+ # way to ensure that cflags set by "default_stack_frames" are applied after
+ # those set by an "optimize" config. Similarly, there is no way to propagate
+ # state from this config into the "optimize" configs. We therefore always
+ # apply "/Oy-" in the common_optimize_on_cflags definition, even though this
+ # may not be correct.
+}
+
+# Default "optimization on" config.
+config("optimize") {
+ if (is_win) {
+ # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+ if (is_official_build && full_wpo_on_official && !is_clang) {
+ common_optimize_on_cflags += [
+ "/GL", # Whole program optimization.
+
+ # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+ # Probably anything that this would catch that wouldn't be caught in a
+ # normal build isn't going to actually be a bug, so the incremental
+ # value of C4702 for PGO builds is likely very small.
+ "/wd4702",
+ ]
+ }
+
+ # Favor size over speed; /O1 must come before the common flags. The GYP
+ # build also specifies /Os and /GF but these are implied by /O1.
+ cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+ } else if (optimize_for_size && !is_nacl) {
+ # Favor size over speed.
+ # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
+ # guard above.
+ if (is_clang) {
+ cflags = [ "-Oz" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-Os" ] + common_optimize_on_cflags
+ }
+ } else {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ }
+ ldflags = common_optimize_on_ldflags
+}
+
+# Same config as 'optimize' but without the WPO flag.
+config("optimize_no_wpo") {
+ if (is_win) {
+ # Favor size over speed; /O1 must come before the common flags. The GYP
+ # build also specifies /Os and /GF but these are implied by /O1.
+ cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+ } else if (optimize_for_size && !is_nacl) {
+ # Favor size over speed.
+ # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
+ # guard above.
+ if (is_clang) {
+ cflags = [ "-Oz" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-Os" ] + common_optimize_on_cflags
+ }
+ } else if (optimize_for_fuzzing) {
+ cflags = [ "-O1" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ }
+ ldflags = common_optimize_on_ldflags
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+ if (is_win) {
+ cflags = [
+ "/Od", # Disable optimization.
+ "/Ob0", # Disable all inlining (on by default).
+ "/GF", # Enable string pooling (off by default).
+ ]
+ } else if (is_android && !android_full_debug) {
+ # On Android, to keep binary size down, we still apply some optimizations
+ # that don't noticeably hurt debugging even when optimization is disabled.
+ if (is_clang) {
+ cflags = [ "-Oz" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-Os" ] + common_optimize_on_cflags
+ }
+ } else if (is_fuchsia) {
+ # On Fuchsia, we optimize for size here to reduce the size of debug build
+ # packages so they can be run in a KVM. See crbug.com/910243 for details.
+ cflags = [ "-Og" ]
+ } else {
+ cflags = [ "-O0" ]
+ ldflags = []
+ }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+ if (is_nacl && is_nacl_irt) {
+ # The NaCl IRT is a special case and always wants its own config.
+ # Various components do:
+ # if (!is_debug) {
+ # configs -= [ "//build/config/compiler:default_optimization" ]
+ # configs += [ "//build/config/compiler:optimize_max" ]
+ # }
+ # So this config has to have the selection logic just like
+ # "default_optimization", below.
+ configs = [ "//build/config/nacl:irt_optimize" ]
+ } else {
+ ldflags = common_optimize_on_ldflags
+ if (is_win) {
+ # Favor speed over size; /O2 must come before the common flags. The GYP
+ # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+ cflags = [ "/O2" ] + common_optimize_on_cflags
+
+ if (is_official_build) {
+ if (!is_clang) {
+ cflags += [
+ "/GL", # Whole program optimization.
+
+ # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+ # Probably anything that this would catch that wouldn't be caught
+ # in a normal build isn't going to actually be a bug, so the
+ # incremental value of C4702 for PGO builds is likely very small.
+ "/wd4702",
+ ]
+ }
+ # TODO(crbug.com/598772): Enable -flto for Clang.
+ }
+ } else if (optimize_for_fuzzing) {
+ cflags = [ "-O1" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ }
+ }
+}
+
+# This config can be used to override the default settings for per-component
+# and whole-program optimization, optimizing the particular target for speed
+# instead of code size. This config is exactly the same as "optimize_max"
+# except that we use -O3 instead of -O2 on non-win, non-IRT platforms.
+#
+# TODO(crbug.com/621335) - rework how all of these configs are related
+# so that we don't need this disclaimer.
+config("optimize_speed") {
+ if (is_nacl && is_nacl_irt) {
+ # The NaCl IRT is a special case and always wants its own config.
+ # Various components do:
+ # if (!is_debug) {
+ # configs -= [ "//build/config/compiler:default_optimization" ]
+ # configs += [ "//build/config/compiler:optimize_max" ]
+ # }
+ # So this config has to have the selection logic just like
+ # "default_optimization", below.
+ configs = [ "//build/config/nacl:irt_optimize" ]
+ } else {
+ ldflags = common_optimize_on_ldflags
+ if (is_win) {
+ # Favor speed over size; /O2 must come before the common flags. The GYP
+ # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+ cflags = [ "/O2" ] + common_optimize_on_cflags
+
+ # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+ if (is_official_build && !is_clang) {
+ cflags += [
+ "/GL", # Whole program optimization.
+
+ # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+ # Probably anything that this would catch that wouldn't be caught in a
+ # normal build isn't going to actually be a bug, so the incremental
+ # value of C4702 for PGO builds is likely very small.
+ "/wd4702",
+ ]
+ }
+ } else if (optimize_for_fuzzing) {
+ cflags = [ "-O1" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-O3" ] + common_optimize_on_cflags
+ }
+ }
+}
+
+config("optimize_fuzzing") {
+ cflags = [ "-O1" ] + common_optimize_on_cflags
+ ldflags = common_optimize_on_ldflags
+ visibility = [ ":default_optimization" ]
+}
+
+# The default optimization applied to all targets. This will be equivalent to
+# either "optimize" or "no_optimize", depending on the build flags.
+config("default_optimization") {
+ if (is_nacl && is_nacl_irt) {
+ # The NaCl IRT is a special case and always wants its own config.
+ # It gets optimized the same way regardless of the type of build.
+ configs = [ "//build/config/nacl:irt_optimize" ]
+ } else if (is_debug) {
+ configs = [ ":no_optimize" ]
+ } else if (optimize_for_fuzzing) {
+ assert(!is_win, "Fuzzing optimize level not supported on Windows")
+
+ # Coverage build is quite slow. Using "optimize_for_fuzzing" makes it even
+ # slower as it uses "-O1" instead of "-O3". Prevent that from happening.
+ assert(!use_clang_coverage,
+ "optimize_for_fuzzing=true should not be used with " +
+ "use_clang_coverage=true.")
+ configs = [ ":optimize_fuzzing" ]
+ } else {
+ configs = [ ":optimize" ]
+ }
+}
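+
+# A usage sketch (the target name "my_tool" is illustrative, not part of this
+# file): a target that wants maximum optimization in release builds swaps the
+# default config for a named one, e.g.:
+#
+# executable("my_tool") {
+# sources = [ "main.cc" ]
+# if (!is_debug) {
+# configs -= [ "//build/config/compiler:default_optimization" ]
+# configs += [ "//build/config/compiler:optimize_max" ]
+# }
+# }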
+
+_clang_sample_profile = ""
+if (is_clang && is_a_target_toolchain) {
+ if (clang_sample_profile_path != "") {
+ _clang_sample_profile = clang_sample_profile_path
+ } else if (clang_use_default_sample_profile) {
+ assert(build_with_chromium,
+ "Our default profiles currently only apply to Chromium")
+ assert(is_android || is_desktop_linux,
+ "The current platform has no default profile")
+ _clang_sample_profile = "//chrome/android/profiles/afdo.prof"
+ }
+}
+
+# Clang offers a way to assert that AFDO profiles are accurate, which causes it
+# to optimize functions not represented in a profile more aggressively for size.
+# This config can be toggled in cases where shaving off binary size hurts
+# performance too much.
+config("afdo_optimize_size") {
+ if (_clang_sample_profile != "" && sample_profile_is_accurate) {
+ cflags = [ "-fprofile-sample-accurate" ]
+ }
+}
+
+# GCC and clang support a form of profile-guided optimization called AFDO.
+# There are some targeted places that AFDO regresses (and an icky interaction
+# between //base/allocator:tcmalloc and AFDO on GCC), so we provide a separate
+# config to allow AFDO to be disabled per-target.
+config("afdo") {
+ if (is_clang) {
+ if (_clang_sample_profile != "") {
+ rebased_clang_sample_profile =
+ rebase_path(_clang_sample_profile, root_build_dir)
+ cflags = [ "-fprofile-sample-use=${rebased_clang_sample_profile}" ]
+ inputs = [
+ _clang_sample_profile,
+ ]
+ }
+ } else if (auto_profile_path != "" && is_a_target_toolchain) {
+ cflags = [ "-fauto-profile=${auto_profile_path}" ]
+ inputs = [
+ auto_profile_path,
+ ]
+ }
+}
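+
+# A usage sketch (the target name is illustrative): a target where AFDO is
+# known to regress can opt out by removing this config, e.g.:
+#
+# source_set("hot_loop_code") {
+# configs -= [ "//build/config/compiler:afdo" ]
+# }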
+
+# Symbols ----------------------------------------------------------------------
+
+# The BUILDCONFIG file sets the "default_symbols" config on targets by
+# default. It will be equivalent to one of the three specific symbol levels.
+#
+# You can override the symbol level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+# configs -= [ "//build/config/compiler:default_symbols" ]
+# configs += [ "//build/config/compiler:symbols" ]
+
+# A helper config that all configs passing /DEBUG to the linker should
+# include as a sub-config.
+config("win_pdbaltpath") {
+ visibility = [
+ ":symbols",
+ ":minimal_symbols",
+ ]
+
+ # /DEBUG causes the linker to generate a pdb file, and to write the absolute
+ # path to it in the executable file it generates. This flag turns that
+ # absolute path into just the basename of the pdb file, which helps with
+ # build reproducibility. Debuggers look for pdb files next to executables,
+ # so there's no downside to always using this.
+ ldflags = [ "/pdbaltpath:%_PDB%" ]
+}
+
+# Full symbols.
+config("symbols") {
+ if (is_win) {
+ if (use_goma || is_clang) {
+ # Note that with VC++ this requires is_win_fastlink, enforced elsewhere.
+ cflags = [ "/Z7" ] # Debug information in the .obj files.
+ } else {
+ cflags = [ "/Zi" ] # Produce PDB file, no edit and continue.
+ }
+
+ if (is_win_fastlink && !use_lld) {
+ # TODO(hans): is_win_fastlink=true is incompatible with use_lld. However,
+ # some developers might have enabled it manually, so to ease the
+ # transition to lld, just ignore it rather than asserting. Eventually we
+ # want to assert instead.
+
+ # Tell VS 2015+ to create a PDB that references debug
+ # information in .obj and .lib files instead of copying
+ # it all. This flag is incompatible with /PROFILE.
+ ldflags = [ "/DEBUG:FASTLINK" ]
+ } else if (is_clang && use_lld && use_ghash) {
+ cflags += [ "-gcodeview-ghash" ]
+ ldflags = [ "/DEBUG:GHASH" ]
+ } else {
+ ldflags = [ "/DEBUG" ]
+ }
+
+ # All configs using /DEBUG should include this:
+ configs = [ ":win_pdbaltpath" ]
+
+ if (is_clang) {
+ # /DEBUG:FASTLINK requires every object file to have standalone debug
+ # information.
+ if (is_win_fastlink && !use_lld) {
+ cflags += [ "-fstandalone-debug" ]
+ } else {
+ cflags += [ "-fno-standalone-debug" ]
+ }
+ }
+ } else {
+ cflags = []
+ if (is_mac && enable_dsyms) {
+ # If generating dSYMs, specify -fno-standalone-debug. This was
+ # originally specified for https://crbug.com/479841 because dsymutil
+ # could not handle a 4GB dSYM file. But dsymutil from Xcodes prior to
+ # version 7 also produces debug data that is incompatible with Breakpad
+ # dump_syms, so this is still required (https://crbug.com/622406).
+ cflags += [ "-fno-standalone-debug" ]
+ }
+ if (!use_debug_fission && current_cpu == "arm") {
+ # dump_syms has issues with dwarf4 on arm, https://crbug.com/744956
+ # TODO(thakis): Remove this again once dump_syms is fixed.
+ #
+ # debug fission needs DWARF DIEs to be emitted at version 4.
+ # Chrome OS emits Debug Frame in DWARF2's .debug_frame v1 to make breakpad
+ # happy [1].
+ # Unless Android needs debug fission, DWARF3 is the simplest solution.
+ #
+ # [1] crrev.com/a81d5ade0b043208e06ad71a38bcf9c348a1a52f
+ cflags += [ "-gdwarf-3" ]
+ } else if (is_mac) {
+ # clang defaults to DWARF2 on macOS unless mac_deployment_target is
+ # at least 10.11.
+ # TODO(thakis): Remove this once mac_deployment_target is 10.11.
+ cflags += [ "-gdwarf-4" ]
+ }
+
+ # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+ # elsewhere in this file), so they can't have build-dir-independent output.
+ # Disable symbols for nacl object files to get deterministic,
+ # build-directory-independent output. pnacl and nacl-clang do support that
+ # flag, so we can use -g2 for pnacl and nacl-clang compiles.
+ # gcc nacl is is_nacl && !is_clang; pnacl and nacl-clang are is_nacl && is_clang.
+ if (!is_nacl || is_clang) {
+ cflags += [ "-g2" ]
+ }
+
+ if (use_debug_fission && !is_nacl && !is_android) {
+ # NOTE: Some Chrome OS builds globally set |use_debug_fission| to true,
+ # but they also build some targets against Android toolchains which aren't
+ # compatible with it.
+ #
+ # TODO(https://crbug.com/837032): See if we can clean this up by e.g. not
+ # setting use_debug_fission globally.
+ cflags += [ "-gsplit-dwarf" ]
+ }
+ asmflags = cflags
+ ldflags = []
+
+ # TODO(thakis): Figure out if there's a way to make this go for 32-bit,
+ # currently we get "warning:
+ # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o:
+ # DWARF info may be corrupt; offsets in a range list entry are in different
+ # sections" there. Maybe just a bug in nacl_switch_32.S.
+ if (!is_mac && !is_ios && !is_nacl && current_cpu != "x86" &&
+ (use_gold || use_lld)) {
+ if (is_clang) {
+ # This flag enables the GNU-format pubnames and pubtypes sections,
+ # which lld needs in order to generate a correct GDB index.
+ # TODO(pcc): Try to make lld understand non-GNU-format pubnames
+ # sections (llvm.org/PR34820).
+ cflags += [ "-ggnu-pubnames" ]
+ }
+ ldflags += [ "-Wl,--gdb-index" ]
+ }
+ }
+}
+
+# Minimal symbols.
+# This config guarantees that the symbols needed for stack traces, which are
+# shown to the user when a crash happens in unit tests running on the
+# buildbot, are retained.
+config("minimal_symbols") {
+ if (is_win) {
+ # Linker symbols for backtraces only.
+ cflags = []
+ ldflags = [ "/DEBUG" ]
+
+ # All configs using /DEBUG should include this:
+ configs = [ ":win_pdbaltpath" ]
+
+ # For win/asan, get stack traces with full line numbers.
+ # AddressSanitizerTests.TestAddressSanitizer needs this, and since
+ # win/asan isn't a default cq bot the build time hit is ok.
+ if (is_clang && using_sanitizer) {
+ # -gline-tables-only is the same as -g1, but clang-cl only exposes the
+ # former.
+ cflags += [ "-gline-tables-only" ]
+ }
+ } else {
+ cflags = []
+ if (current_cpu == "arm") {
+ # dump_syms has issues with dwarf4 on arm, https://crbug.com/744956
+ # TODO(thakis): Remove this again once dump_syms is fixed.
+ cflags += [ "-gdwarf-3" ]
+ } else if (is_mac) {
+ # clang defaults to DWARF2 on macOS unless mac_deployment_target is
+ # at least 10.11.
+ # TODO(thakis): Remove this once mac_deployment_target is 10.11.
+ cflags += [ "-gdwarf-4" ]
+ }
+
+ # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+ # elsewhere in this file), so they can't have build-dir-independent output.
+ # Disable symbols for nacl object files to get deterministic,
+ # build-directory-independent output. pnacl and nacl-clang do support that
+ # flag, so we can use -g1 for pnacl and nacl-clang compiles.
+ # gcc nacl is is_nacl && !is_clang; pnacl and nacl-clang are is_nacl && is_clang.
+ if (!is_nacl || is_clang) {
+ cflags += [ "-g1" ]
+ }
+ ldflags = []
+ if (is_android && is_clang) {
+ # Android defaults to symbol_level=1 builds in production builds
+ # (https://crbug.com/648948), but clang, unlike gcc, doesn't emit
+ # DW_AT_linkage_name in -g1 builds. -fdebug-info-for-profiling enables
+ # that (and a bunch of other things we don't need), so that we get
+ # qualified names in stacks.
+ # TODO(thakis): Consider making clang emit DW_AT_linkage_name in -g1 mode;
+ # failing that consider doing this on non-Android too.
+ cflags += [ "-fdebug-info-for-profiling" ]
+ }
+
+ # Note: -gsplit-dwarf implicitly turns on -g2 with clang, so don't pass it.
+ asmflags = cflags
+ }
+}
+
+# No symbols.
+config("no_symbols") {
+ if (!is_win) {
+ cflags = [ "-g0" ]
+ asmflags = cflags
+ }
+}
+
+# Default symbols.
+config("default_symbols") {
+ if (symbol_level == 0) {
+ configs = [ ":no_symbols" ]
+ } else if (symbol_level == 1) {
+ configs = [ ":minimal_symbols" ]
+ } else if (symbol_level == 2) {
+ configs = [ ":symbols" ]
+ } else {
+ assert(false)
+ }
+
+ # This config is removed by base unittests apk.
+ if (is_android && is_clang && strip_debug_info) {
+ configs += [ ":strip_debug" ]
+ }
+}
+
+config("strip_debug") {
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [ "-Wl,--strip-debug" ]
+}
+
+if (is_ios || is_mac) {
+ # On Mac and iOS, this enables support for ARC (automatic ref-counting).
+ # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html.
+ config("enable_arc") {
+ common_flags = [ "-fobjc-arc" ]
+ cflags_objc = common_flags
+ cflags_objcc = common_flags
+ }
+}
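+
+# A usage sketch (the target name is illustrative): an Objective-C target
+# opts into ARC by adding the config, e.g.:
+#
+# source_set("my_objc_code") {
+# sources = [ "foo.mm" ]
+# configs += [ "//build/config/compiler:enable_arc" ]
+# }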
diff --git a/deps/v8/build/config/compiler/compiler.gni b/deps/v8/build/config/compiler/compiler.gni
new file mode 100644
index 0000000000..3bdd85bf97
--- /dev/null
+++ b/deps/v8/build/config/compiler/compiler.gni
@@ -0,0 +1,286 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/compiler/pgo/pgo.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (is_chromeos) {
+ import("//build/config/chromeos/rules.gni")
+} else {
+ is_cros_chrome_sdk = false
+}
+
+if (is_android) {
+ import("//build/config/android/abi.gni")
+}
+if (current_cpu == "arm" || current_cpu == "arm64") {
+ import("//build/config/arm.gni")
+}
+
+declare_args() {
+ # How many symbols to include in the build. This affects the performance of
+ # the build since the symbols are large and dealing with them is slow.
+ # 2 means regular build with symbols.
+ # 1 means minimal symbols, usually enough for backtraces only. Symbols with
+ # internal linkage (static functions or those in anonymous namespaces) may not
+ # appear when using this level.
+ # 0 means no symbols.
+ # -1 means auto-set according to debug/release and platform.
+ symbol_level = -1
+
+ # Android-only: Strip the debug info of libraries within lib.unstripped to
+ # reduce size. As long as symbol_level > 0, this will still allow stacks to be
+ # symbolized.
+ strip_debug_info = false
+
+ # Compile in such a way as to enable profiling of the generated code. For
+ # example, don't omit the frame pointer and leave in symbols.
+ enable_profiling = false
+
+ # use_debug_fission: whether to use split DWARF debug info
+ # files. This can reduce link time significantly, but is incompatible
+ # with some utilities such as icecc and ccache. Requires gold and
+ # gcc >= 4.8 or clang.
+ # http://gcc.gnu.org/wiki/DebugFission
+ #
+ # This is a placeholder value indicating that the code below should set
+ # the default. This is necessary to delay the evaluation of the default
+ # value expression until after its input values such as use_gold have
+ # been set, e.g. by a toolchain_args() block.
+ use_debug_fission = "default"
+
+ # Enables support for ThinLTO, which links 3x-10x faster than full LTO. See
+ # also http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
+ use_thin_lto = is_cfi || (is_android && is_official_build)
+
+ # Tell VS to create a PDB that references information in .obj files rather
+ # than copying it all. This should improve linker performance. mspdbcmf.exe
+ # can be used to convert a fastlink pdb to a normal one.
+ is_win_fastlink = false
+
+ # Whether or not we should turn on incremental WPO. Only affects the VS
+ # Windows build.
+ use_incremental_wpo = false
+
+ # Whether we're using a sample profile collected on an architecture different
+ # than the one we're compiling for.
+ #
+ # It's currently not possible to collect AFDO profiles on anything but
+ # x86{,_64}.
+ using_mismatched_sample_profile = current_cpu != "x64" && current_cpu != "x86"
+}
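+
+# These arguments are normally set via gn args; a minimal sketch (the values
+# are illustrative only):
+#
+# gn gen out/Release --args="symbol_level=1 strip_debug_info=true"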
+
+assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO")
+
+# Exclude unwind tables for official builds as unwinding can be done from stack
+# dumps produced by Crashpad at a later time "offline" in the crash server.
+# For unofficial (e.g. development) builds and non-Chrome branded (e.g. Cronet
+# which doesn't use Crashpad, crbug.com/479283) builds it's useful to be able
+# to unwind at runtime.
+exclude_unwind_tables = (is_chrome_branded && is_official_build) ||
+ (is_chromecast && !is_cast_desktop_build && !is_debug &&
+ !cast_is_debug && !is_fuchsia)
+
+# If true, optimize for size. Does not affect windows builds.
+# Linux & Mac favor speed over size.
+# TODO(brettw) it's weird that Mac and desktop Linux are different. We should
+# explore favoring size over speed in this case as well.
+optimize_for_size = is_android || is_chromecast || is_fuchsia || is_ios
+
+declare_args() {
+ # Whether we should consider the profile we're using to be accurate. Accurate
+ # profiles have the benefit of (potentially substantial) binary size
+ # reductions, by instructing the compiler to optimize cold and uncovered
+ # functions heavily for size. This often comes at the cost of performance.
+ sample_profile_is_accurate = optimize_for_size
+}
+
+# Determine whether to enable or disable frame pointers, based on the platform
+# and build arguments.
+if (is_mac || is_ios || is_linux) {
+ enable_frame_pointers = true
+} else if (is_win) {
+ # 64-bit Windows ABI doesn't support frame pointers.
+ if (current_cpu == "x64") {
+ enable_frame_pointers = false
+ } else {
+ enable_frame_pointers = true
+ }
+} else if (is_chromeos) {
+ # ChromeOS generally prefers frame pointers, to support CWP.
+ # However, Clang does not currently generate usable frame pointers in ARM
+ # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them
+ # there to avoid the unnecessary overhead.
+ enable_frame_pointers = current_cpu != "arm"
+} else if (is_android) {
+ enable_frame_pointers =
+ enable_profiling ||
+ # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
+ current_cpu == "arm64" ||
+ # For x86 Android, unwind tables are huge without frame pointers
+ # (crbug.com/762629). Enabling frame pointers grows the code size slightly
+ # but overall shrinks binaries considerably by avoiding huge unwind
+ # tables.
+ (current_cpu == "x86" && !exclude_unwind_tables && optimize_for_size) ||
+ using_sanitizer
+} else {
+ # Explicitly ask for frame pointers, otherwise:
+ # * Stacks may be missing for sanitizer and profiling builds.
+ # * Debug tcmalloc can crash (crbug.com/636489).
+ enable_frame_pointers = using_sanitizer || enable_profiling || is_debug
+}
+
+# In general assume that if we have frame pointers then we can use them to
+# unwind the stack. However, this requires that they are enabled by default for
+# most translation units, that they are emitted correctly, and that the
+# compiler or platform provides a way to access them.
+can_unwind_with_frame_pointers = enable_frame_pointers
+if (current_cpu == "arm" && arm_use_thumb) {
+ # We cannot currently unwind ARM Thumb frame pointers correctly.
+ # See https://bugs.llvm.org/show_bug.cgi?id=18505
+ can_unwind_with_frame_pointers = false
+} else if (is_win) {
+ # Windows 32-bit does provide frame pointers, but the compiler does not
+ # provide intrinsics to access them, so we don't use them.
+ can_unwind_with_frame_pointers = false
+}
+
+assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
+
+# Unwinding with the CFI table is only possible in static library builds and
+# is required only when frame pointers are not enabled.
+can_unwind_with_cfi_table = is_android && !is_component_build &&
+ !enable_frame_pointers && current_cpu == "arm"
+
+declare_args() {
+ # Whether or not the official builds should be built with full WPO. Enabled
+ # by default for PGO builds.
+ if (chrome_pgo_phase > 0) {
+ full_wpo_on_official = true
+ } else {
+ full_wpo_on_official = false
+ }
+}
+
+declare_args() {
+ # Set to true to use lld, the LLVM linker.
+ # https://crbug.com/911658 for using lld on 32-bit linux.
+ # https://crbug.com/917504 for arm chromeos
+ use_lld = is_clang &&
+ (is_win || is_fuchsia || is_android ||
+ (is_linux && target_os != "chromeos" && current_cpu != "x86") ||
+ (target_os == "chromeos" && current_cpu != "arm"))
+}
+
+declare_args() {
+ # Whether to use the gold linker from binutils instead of lld or bfd.
+ use_gold =
+ !use_lld && !(is_chromecast && is_linux &&
+ (current_cpu == "arm" || current_cpu == "mipsel")) &&
+ ((is_linux && (current_cpu == "x64" || current_cpu == "x86" ||
+ current_cpu == "arm" || current_cpu == "arm64" ||
+ current_cpu == "mipsel" || current_cpu == "mips64el")) ||
+ (is_android && (current_cpu == "x86" || current_cpu == "x64" ||
+ current_cpu == "arm" || current_cpu == "arm64")))
+
+ # Use relative paths for debug info. This is important to make the build
+ # results independent of the checkout and build directory names, which
+ # in turn is important for goma compile hit rate.
+ # Setting this to true may make it harder to debug binaries on Linux, see
+ # https://chromium.googlesource.com/chromium/src/+/master/docs/linux_debugging.md#Source-level-debug-with-fdebug_compilation_dir
+ strip_absolute_paths_from_debug_symbols =
+ is_android || is_fuchsia || is_nacl || (is_win && use_lld) || is_linux
+}
+
+# If it wasn't manually set, set to an appropriate default.
+assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
+if (symbol_level == -1) {
+ if (is_android && !is_component_build &&
+ !(android_64bit_target_cpu && !build_apk_secondary_abi)) {
+ # Reduce symbol level when it will cause invalid elf files to be created
+ # (due to file size). https://crbug.com/648948.
+ symbol_level = 1
+ } else if (is_win && use_goma && !is_clang) {
+ # goma doesn't support PDB files, so we fall back to minimal symbols during
+ # goma compilation; otherwise the redundant debug information generated by
+ # Visual Studio (repeated in every .obj file) makes linker memory
+ # consumption and link times unsustainable (crbug.com/630074).
+ # Clang on windows does not have this issue.
+ # If you use is_win_fastlink = true or lld or clang then you can set
+ # symbol_level = 2 when using goma.
+ symbol_level = 1
+ } else if (is_cros_chrome_sdk) {
+ # Use lower symbol level in Simple Chrome build for faster link time.
+ # For Simple Chrome, this should take precedence over is_official_build,
+ # turned on by --internal.
+ if ((target_cpu == "x64" || target_cpu == "x86") && !is_debug) {
+ # For release x86/x64 build, specify symbol_level=0 for faster link time.
+ # x86/x64 shows backtraces with symbol_level=0 (arm requires
+ # symbol_level=1).
+ symbol_level = 0
+ } else {
+ symbol_level = 1
+ }
+ } else if (using_sanitizer) {
+ # Sanitizers need line table info for stack traces. They don't need type
+ # info or variable info, so we can leave that out to speed up the build.
+ # Sanitizers also require symbols for filename suppressions to work.
+ symbol_level = 1
+ } else if ((!is_nacl && !is_linux && !is_fuchsia) || is_debug ||
+ is_official_build || is_chromecast) {
+ # Linux builds are slowed down by having symbols as part of the target
+ # binary, whereas Mac and Windows keep them separate, so in release Linux
+ # builds we default them off, but keep them on for official builds and
+ # Chromecast builds.
+ symbol_level = 2
+ } else {
+ symbol_level = 0
+ }
+} else if (symbol_level == 2) {
+ if (is_win) {
+ # See crbug.com/630074
+ assert(is_win_fastlink || use_lld || !use_goma || is_clang,
+ "VC++ goma builds that use symbol_level 2 must use " +
+ "is_win_fastlink or use_lld.")
+ }
+}
+
+# Non-component debug builds with symbol_level = 2 are an undesirable (very slow
+# build times) and unsupported (some test binaries will fail with > 4 GB PDBs)
+# combination. This is only checked when current_toolchain == default_toolchain
+# because the is_component_build flag is set to false in various components of
+# the build (like nacl) and we don't want to assert on those.
+# iOS does not support component builds so add an exception for this platform.
+if (build_with_chromium) {
+ assert(symbol_level != 2 || current_toolchain != default_toolchain ||
+ is_component_build || !is_debug || is_ios,
+ "Can't do non-component debug builds at symbol_level=2")
+}
+
+# Assert that the configuration isn't going to hit https://crbug.com/648948.
+# An exception is made when target_os == "chromeos" as we only use the Android
+# toolchain there to build relatively small binaries.
+assert(ignore_elf32_limitations || !is_android || target_os == "chromeos" ||
+ (android_64bit_target_cpu && !build_apk_secondary_abi) ||
+ is_component_build || symbol_level < 2,
+ "Android 32-bit non-component builds cannot have symbol_level=2 " +
+ "due to 4GiB file size limit, see https://crbug.com/648948. " +
+ "If you really want to try this out, " +
+ "set ignore_elf32_limitations=true.")
+
+# This variable is true if the current toolchain is either the default
+# toolchain, or, on Android, the secondary ABI toolchain. In other words, it's
+# a toolchain that generates targets for the product, as opposed to the host
+# OS. It can be used in situations where we wish to avoid doing work on the
+# host toolchain, but we don't want to break Android's secondary ABI.
+# I.e., "current_toolchain == default_toolchain" is not a good way to infer
+# "not the host toolchain".
+is_a_target_toolchain =
+ current_toolchain == default_toolchain ||
+ (is_android && defined(android_secondary_abi_toolchain) &&
+ current_toolchain == android_secondary_abi_toolchain)
diff --git a/deps/v8/build/config/compiler/pgo/BUILD.gn b/deps/v8/build/config/compiler/pgo/BUILD.gn
new file mode 100644
index 0000000000..bb147a6ecc
--- /dev/null
+++ b/deps/v8/build/config/compiler/pgo/BUILD.gn
@@ -0,0 +1,101 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/compiler/pgo/pgo.gni")
+
+# Configuration that enables PGO instrumentation.
+config("pgo_instrumentation_flags") {
+ visibility = [ ":default_pgo_flags" ]
+ cflags = []
+ ldflags = []
+
+ # Only add flags when chrome_pgo_phase == 1, so that variables we would use
+ # are not required to be defined when we're not actually using PGO.
+ if (chrome_pgo_phase == 1) {
+ if (is_clang) {
+ cflags = [ "-fprofile-instr-generate" ]
+ if (is_win) {
+ # Normally, we pass -fprofile-instr-generate to the compiler and it
+ # automatically passes the right flags to the linker.
+ # However, on Windows, we call the linker directly, without going
+ # through the compiler driver. This means we need to pass the right
+ # flags ourselves.
+ _clang_rt_base_path =
+ "$clang_base_path/lib/clang/$clang_version/lib/windows"
+ if (target_cpu == "x86") {
+ _clang_rt_suffix = "-i386.lib"
+ } else if (target_cpu == "x64") {
+ _clang_rt_suffix = "-x86_64.lib"
+ }
+ assert(_clang_rt_suffix != "", "target CPU $target_cpu not supported")
+ ldflags += [ "$_clang_rt_base_path/clang_rt.profile$_clang_rt_suffix" ]
+ } else {
+ ldflags += [ "-fprofile-instr-generate" ]
+ }
+ } else if (is_win) {
+ ldflags = [
+ # In MSVC, we must use /LTCG when using PGO.
+ "/LTCG",
+
+ # Make sure that enough memory gets allocated for the PGO profiling
+ # buffers and also cap this memory. Usually a PGI-instrumented build
+ # of chrome_child.dll requires ~55MB of memory for storing its counters,
+ # etc. Normally the linker should automatically choose an appropriate
+ # amount of memory, but it doesn't always make a good estimate and
+ # sometimes allocates too little or too much (and so the instrumented
+ # image fails to start). Making sure that the buffer size is in the
+ # [128 MB, 512 MB] range should prevent this from happening.
+ "/GENPROFILE:MEMMIN=134217728",
+ "/GENPROFILE:MEMMAX=536870912",
+ "/PogoSafeMode",
+ ]
+ }
+ }
+}
+
+# Configuration that enables optimization using profile data.
+config("pgo_optimization_flags") {
+ visibility = [ ":default_pgo_flags" ]
+ cflags = []
+ ldflags = []
+
+ # Only add flags when chrome_pgo_phase == 2, so that variables we would use
+ # are not required to be defined when we're not actually using PGO.
+ if (chrome_pgo_phase == 2) {
+ if (is_clang) {
+ assert(pgo_data_path != "",
+ "Please set pgo_data_path to point at the profile data")
+ cflags += [
+ "-fprofile-instr-use=$pgo_data_path",
+
+ # It's possible to have some profile data legitimately missing,
+ # and at least some profile data always ends up being considered
+ # out of date, so make sure we don't error for those cases.
+ "-Wno-profile-instr-unprofiled",
+ "-Wno-error=profile-instr-out-of-date",
+ ]
+ } else if (is_win) {
+ ldflags += [
+ # In MSVC, we must use /LTCG when using PGO.
+ "/LTCG",
+ "/USEPROFILE",
+ ]
+ }
+ }
+}
+
+# Applies flags necessary when profile-guided optimization is used.
+# Flags are only added if PGO is enabled, so that this config is safe to
+# include by default.
+config("default_pgo_flags") {
+ if (chrome_pgo_phase == 0) {
+ # Nothing. This config should be a no-op when chrome_pgo_phase == 0.
+ } else if (chrome_pgo_phase == 1) {
+ configs = [ ":pgo_instrumentation_flags" ]
+ } else if (chrome_pgo_phase == 2) {
+ configs = [ ":pgo_optimization_flags" ]
+ }
+}
diff --git a/deps/v8/build/config/compiler/pgo/pgo.gni b/deps/v8/build/config/compiler/pgo/pgo.gni
new file mode 100644
index 0000000000..5da004d28f
--- /dev/null
+++ b/deps/v8/build/config/compiler/pgo/pgo.gni
@@ -0,0 +1,17 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Specify the current PGO phase.
+ # Here are the different values that can be used:
+ # 0 : Means that PGO is turned off.
+ # 1 : Used during the PGI (instrumentation) phase.
+ # 2 : Used during the PGO (optimization) phase.
+ #
+ # TODO(sebmarchand): Add support for the PGU (update) phase.
+ chrome_pgo_phase = 0
+
+ # When using chrome_pgo_phase = 2, read profile data from this path.
+ pgo_data_path = ""
+}
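+
+# A minimal sketch of driving the two phases from args.gn (the profile path
+# is illustrative only):
+#
+# Phase 1 (instrumentation build): chrome_pgo_phase = 1
+# Phase 2 (optimized build): chrome_pgo_phase = 2 and
+# pgo_data_path = "//path/to/profile.profdata"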
diff --git a/deps/v8/build/config/compute_inputs_for_analyze.gni b/deps/v8/build/config/compute_inputs_for_analyze.gni
new file mode 100644
index 0000000000..050ab70a3f
--- /dev/null
+++ b/deps/v8/build/config/compute_inputs_for_analyze.gni
@@ -0,0 +1,14 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Enable this flag when running "gn analyze".
+ #
+ # This causes some gn actions to compute inputs immediately (via exec_script)
+ # where they would normally compute them only when executed (and write them to
+ # a depfile).
+ #
+ # This flag will slow down GN, but is required for analyze to work properly.
+ compute_inputs_for_analyze = false
+}
diff --git a/deps/v8/build/config/coverage/BUILD.gn b/deps/v8/build/config/coverage/BUILD.gn
new file mode 100644
index 0000000000..2604411aea
--- /dev/null
+++ b/deps/v8/build/config/coverage/BUILD.gn
@@ -0,0 +1,44 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+
+config("default_coverage") {
+ if (use_clang_coverage) {
+ ldflags = []
+ if (!is_win) {
+ ldflags += [ "-fprofile-instr-generate" ]
+ } else {
+ # Windows directly calls link.exe instead of the compiler driver when
+ # linking. Hence, pass the runtime libraries instead of
+ # -fsanitize=address.
+ # TODO(rnk): Come up with a more uniform approach for linking against
+ # compiler-rt for sanitizers and coverage.
+ if (target_cpu == "x64") {
+ ldflags += [ "clang_rt.profile-x86_64.lib" ]
+ } else if (target_cpu == "x86") {
+ ldflags += [ "clang_rt.profile-i386.lib" ]
+ } else {
+ assert(false,
+ "use_clang_coverage=true not supported yet for this target_cpu")
+ }
+ }
+
+ cflags = [
+ "-fprofile-instr-generate",
+ "-fcoverage-mapping",
+
+ # The following experimental flag removes unused header functions from the
+ # coverage mapping data embedded in the test binaries; the resulting
+ # reduction in binary size makes it possible to build Chrome's large unit
+ # test targets on macOS. Please refer to crbug.com/796290 for more details.
+ "-mllvm",
+ "-limited-coverage-experimental=true",
+ ]
+
+ if (!is_win) {
+ cflags += [ "-fno-use-cxa-atexit" ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/coverage/OWNERS b/deps/v8/build/config/coverage/OWNERS
new file mode 100644
index 0000000000..70d27b62af
--- /dev/null
+++ b/deps/v8/build/config/coverage/OWNERS
@@ -0,0 +1,3 @@
+inferno@chromium.org
+mmoroz@chromium.org
+ochang@chromium.org
diff --git a/deps/v8/build/config/coverage/coverage.gni b/deps/v8/build/config/coverage/coverage.gni
new file mode 100644
index 0000000000..12ab03fcd0
--- /dev/null
+++ b/deps/v8/build/config/coverage/coverage.gni
@@ -0,0 +1,29 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# There are two ways to enable code coverage instrumentation:
+# 1. When |use_clang_coverage| is true and |coverage_instrumentation_input_file|
+# is empty, all source files are instrumented.
+# 2. When |use_clang_coverage| is true and |coverage_instrumentation_input_file|
+# is NOT empty and points to a text file on the file system, ONLY source
+# files specified in the input file are instrumented.
+declare_args() {
+ # Enable Clang's Source-based Code Coverage.
+ use_clang_coverage = false
+
+ # The path to the coverage instrumentation input file should be a source root
+ # absolute path (e.g. //out/Release/coverage_instrumentation_input.txt), and
+ # the file consists of multiple lines where each line represents a path to a
+ # source file, and the paths must be relative to the root build directory.
+ # e.g. ../../base/task/post_task.cc for build directory 'out/Release'.
+ coverage_instrumentation_input_file = ""
+}
+
+assert(!use_clang_coverage || is_clang,
+ "Clang Source-based Code Coverage requires clang.")
+
+assert(coverage_instrumentation_input_file == "" || use_clang_coverage,
+ "Instrument a subset of source files requires enabling clang coverage.")
diff --git a/deps/v8/build/config/crypto.gni b/deps/v8/build/config/crypto.gni
new file mode 100644
index 0000000000..a3d52deafd
--- /dev/null
+++ b/deps/v8/build/config/crypto.gni
@@ -0,0 +1,15 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file declares build flags for the SSL library configuration.
+#
+# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
+# the global build dependency on it should be removed.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+# True if NSS is used for certificate handling.
+use_nss_certs = is_linux
diff --git a/deps/v8/build/config/dcheck_always_on.gni b/deps/v8/build/config/dcheck_always_on.gni
new file mode 100644
index 0000000000..e58cfced39
--- /dev/null
+++ b/deps/v8/build/config/dcheck_always_on.gni
@@ -0,0 +1,15 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Enables DCHECKs to be built-in, but to default to being non-fatal/log-only.
+ # DCHECKs can then be set as fatal/non-fatal via the DCheckIsFatal feature.
+ # See crbug.com/596231 for details on how this is used.
+ dcheck_is_configurable = false
+}
+
+declare_args() {
+ # Set to true to enable dcheck in Release builds.
+ dcheck_always_on = dcheck_is_configurable
+}
diff --git a/deps/v8/build/config/features.gni b/deps/v8/build/config/features.gni
new file mode 100644
index 0000000000..fdf05dfdd9
--- /dev/null
+++ b/deps/v8/build/config/features.gni
@@ -0,0 +1,59 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+if (is_android) {
+ import("//build/config/android/config.gni")
+}
+
+declare_args() {
+ # Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4.
+ # We always build Google Chrome and Chromecast with proprietary codecs.
+ #
+ # Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it
+ # out of //build will require using the build_overrides directory.
+ proprietary_codecs = is_chrome_branded || is_chromecast
+
+ # The safe_browsing_mode variable controls the build-time configuration of
+ # the Safe Browsing feature, which can be compiled at four different levels:
+ # 0 disables it, 1 enables it fully, 2 enables mobile protection via an
+ # external API, and 3 enables mobile protection via an internal API.
+ if (is_ios || is_chromecast) {
+ safe_browsing_mode = 0
+ } else if (is_android) {
+ if (notouch_build) {
+ safe_browsing_mode = 3
+ } else {
+ safe_browsing_mode = 2
+ }
+ } else {
+ safe_browsing_mode = 1
+ }
+
+ # libudev usage. This currently only affects the content layer.
+ use_udev = is_linux && !is_chromecast
+
+ use_dbus = is_linux && !is_chromecast
+
+ use_gio = is_linux && !is_chromeos && !is_chromecast
+}
+#
+# =============================================
+# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
diff --git a/deps/v8/build/config/freetype/BUILD.gn b/deps/v8/build/config/freetype/BUILD.gn
new file mode 100644
index 0000000000..0ba9027167
--- /dev/null
+++ b/deps/v8/build/config/freetype/BUILD.gn
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+
+group("freetype") {
+ if (use_system_freetype) {
+ public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
+ } else {
+ public_deps = [
+ "//third_party:freetype_harfbuzz",
+ ]
+ }
+}
diff --git a/deps/v8/build/config/freetype/OWNERS b/deps/v8/build/config/freetype/OWNERS
new file mode 100644
index 0000000000..3277f87312
--- /dev/null
+++ b/deps/v8/build/config/freetype/OWNERS
@@ -0,0 +1,2 @@
+bungeman@chromium.org
+drott@chromium.org
diff --git a/deps/v8/build/config/freetype/freetype.gni b/deps/v8/build/config/freetype/freetype.gni
new file mode 100644
index 0000000000..b4eced2d65
--- /dev/null
+++ b/deps/v8/build/config/freetype/freetype.gni
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Blink needs a recent and properly build-configured FreeType version to
+ # support OpenType variations and color emoji, and to avoid security bugs.
+ # By default we ship and link such a version as part of Chrome. For
+ # distributions that prefer to keep linking to the system-provided version,
+ # FreeType must be newer than version 2.7.1 and have color bitmap support
+ # compiled in. WARNING: System FreeType configurations other than as
+ # described WILL INTRODUCE TEXT RENDERING AND SECURITY REGRESSIONS.
+ use_system_freetype = false
+}
diff --git a/deps/v8/build/config/fuchsia/BUILD.gn b/deps/v8/build/config/fuchsia/BUILD.gn
new file mode 100644
index 0000000000..59da62e766
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/BUILD.gn
@@ -0,0 +1,128 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/config.gni")
+import("//build/config/fuchsia/rules.gni")
+import("//build/config/sysroot.gni")
+
+assert(is_fuchsia)
+assert(!is_posix)
+
+config("compiler") {
+ sdk_version_file = "${fuchsia_sdk}/.hash"
+ sdk_version = read_file(sdk_version_file, "trim string")
+ defines = [
+ # To force full builds after SDK updates in case of ABI changes.
+ "FUCHSIA_SDK_VERSION=$sdk_version",
+ ]
+ cflags = []
+ ldflags = []
+ if (current_cpu == "arm64") {
+ cflags += [ "--target=aarch64-fuchsia" ]
+ ldflags += [ "--target=aarch64-fuchsia" ]
+ } else if (current_cpu == "x64") {
+ cflags += [ "--target=x86_64-fuchsia" ]
+ ldflags += [ "--target=x86_64-fuchsia" ]
+ } else {
+ assert(false, "Unsupported architecture")
+ }
+ asmflags = cflags
+
+ ldflags += [
+ # The stack defaults to 256k on Fuchsia (see
+ # https://fuchsia.googlesource.com/zircon/+/master/system/private/zircon/stack.h#9),
+ # but on other platforms it's much higher, so a variety of code assumes more
+ # will be available. Raise to 8M which matches e.g. macOS.
+ "-Wl,-z,stack-size=0x800000",
+
+ # We always want fdio or else e.g. stdio wouldn't be initialized if fdio
+ # happens to not be directly referenced. The common POSIX-y compiler setup
+ # uses -Wl,--as-needed which drops it if it's simply "-lfdio" from a libs
+ # setting. Disable --as-needed, add fdio, and then set back to --as-needed.
+ # https://crbug.com/731217.
+ "-Wl,--no-as-needed",
+ "-lfdio",
+ "-Wl,--as-needed",
+ ]
+
+ # Add SDK lib dir for -lfdio above.
+ lib_dirs = [ "${fuchsia_sdk}/arch/${current_cpu}/lib" ]
+
+ # TODO(crbug.com/821951): Clang enables SafeStack by default when targeting
+ # Fuchsia, but it breaks some tests, notably in V8.
+ cflags += [ "-fno-sanitize=safe-stack" ]
+
+ libs = [ "zircon" ]
+}
+
+# Writes an extended version of fvm.blk to fvm.extended.blk.
+blobstore_extended_path = "$root_out_dir/fvm.extended.blk"
+action("blobstore_extended_fvm") {
+ # The file is grown by 1GB, which should be large enough to hold packaged
+ # binaries and assets. The value should be increased if the size becomes a
+ # limitation in the future.
+ _extend_size = "1073741824" # 1GB
+
+ _target_dir = "${fuchsia_sdk}/target/${current_cpu}"
+
+ script = "//build/config/fuchsia/extend_fvm.py"
+
+ inputs = [
+ "${fuchsia_sdk}/tools/fvm",
+ "${_target_dir}/fvm.blk",
+ ]
+ outputs = [
+ blobstore_extended_path,
+ ]
+
+ args = [
+ rebase_path("${fuchsia_sdk}/tools/fvm", root_build_dir),
+ rebase_path("${_target_dir}/fvm.blk", root_build_dir),
+ rebase_path(blobstore_extended_path, root_build_dir),
+ _extend_size,
+ ]
+}
+
+# Creates a compressed copy-on-write (COW) image based on fvm.blk.
+action("blobstore_extended_qcow2") {
+ script = "//build/gn_run_binary.py"
+
+ deps = [
+ ":blobstore_extended_fvm",
+ ]
+ inputs = [
+ blobstore_extended_path,
+ ]
+ outputs = [
+ blobstore_qcow_path,
+ ]
+ data = [
+ blobstore_qcow_path,
+ ]
+
+ args = [
+ rebase_path("${qemu_root}/bin/qemu-img", root_build_dir),
+ "convert",
+ "-f",
+ "raw",
+ "-O",
+ "qcow2",
+ "-c",
+ rebase_path(blobstore_extended_path, root_build_dir),
+ rebase_path(blobstore_qcow_path, root_build_dir),
+ ]
+}
+
+# Settings for executables.
+config("executable_config") {
+ ldflags = [ "-pie" ]
+}
diff --git a/deps/v8/build/config/fuchsia/OWNERS b/deps/v8/build/config/fuchsia/OWNERS
new file mode 100644
index 0000000000..e7034eabb1
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/OWNERS
@@ -0,0 +1 @@
+file://build/fuchsia/OWNERS
diff --git a/deps/v8/build/config/fuchsia/build_symbol_archive.py b/deps/v8/build/config/fuchsia/build_symbol_archive.py
new file mode 100755
index 0000000000..264c6ddfca
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/build_symbol_archive.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a compressed archive of binary symbols derived from the unstripped
+executables and libraries cataloged by "ids.txt"."""
+
+import argparse
+import os
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('ids_txt', type=str, nargs=1,
+ help='Path to ids.txt files.')
+ parser.add_argument('-o', '--output_tarball', nargs=1, type=str,
+ help='Path which the tarball will be written to.')
+ parser.add_argument('--eu-strip', required=True, type=str,
+ help='Path to the eu-strip tool.')
+ parser.add_argument('--fuchsia-build-id-dir', type=str, required=True,
+ help='Directory containing symbols for SDK prebuilts.')
+ args = parser.parse_args(args)
+
+ stripped_tempfile = tempfile.NamedTemporaryFile()
+ ids_txt = args.ids_txt[0]
+ build_ids_archive = tarfile.open(args.output_tarball[0], 'w:bz2')
+ for line in open(ids_txt, 'r'):
+ # debug_tempfile: The path which debug symbols will be written to.
+ # stripped_tempfile: The path which the stripped executable will be written
+ # to. This file is ignored and immediately deleted.
+ with tempfile.NamedTemporaryFile() as debug_tempfile, \
+ tempfile.NamedTemporaryFile() as stripped_tempfile:
+ build_id, binary_path = line.strip().split(' ')
+
+ # Look for prebuilt symbols in the SDK first.
+ symbol_source_path = os.path.join(args.fuchsia_build_id_dir,
+ build_id[:2],
+ build_id[2:] + '.debug')
+ if not os.path.exists(symbol_source_path):
+ binary_abspath = os.path.abspath(
+ os.path.join(os.path.dirname(ids_txt), binary_path))
+
+ # Extract debugging symbols from the binary into their own file.
+ # The stripped executable binary is written to |debug_tempfile| and
+ # deleted. Writing to /dev/null would be preferable, but eu-strip
+ # disallows writing output to /dev/null.
+ subprocess.check_call([args.eu_strip, '-g', binary_abspath,
+ '-f', debug_tempfile.name,
+ '-o', stripped_tempfile.name])
+
+ if os.path.getsize(debug_tempfile.name) == 0:
+ # This is a prebuilt which wasn't accompanied by SDK symbols.
+ continue
+
+ symbol_source_path = debug_tempfile.name
+
+ # Archive the debugging symbols, placing them in a hierarchy keyed to the
+ # GNU build ID. The symbols reside in directories whose names are the
+ # first two characters of the build ID, with the symbol files themselves
+ # named after the remaining characters of the build ID. So, a symbol file
+ # with the build ID "deadbeef" would be located at the path 'de/adbeef'.
+ build_ids_archive.add(symbol_source_path,
+ '%s/%s' % (build_id[:2], build_id[2:]))
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/config/fuchsia/config.gni b/deps/v8/build/config/fuchsia/config.gni
new file mode 100644
index 0000000000..cdf684e259
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/config.gni
@@ -0,0 +1,22 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(current_os == "fuchsia")
+
+declare_args() {
+ # Path to Fuchsia SDK.
+ fuchsia_sdk = "//third_party/fuchsia-sdk/sdk"
+}
+
+# Compute the arch-specific path to packages' dynamic library dependencies.
+if (current_cpu == "arm64") {
+ dist_libroot = fuchsia_sdk + "/arch/arm64/dist"
+} else if (current_cpu == "x64") {
+ dist_libroot = fuchsia_sdk + "/arch/x64/dist"
+} else {
+ assert(false, "No libraries available for architecture: $current_cpu")
+}
+
+# Compute the qemu path.
+qemu_root = "//third_party/qemu-${host_os}-${host_cpu}"
diff --git a/deps/v8/build/config/fuchsia/extend_fvm.py b/deps/v8/build/config/fuchsia/extend_fvm.py
new file mode 100644
index 0000000000..44e5ee30e1
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/extend_fvm.py
@@ -0,0 +1,26 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies a FVM file and extends it by a specified amount.
+
+Arg #1: path to 'fvm'.
+ #2: the path to the source fvm.blk.
+ #3: the path that the extended FVM file will be written to.
+ #4: the additional number of bytes to grow fvm.blk by."""
+
+import os
+import shutil
+import subprocess
+import sys
+
+def ExtendFVM(fvm_tool_path, src_path, dest_path, delta):
+ old_size = os.path.getsize(src_path)
+ new_size = old_size + int(delta)
+ shutil.copyfile(src_path, dest_path)
+ subprocess.check_call([fvm_tool_path, dest_path, 'extend', '--length',
+ str(new_size)])
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(ExtendFVM(*sys.argv[1:]))
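+
+# A minimal usage sketch (paths and the 1 GiB delta are illustrative only):
+#
+# python extend_fvm.py tools/fvm fvm.blk fvm.extended.blk 1073741824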
diff --git a/deps/v8/build/config/fuchsia/fidl_library.gni b/deps/v8/build/config/fuchsia/fidl_library.gni
new file mode 100644
index 0000000000..acb1132a6f
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/fidl_library.gni
@@ -0,0 +1,262 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/config.gni")
+
+assert(is_fuchsia)
+
+# Template for FIDL libraries. Following parameters can be passed when
+# instantiating this template:
+# sources - List of .fidl files.
+# library_name - (optional) Name of the library. target_name is used if name
+# is not specified explicitly.
+# namespace - (optional) Namespace for the library.
+# deps - (optional) List of other fidl_library() targets that this
+# FIDL library depends on.
+# languages - Generate bindings for the given languages, defaults to
+# [ "cpp" ]. "js" also supported.
+#
+# $namespace.$library_name must match the library name specified in the FIDL
+# files.
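+#
+# A minimal instantiation sketch (library and file names are illustrative
+# only):
+#
+# fidl_library("echo") {
+# namespace = "example"
+# sources = [ "echo.fidl" ]
+# languages = [ "cpp", "js" ]
+# }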
+
+template("fidl_library") {
+ forward_variables_from(invoker,
+ [
+ "languages",
+ "namespace",
+ ])
+
+ _library_basename = target_name
+ if (defined(invoker.library_name)) {
+ _library_basename = invoker.library_name
+ }
+
+ if (defined(namespace)) {
+ _library_name = "${namespace}.${_library_basename}"
+ _namespace_path = string_replace(namespace, ".", "/")
+ _library_path = "${_namespace_path}/${_library_basename}"
+ } else {
+ _library_name = _library_basename
+ _library_path = _library_basename
+ }
+
+ if (!defined(invoker.languages)) {
+ languages = [ "cpp" ]
+ }
+
+ _define_cpp_action = false
+ _define_js_action = false
+
+ foreach(language, languages) {
+ if (language == "cpp") {
+ _define_cpp_action = true
+ } else if (language == "js") {
+ _define_js_action = true
+ }
+ }
+
+ _response_file = "$target_gen_dir/$target_name.rsp"
+ _json_representation = "$target_gen_dir/${_library_name}.fidl.json"
+ _output_gen_dir = "$target_gen_dir/fidl"
+ _output_base = "$_output_gen_dir/${_library_path}/cpp/fidl"
+ _tables_file = "$_output_gen_dir/${_library_name}.fidl-tables.cc"
+
+ action("${target_name}_response_file") {
+ script = "//third_party/fuchsia-sdk/gen_fidl_response_file.py"
+
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "sources",
+ "testonly",
+ ])
+
+ _libraries_file = "$target_gen_dir/${invoker.target_name}.fidl_libraries"
+
+ outputs = [
+ _response_file,
+ _libraries_file,
+ ]
+
+ args = [
+ "--out-response-file",
+ rebase_path(_response_file, root_build_dir),
+ "--out-libraries",
+ rebase_path(_libraries_file, root_build_dir),
+ "--tables",
+ rebase_path(_tables_file, root_build_dir),
+ "--json",
+ rebase_path(_json_representation, root_build_dir),
+ "--name",
+ _library_name,
+ "--sources",
+ ] + rebase_path(sources, root_build_dir)
+
+ if (defined(invoker.deps) || defined(invoker.public_deps)) {
+ merged_deps = []
+
+ if (defined(invoker.deps)) {
+ merged_deps += invoker.deps
+ }
+
+ if (defined(invoker.public_deps)) {
+ merged_deps += invoker.public_deps
+ }
+
+ dep_libraries = []
+ deps = []
+
+ foreach(dep, merged_deps) {
+ gen_dir = get_label_info(dep, "target_gen_dir")
+ name = get_label_info(dep, "name")
+ dep_libraries += [ "$gen_dir/$name.fidl_libraries" ]
+ deps += [ "${dep}_response_file" ]
+ }
+
+ inputs = dep_libraries
+
+ args += [ "--dep-libraries" ] + rebase_path(dep_libraries, root_build_dir)
+ }
+ }
+
+ action("${target_name}_compile") {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ visibility = [ ":*" ]
+
+ deps = [
+ ":${invoker.target_name}_response_file",
+ ]
+
+ script = "//build/gn_run_binary.py"
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ "${fuchsia_sdk}/.hash",
+ _response_file,
+ ]
+
+ outputs = [
+ _json_representation,
+ _tables_file,
+ ]
+
+ rebased_response_file = rebase_path(_response_file, root_build_dir)
+ args = [
+ rebase_path("//third_party/fuchsia-sdk/sdk/tools/fidlc", root_build_dir),
+ "@$rebased_response_file",
+ ]
+ }
+
+ if (_define_cpp_action) {
+ action("${target_name}_cpp_gen") {
+ visibility = [ ":${invoker.target_name}" ]
+ forward_variables_from(invoker, [ "testonly" ])
+
+ deps = [
+ ":${invoker.target_name}_compile",
+ ]
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ "${fuchsia_sdk}/.hash",
+ _json_representation,
+ ]
+
+ outputs = [
+ "${_output_base}.h",
+ "${_output_base}.cc",
+ ]
+
+ script = "//build/gn_run_binary.py"
+ args = [
+ rebase_path("//third_party/fuchsia-sdk/sdk/tools/fidlgen",
+ root_build_dir),
+ "-generators",
+ "cpp",
+ "-json",
+ rebase_path(_json_representation, root_build_dir),
+ "-include-base",
+ rebase_path(_output_gen_dir, root_build_dir),
+ "-output-base",
+ rebase_path("${_output_base}", root_build_dir),
+ ]
+ }
+ }
+
+ if (_define_js_action) {
+ _output_js_path = "$_output_gen_dir/${_library_path}/js/fidl.js"
+ action("${target_name}_js_gen") {
+ visibility = [ ":${invoker.target_name}" ]
+ forward_variables_from(invoker, [ "testonly" ])
+
+ deps = [
+ ":${invoker.target_name}_compile",
+ ]
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ "${fuchsia_sdk}/.hash",
+ _json_representation,
+ "//build/fuchsia/fidlgen_js/fidl.py", # The schema helper file.
+ ]
+
+ outputs = [
+ _output_js_path,
+ ]
+
+ script = "//build/fuchsia/fidlgen_js/gen.py"
+
+ args = [
+ rebase_path(_json_representation, root_build_dir),
+ "--output",
+ rebase_path("${_output_js_path}", root_build_dir),
+ ]
+
+ data = []
+ foreach(o, outputs) {
+ data += [ o ]
+ }
+ }
+ }
+
+ config("${target_name}_config") {
+ visibility = [ ":${invoker.target_name}" ]
+ include_dirs = [ _output_gen_dir ]
+ }
+
+ source_set("${target_name}") {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+
+ sources = [
+ "${_output_base}.cc",
+ "${_output_base}.h",
+ _tables_file,
+ ]
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":${invoker.target_name}_compile" ]
+
+ foreach(language, languages) {
+ deps += [ ":${invoker.target_name}_${language}_gen" ]
+ }
+
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+ public_deps += [ "//third_party/fuchsia-sdk/sdk:fidl" ]
+ public_deps += [ "//third_party/fuchsia-sdk/sdk:fidl_cpp" ]
+
+ public_configs = [ ":${invoker.target_name}_config" ]
+ }
+}
diff --git a/deps/v8/build/config/fuchsia/package.gni b/deps/v8/build/config/fuchsia/package.gni
new file mode 100644
index 0000000000..55b43c05a5
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/package.gni
@@ -0,0 +1,239 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/config.gni")
+import("//build/config/sysroot.gni")
+
+# Creates a Fuchsia .far package file.
+#
+# Parameters are:
+# package_name_override: Specifies the name of the package to generate,
+# if different than |target_name|.
+# binary: The executable target which should be launched.
+#   sandbox_policy: A path to the sandbox policy file that will be used.
+#       "testonly" targets default to
+#       //build/config/fuchsia/testing_sandbox_policy.
+#       Non-test targets must explicitly specify a |sandbox_policy|.
+#   excluded_files: Package-relative paths of files to exclude from the
+#       package (optional).
+#   deps: Additional targets to build and include in the package (optional).
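+#
+# A minimal usage sketch (the target names here are hypothetical, not part
+# of this patch):
+#
+#   fuchsia_package("echo_test_pkg") {
+#     testonly = true
+#     binary = ":echo_test"
+#   }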
+template("fuchsia_package") {
+ pkg = {
+ forward_variables_from(invoker, "*")
+
+ if (defined(package_name_override)) {
+ package_name = package_name_override
+ } else {
+ package_name = invoker.target_name
+ }
+
+ if (!defined(sandbox_policy)) {
+ assert(testonly == true)
+ sandbox_policy = "//build/config/fuchsia/testing_sandbox_policy"
+ }
+ }
+ assert(defined(pkg.binary))
+
+ _pm_tool_path = "${fuchsia_sdk}/tools/pm"
+
+ _pkg_out_dir = "${target_gen_dir}/${pkg.package_name}"
+ _runtime_deps_file = "$_pkg_out_dir/${pkg.package_name}.runtime_deps"
+ _archive_manifest = "$_pkg_out_dir/${pkg.package_name}.archive_manifest"
+ _build_ids_file = "$_pkg_out_dir/ids.txt"
+ _component_manifest = "$_pkg_out_dir/${pkg.package_name}.cmx"
+ _key_file = "$_pkg_out_dir/signing-key"
+ _meta_far_file = "$_pkg_out_dir/meta.far"
+ _combined_far_file = "$_pkg_out_dir/${pkg.package_name}-0.far"
+ _final_far_file = "$_pkg_out_dir/${pkg.package_name}.far"
+ _package_info_path = "$_pkg_out_dir/package"
+
+ _write_manifest_target = "${pkg.package_name}__write_manifest"
+ _generate_key_target = "${pkg.package_name}__genkey"
+ _package_target = "${pkg.package_name}__pkg"
+ _bundle_target = "${pkg.package_name}__bundle"
+
+  # Generates a manifest file, based on the GN runtime deps, that is
+  # suitable for consumption by the "pm" tool.
+ action(_write_manifest_target) {
+ _depfile = "${target_gen_dir}/${target_name}_stamp.d"
+
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+
+ script = "//build/config/fuchsia/prepare_package_inputs.py"
+
+ inputs = [
+ _runtime_deps_file,
+ pkg.sandbox_policy,
+ ]
+
+ outputs = [
+ _archive_manifest,
+ _build_ids_file,
+ _component_manifest,
+ ]
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ pkg.binary ]
+ data_deps = deps
+
+ # Use a depfile to trigger package rebuilds if any of the files (static
+ # assets, shared libraries, etc.) included by the package have changed.
+ depfile = _depfile
+
+ args = [
+ "--root-dir",
+ rebase_path("//", root_build_dir),
+ "--out-dir",
+ rebase_path(root_out_dir, root_build_dir),
+ "--app-name",
+ pkg.package_name,
+ "--app-filename",
+ get_label_info(pkg.binary, "name"),
+ "--sandbox-policy-path",
+ rebase_path(pkg.sandbox_policy, root_build_dir),
+ "--runtime-deps-file",
+ rebase_path(_runtime_deps_file, root_build_dir),
+ "--depfile-path",
+ rebase_path(_depfile, root_build_dir),
+ "--manifest-path",
+ rebase_path(_archive_manifest, root_build_dir),
+ "--build-ids-file",
+ rebase_path(_build_ids_file, root_build_dir),
+ ]
+
+ if (defined(pkg.excluded_files)) {
+ foreach(filename, pkg.excluded_files) {
+ args += [
+ "--exclude-file",
+ filename,
+ ]
+ }
+ }
+
+ write_runtime_deps = _runtime_deps_file
+ }
+
+ # Generates a signing key to use for building the package.
+ action(_generate_key_target) {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ script = "//build/gn_run_binary.py"
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ "${fuchsia_sdk}/.hash",
+ ]
+
+ outputs = [
+ _key_file,
+ ]
+
+ args = [
+ rebase_path(_pm_tool_path, root_build_dir),
+ "-k",
+ rebase_path(_key_file, root_build_dir),
+ "genkey",
+ ]
+ }
+
+ # Creates a signed Fuchsia metadata package.
+ action(_package_target) {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ script = "//build/gn_run_binary.py"
+
+ deps = [
+ ":$_generate_key_target",
+ ":$_write_manifest_target",
+ ]
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ "${fuchsia_sdk}/.hash",
+ _key_file,
+ ]
+
+ outputs = [
+ _meta_far_file,
+ ]
+
+ args = [
+ rebase_path(_pm_tool_path, root_build_dir),
+ "-o",
+ rebase_path(_pkg_out_dir, root_build_dir),
+ "-k",
+ rebase_path(_key_file, root_build_dir),
+ "-m",
+ rebase_path(_archive_manifest, root_build_dir),
+ "build",
+ ]
+ }
+
+ # Creates a package containing the metadata archive and blob data.
+ action(_bundle_target) {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ script = "//build/gn_run_binary.py"
+
+ deps = [
+ ":$_package_target",
+ ":$_write_manifest_target",
+ ]
+
+ inputs = [
+ # Depend on the SDK hash, to ensure rebuild if the SDK tools change.
+ "${fuchsia_sdk}/.hash",
+ _meta_far_file,
+ _archive_manifest,
+ ]
+
+ outputs = [
+ _combined_far_file,
+ ]
+
+ args = [
+ rebase_path(_pm_tool_path, root_build_dir),
+ "-o",
+ rebase_path(_pkg_out_dir, root_build_dir),
+ "-m",
+ rebase_path(_archive_manifest, root_build_dir),
+ "archive",
+ ]
+ }
+
+ # Copies the archive to a well-known path.
+ # TODO(kmarshall): Use a 'pm' output flag to write directly to the desired
+ # file path instead.
+ copy(target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ # Allows dependent targets to make use of "ids.txt".
+ public_deps = [
+ ":$_write_manifest_target",
+ ]
+
+ deps = [
+ ":$_bundle_target",
+ ]
+
+ data = [
+ _final_far_file,
+
+ # Files specified here so that they can be read by isolated testbots.
+ _package_info_path,
+ _build_ids_file,
+ ]
+
+ sources = [
+ _combined_far_file,
+ ]
+ outputs = [
+ _final_far_file,
+ ]
+ }
+}
diff --git a/deps/v8/build/config/fuchsia/prepare_package_inputs.py b/deps/v8/build/config/fuchsia/prepare_package_inputs.py
new file mode 100644
index 0000000000..078c3cb266
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/prepare_package_inputs.py
@@ -0,0 +1,220 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a archive manifest used for Fuchsia package generation."""
+
+import argparse
+import json
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+
+def MakePackagePath(file_path, roots):
+ """Computes a path for |file_path| that is relative to one of the directory
+ paths in |roots|.
+
+  Args:
+    file_path: The file path to relativize.
+    roots: A list of directory paths which may serve as a relative root
+        for |file_path|.
+
+ Examples:
+
+ >>> MakePackagePath('/foo/bar.txt', ['/foo/'])
+ 'bar.txt'
+
+ >>> MakePackagePath('/foo/dir/bar.txt', ['/foo/'])
+ 'dir/bar.txt'
+
+ >>> MakePackagePath('/foo/out/Debug/bar.exe', ['/foo/', '/foo/out/Debug/'])
+ 'bar.exe'
+ """
+
+ # Prevents greedily matching against a shallow path when a deeper, better
+ # matching path exists.
+ roots.sort(key=len, reverse=True)
+
+ for next_root in roots:
+ if not next_root.endswith(os.sep):
+ next_root += os.sep
+
+ if file_path.startswith(next_root):
+ relative_path = file_path[len(next_root):]
+
+ return relative_path
+
+ return file_path
+
+
+def _GetStrippedPath(bin_path):
+ """Finds the stripped version of the binary |bin_path| in the build
+ output directory."""
+
+ return bin_path.replace('lib.unstripped/', 'lib/').replace(
+ 'exe.unstripped/', '')
+
+
+def _IsBinary(path):
+ """Checks if the file at |path| is an ELF executable by inspecting its FourCC
+ header."""
+
+ with open(path, 'rb') as f:
+ file_tag = f.read(4)
+ return file_tag == '\x7fELF'
+
+
+def _WriteBuildIdsTxt(binary_paths, ids_txt_path):
+ """Writes an index text file that maps build IDs to the paths of unstripped
+ binaries."""
+
+ READELF_FILE_PREFIX = 'File: '
+ READELF_BUILD_ID_PREFIX = 'Build ID: '
+
+ # List of binaries whose build IDs are awaiting processing by readelf.
+ # Entries are removed as readelf's output is parsed.
+ unprocessed_binary_paths = {os.path.basename(p): p for p in binary_paths}
+
+ with open(ids_txt_path, 'w') as ids_file:
+ readelf_stdout = subprocess.check_output(
+ ['readelf', '-n'] + map(_GetStrippedPath, binary_paths))
+
+ if len(binary_paths) == 1:
+ # Readelf won't report a binary's path if only one was provided to the
+ # tool.
+ binary_shortname = os.path.basename(binary_paths[0])
+ else:
+ binary_shortname = None
+
+ for line in readelf_stdout.split('\n'):
+ line = line.strip()
+
+ if line.startswith(READELF_FILE_PREFIX):
+ binary_shortname = os.path.basename(line[len(READELF_FILE_PREFIX):])
+ assert binary_shortname in unprocessed_binary_paths
+
+ elif line.startswith(READELF_BUILD_ID_PREFIX):
+ # Paths to the unstripped executables listed in "ids.txt" are specified
+ # as relative paths to that file.
+ unstripped_rel_path = os.path.relpath(
+ os.path.abspath(unprocessed_binary_paths[binary_shortname]),
+ os.path.dirname(os.path.abspath(ids_txt_path)))
+
+ build_id = line[len(READELF_BUILD_ID_PREFIX):]
+ ids_file.write(build_id + ' ' + unstripped_rel_path + '\n')
+ del unprocessed_binary_paths[binary_shortname]
+
+ # Did readelf forget anything? Make sure that all binaries are accounted for.
+ assert not unprocessed_binary_paths
+
+
+def BuildManifest(args):
+ binaries = []
+ with open(args.manifest_path, 'w') as manifest, \
+ open(args.depfile_path, 'w') as depfile:
+ # Process the runtime deps file for file paths, recursively walking
+ # directories as needed.
+ # MakePackagePath() may relativize to either the source root or output
+ # directory.
+ # runtime_deps may contain duplicate paths, so use a set for
+ # de-duplication.
+ expanded_files = set()
+ for next_path in open(args.runtime_deps_file, 'r'):
+ next_path = next_path.strip()
+ if os.path.isdir(next_path):
+ for root, _, files in os.walk(next_path):
+ for current_file in files:
+ if current_file.startswith('.'):
+ continue
+ expanded_files.add(
+ os.path.join(root, current_file))
+ else:
+ expanded_files.add(next_path)
+
+ # Format and write out the manifest contents.
+ gen_dir = os.path.normpath(os.path.join(args.out_dir, "gen"))
+ app_found = False
+ excluded_files_set = set(args.exclude_file)
+ for current_file in expanded_files:
+ if _IsBinary(current_file):
+ binaries.append(current_file)
+ current_file = _GetStrippedPath(current_file)
+
+ in_package_path = MakePackagePath(current_file,
+ [gen_dir, args.root_dir, args.out_dir])
+ if in_package_path == args.app_filename:
+ app_found = True
+
+ if in_package_path in excluded_files_set:
+ excluded_files_set.remove(in_package_path)
+ continue
+
+ manifest.write('%s=%s\n' % (in_package_path, current_file))
+
+    if excluded_files_set:
+      raise Exception('Some files were excluded with --exclude-file, but '
+                      'not found in the deps list: %s' %
+                      ', '.join(excluded_files_set))
+
+ if not app_found:
+ raise Exception('Could not locate executable inside runtime_deps.')
+
+ # Write meta/package manifest file.
+ with open(os.path.join(os.path.dirname(args.manifest_path), 'package'),
+ 'w') as package_json:
+ json.dump({'version': '0', 'name': args.app_name}, package_json)
+ manifest.write('meta/package=%s\n' %
+ os.path.relpath(package_json.name, args.out_dir))
+
+ # Write component manifest file.
+ cmx_file_path = os.path.join(os.path.dirname(args.manifest_path),
+ args.app_name + '.cmx')
+ with open(cmx_file_path, 'w') as component_manifest_file:
+ component_manifest = {
+ 'program': { 'binary': args.app_filename },
+ 'sandbox': json.load(open(args.sandbox_policy_path, 'r')),
+ }
+ json.dump(component_manifest, component_manifest_file)
+
+ manifest.write('meta/%s=%s\n' %
+ (os.path.basename(component_manifest_file.name),
+ os.path.relpath(cmx_file_path, args.out_dir)))
+
+ depfile.write(
+ "%s: %s" % (os.path.relpath(args.manifest_path, args.out_dir),
+ " ".join([os.path.relpath(f, args.out_dir)
+ for f in expanded_files])))
+
+ _WriteBuildIdsTxt(binaries, args.build_ids_file)
+
+ return 0
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--root-dir', required=True, help='Build root directory')
+ parser.add_argument('--out-dir', required=True, help='Build output directory')
+ parser.add_argument('--app-name', required=True, help='Package name')
+ parser.add_argument('--app-filename', required=True,
+ help='Path to the main application binary relative to the output dir.')
+ parser.add_argument('--sandbox-policy-path', required=True,
+ help='Path to the sandbox policy file relative to the output dir.')
+ parser.add_argument('--runtime-deps-file', required=True,
+ help='File with the list of runtime dependencies.')
+ parser.add_argument('--depfile-path', required=True,
+ help='Path to write GN deps file.')
+ parser.add_argument('--exclude-file', action='append', default=[],
+ help='Package-relative file path to exclude from the package.')
+ parser.add_argument('--manifest-path', required=True,
+ help='Manifest output path.')
+ parser.add_argument('--build-ids-file', required=True,
+ help='Debug symbol index path.')
+
+ args = parser.parse_args()
+
+ return BuildManifest(args)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/config/fuchsia/rules.gni b/deps/v8/build/config/fuchsia/rules.gni
new file mode 100644
index 0000000000..11cb4f10ad
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/rules.gni
@@ -0,0 +1,157 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/fuchsia/config.gni")
+import("//build/config/fuchsia/package.gni")
+import("//build/config/sysroot.gni")
+import("//build/util/generate_wrapper.gni")
+
+blobstore_qcow_path = "$root_out_dir/fvm.blk.qcow2"
+
+# Generates a script which deploys and executes a package on a device.
+#
+# Parameters:
+# package: The package() target which will be run.
+# package_name_override: Specifies the name of the generated package, if its
+# name is different than the |package| target name. This value must match
+# package_name_override in the |package| target.
+# package_deps: An array of [package, package_name_override] array pairs
+# which specify additional dependency packages to be installed
+# prior to execution.
+# runner_script: The runner script implementation to use, relative to
+# "build/fuchsia". Defaults to "exe_runner.py".
+# install_only: If true, executing the script will only install the package
+# on the device, but not run it.
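+#
+# A minimal usage sketch (target names are hypothetical, not part of this
+# patch):
+#
+#   fuchsia_package_runner("echo_test_runner") {
+#     testonly = true
+#     package = ":echo_test_pkg"
+#   }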
+template("fuchsia_package_runner") {
+ forward_variables_from(invoker, [ "runner_script" ])
+
+ if (defined(invoker.package_name_override)) {
+ _pkg_shortname = invoker.package_name_override
+ } else {
+ _pkg_shortname = get_label_info(invoker.package, "name")
+ }
+
+ _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") +
+ "/" + _pkg_shortname
+ _manifest_path = "$_pkg_dir/${_pkg_shortname}.archive_manifest"
+ _package_path = "$_pkg_dir/${_pkg_shortname}.far"
+
+ if (!defined(runner_script)) {
+ runner_script = "//build/fuchsia/exe_runner.py"
+ }
+
+ generated_run_pkg_script_path = "$root_build_dir/bin/run_${_pkg_shortname}"
+ generated_install_pkg_script_path =
+ "$root_build_dir/bin/install_$_pkg_shortname"
+
+ _generate_runner_target = "${target_name}__generate_runner"
+ _generate_installer_target = "${target_name}__generate_installer"
+ _generate_template = "${target_name}__generate_template"
+
+ # Generates a script to install and optionally run a package.
+ #
+ # Parameters:
+ # |install_only|: If true, builds a script that only installs a package.
+ # |script_path|: The path of the script to generate.
+ template(_generate_template) {
+ generate_wrapper(target_name) {
+ forward_variables_from(invoker,
+ [
+ "install_only",
+ "script_path",
+ "target",
+ "testonly",
+ ])
+
+ executable = runner_script
+ wrapper_script = script_path
+
+ deps = [
+ "//build/config/fuchsia:blobstore_extended_qcow2",
+ invoker.package,
+ ]
+
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ data = [
+ _manifest_path,
+ "//build/fuchsia/",
+ "//build/util/lib/",
+ "//third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer",
+ "${qemu_root}/",
+ "${fuchsia_sdk}/",
+ ]
+
+ data_deps = [
+ invoker.package,
+ ]
+
+ executable_args = []
+
+ if (defined(invoker.package_deps)) {
+ foreach(cur_package, invoker.package_deps) {
+ deps += [ cur_package[0] ]
+ dep_package_path =
+ get_label_info(cur_package[0], "target_gen_dir") + "/" +
+ cur_package[1] + "/" + cur_package[1] + ".far"
+ _rebased_dep_package_path =
+ rebase_path(dep_package_path, root_build_dir)
+ executable_args += [
+ "--package-dep",
+ "@WrappedPath(${_rebased_dep_package_path})",
+ ]
+ }
+ }
+
+ _rebased_package_path = rebase_path(_package_path, root_build_dir)
+ executable_args += [
+ "--output-directory",
+ "@WrappedPath(.)",
+ "--target-cpu",
+ target_cpu,
+ "--package",
+ "@WrappedPath(${_rebased_package_path})",
+ "--package-name",
+ _pkg_shortname,
+ ]
+
+ if (defined(invoker.use_test_server) && invoker.use_test_server) {
+ executable_args += [ "--enable-test-server" ]
+ }
+
+ if (defined(install_only) && install_only) {
+ executable_args += [ "--install-only" ]
+ }
+ }
+ }
+
+ target(_generate_template, _generate_runner_target) {
+ forward_variables_from(invoker, "*")
+ script_path = generated_run_pkg_script_path
+ }
+
+ target(_generate_template, _generate_installer_target) {
+ forward_variables_from(invoker, "*")
+ script_path = generated_install_pkg_script_path
+ install_only = true
+ }
+
+ # Build the installer script, and the runner for non-|install_only| targets.
+ group(target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
+ deps = [
+ ":${_generate_installer_target}",
+ ]
+
+ # Generate a runner script if the target is not install-only.
+ if (!defined(invoker.install_only)) {
+ deps += [ ":${_generate_runner_target}" ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/fuchsia/symbol_archive.gni b/deps/v8/build/config/fuchsia/symbol_archive.gni
new file mode 100644
index 0000000000..1415132790
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/symbol_archive.gni
@@ -0,0 +1,46 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+# Creates a tarball of binaries' debug data, structured according
+# to the ".build_ids" convention used by the symbolizer and GNU GDB.
+#
+# Parameters:
+# ids_txt: The "ids.txt" file which lists the relative paths to unstripped
+# executables and libraries, along with their build IDs.
+# archive_name: The path to the compressed tarball that will be
+# generated.
+# include_prebuilt_symbols: Include symbols from SDK prebuilt libraries.
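+#
+# A hypothetical usage sketch (target and file names invented for
+# illustration; the ids.txt path follows the layout used by package.gni):
+#
+#   symbol_archive("echo_test_symbols") {
+#     deps = [ ":echo_test_pkg" ]
+#     ids_txt = "$target_gen_dir/echo_test_pkg/ids.txt"
+#     archive_name = "$root_out_dir/echo_test_pkg.symbols.tar.bz2"
+#   }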
+template("symbol_archive") {
+ assert(!is_debug)
+
+ action(target_name) {
+ _ids_txt = invoker.ids_txt
+ _build_ids = invoker.archive_name
+ _eu_strip_path = "//buildtools/third_party/eu-strip/bin/eu-strip"
+
+ script = "//build/config/fuchsia/build_symbol_archive.py"
+
+ inputs = [
+ _ids_txt,
+ ]
+
+ outputs = [
+ _build_ids,
+ ]
+
+ deps = invoker.deps
+
+ args = [
+ rebase_path(_ids_txt),
+ "-o",
+ rebase_path(_build_ids),
+ "--eu-strip",
+ rebase_path(_eu_strip_path),
+ "--fuchsia-build-id-dir",
+ rebase_path("//third_party/fuchsia-sdk/sdk/.build-id"),
+ ]
+ }
+}
diff --git a/deps/v8/build/config/fuchsia/testing_sandbox_policy b/deps/v8/build/config/fuchsia/testing_sandbox_policy
new file mode 100644
index 0000000000..322af44058
--- /dev/null
+++ b/deps/v8/build/config/fuchsia/testing_sandbox_policy
@@ -0,0 +1,25 @@
+{
+ "features": [
+ "isolated-persistent-storage",
+ "root-ssl-certificates",
+ "system-temp",
+ "vulkan" ],
+ "dev": ["null", "zero"],
+ "services": [
+ "fuchsia.fonts.Provider",
+ "fuchsia.media.Audio",
+ "fuchsia.mediacodec.CodecFactory",
+ "fuchsia.net.SocketProvider",
+ "fuchsia.netstack.Netstack",
+ "fuchsia.process.Launcher",
+ "fuchsia.sys.Environment",
+ "fuchsia.sys.Launcher",
+ "fuchsia.sys.Loader",
+ "fuchsia.ui.input.ImeService",
+ "fuchsia.ui.input.ImeVisibilityService",
+ "fuchsia.ui.policy.Presenter",
+ "fuchsia.ui.scenic.Scenic",
+ "fuchsia.vulkan.loader.Loader",
+ "fuchsia.web.ContextProvider"
+ ]
+}
diff --git a/deps/v8/build/config/gcc/BUILD.gn b/deps/v8/build/config/gcc/BUILD.gn
new file mode 100644
index 0000000000..747245f376
--- /dev/null
+++ b/deps/v8/build/config/gcc/BUILD.gn
@@ -0,0 +1,116 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # When non-empty, overrides the target rpath value. This allows a user to
+  # make a Chromium build where binaries and shared libraries are meant to be
+  # installed into separate directories, like /usr/bin/chromium and
+  # /usr/lib/chromium for instance. It is useful when a build system that
+  # generates a whole target root filesystem (like Yocto) is used on top of
+  # gn, especially when cross-compiling.
+  # Note: this gn arg is similar to the gyp target_rpath generator flag.
+ gcc_target_rpath = ""
+ ldso_path = ""
+}
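+
+# For example, an args.gn for a build whose binaries are packaged into a
+# target root filesystem might set (values are illustrative only):
+#
+#   gcc_target_rpath = "/usr/lib/chromium"
+#   ldso_path = "/lib64/ld-linux-x86-64.so.2"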
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
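+#
+# A target that needs default visibility can drop this config and opt into
+# :symbol_visibility_default instead, e.g. (sketch):
+#
+#   configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
+#   configs += [ "//build/config/gcc:symbol_visibility_default" ]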
+config("symbol_visibility_hidden") {
+ cflags = [ "-fvisibility=hidden" ]
+
+ # Visibility attribute is not supported on AIX.
+ if (current_os != "aix") {
+ cflags_cc = [ "-fvisibility-inlines-hidden" ]
+ cflags_objcc = cflags_cc
+ }
+}
+
+# This config is usually set when :symbol_visibility_hidden is removed.
+# It's often a good idea to set visibility explicitly, as there are flags
+# which would error out otherwise (e.g. -fsanitize=cfi-unrelated-cast).
+config("symbol_visibility_default") {
+ cflags = [ "-fvisibility=default" ]
+}
+
+# The rpath is the dynamic library search path. Setting this config on a link
+# step will put the directory where the build generates shared libraries into
+# the rpath.
+#
+# This is required for component builds since the build generates many shared
+# libraries in the build directory that we expect to be automatically loaded.
+# It will be automatically applied in this case by :executable_config.
+#
+# In non-component builds, certain test binaries may expect to load dynamic
+# libraries from the current directory. As long as these aren't distributed,
+# this is OK. For these cases use something like this:
+#
+# if (is_linux && !is_component_build) {
+# configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
+# }
+config("rpath_for_built_shared_libraries") {
+ if (!is_android) {
+ # Note: Android doesn't support rpath.
+ if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
+ ldflags = [
+ # Want to pass "\$". GN will re-escape as required for ninja.
+ "-Wl,-rpath=\$ORIGIN",
+ ]
+ } else {
+ ldflags = [ "-Wl,-rpath=${gcc_target_rpath}" ]
+ }
+ if (current_toolchain == default_toolchain && ldso_path != "") {
+ ldflags += [ "-Wl,--dynamic-linker=${ldso_path}" ]
+ }
+ }
+}
+
+if (is_component_build && !is_android) {
+ # See the rpath_for... config above for why this is necessary for component
+ # builds.
+ executable_and_shared_library_configs_ =
+ [ ":rpath_for_built_shared_libraries" ]
+} else {
+ executable_and_shared_library_configs_ = []
+}
+
+# Settings for executables.
+config("executable_config") {
+ configs = executable_and_shared_library_configs_
+ ldflags = [ "-pie" ]
+ if (is_android) {
+ ldflags += [
+ "-Bdynamic",
+ "-Wl,-z,nocopyreloc",
+ ]
+ }
+
+ if (!is_android && current_os != "aix") {
+ ldflags += [
+ # TODO(GYP): Do we need a check on the binutils version here?
+ #
+ # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+ # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+ "-Wl,--disable-new-dtags",
+ ]
+ }
+}
+
+# Settings for shared libraries.
+config("shared_library_config") {
+ configs = executable_and_shared_library_configs_
+}
diff --git a/deps/v8/build/config/get_host_byteorder.py b/deps/v8/build/config/get_host_byteorder.py
new file mode 100755
index 0000000000..c8fcf1f352
--- /dev/null
+++ b/deps/v8/build/config/get_host_byteorder.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Get Byteorder of host architecture"""
+
+
+import sys
+
+print sys.byteorder
diff --git a/deps/v8/build/config/host_byteorder.gni b/deps/v8/build/config/host_byteorder.gni
new file mode 100644
index 0000000000..48a1a7f1e3
--- /dev/null
+++ b/deps/v8/build/config/host_byteorder.gni
@@ -0,0 +1,27 @@
+# Copyright (c) 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines the "host_byteorder" variable.
+# Note that this is currently used only for building v8.
+# The Chromium code generally assumes little-endianness.
+declare_args() {
+ host_byteorder = "undefined"
+}
+
+# Detect host byteorder
+# ppc64 can be either BE or LE
+if (host_cpu == "ppc64") {
+ if (current_os == "aix") {
+ host_byteorder = "big"
+ } else {
+ # Only use the script when absolutely necessary
+ host_byteorder =
+ exec_script("//build/config/get_host_byteorder.py", [], "trim string")
+ }
+} else if (host_cpu == "ppc" || host_cpu == "s390" || host_cpu == "s390x" ||
+ host_cpu == "mips" || host_cpu == "mips64") {
+ host_byteorder = "big"
+} else {
+ host_byteorder = "little"
+}
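+
+# Typical usage (sketch): import this file and branch on the detected value,
+# for example (the define below is hypothetical, for illustration only):
+#
+#   import("//build/config/host_byteorder.gni")
+#   if (host_byteorder == "big") {
+#     defines += [ "V8_TARGET_BIG_ENDIAN" ]  # hypothetical define
+#   }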
diff --git a/deps/v8/build/config/ios/BUILD.gn b/deps/v8/build/config/ios/BUILD.gn
new file mode 100644
index 0000000000..8a50ccf906
--- /dev/null
+++ b/deps/v8/build/config/ios/BUILD.gn
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Enabling this option makes clang compile to an intermediate
+ # representation ("bitcode"), and not to native code. This is preferred
+  # when including WebRTC in apps that will be submitted to Apple's App Store
+  # and mandatory for apps that run on watchOS or tvOS.
+  # The option only works when building with Xcode (use_xcode_clang = true).
+  # Mimicking how Xcode handles it, production builds (is_debug = false)
+  # get real bitcode sections added, while debug builds (is_debug = true)
+  # only get bitcode-section "markers" added to them.
+  # NOTE: This option is ignored when building for the iOS simulator,
+  # where part of libvpx is compiled from assembly code written in Intel
+  # syntax; Yasm / Nasm do not support emitting bitcode parts.
+ # That is not a limitation for now as Xcode mandates the presence of bitcode
+ # only when building bitcode-enabled projects for real devices (ARM CPUs).
+ enable_ios_bitcode = false
+}
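+
+# For instance, an App Store build with bitcode enabled would set something
+# like the following in args.gn (sketch, per the constraints noted above):
+#
+#   use_xcode_clang = true
+#   enable_ios_bitcode = true
+#   is_debug = false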
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+ # These flags are shared between the C compiler and linker.
+ common_ios_flags = []
+
+ # CPU architecture.
+ if (current_cpu == "x64") {
+ common_ios_flags += [
+ "-arch",
+ "x86_64",
+ ]
+ } else if (current_cpu == "x86") {
+ common_ios_flags += [
+ "-arch",
+ "i386",
+ ]
+ } else if (current_cpu == "armv7" || current_cpu == "arm") {
+ common_ios_flags += [
+ "-arch",
+ "armv7",
+ ]
+ } else if (current_cpu == "arm64") {
+ common_ios_flags += [
+ "-arch",
+ "arm64",
+ ]
+ }
+
+ # This is here so that all files get recompiled after an Xcode update.
+  # (defines are passed via the command line, and build systems rebuild
+  # things when their command line changes). Nothing should ever read this
+  # define.
+ defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+ asmflags = common_ios_flags
+ cflags = common_ios_flags
+
+ # Without this, the constructors and destructors of a C++ object inside
+ # an Objective C struct won't be called, which is very bad.
+ cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+ # TODO(crbug.com/910200): Disable experimental isel until
+ # https://llvm.org/PR40887 is fixed.
+ if (is_debug) {
+ cflags += [ "-fno-experimental-isel" ]
+ }
+
+ ldflags = common_ios_flags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is iOS-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ common_flags = [
+ "-isysroot",
+ sysroot,
+
+ "-stdlib=libc++",
+ ]
+
+ if (use_ios_simulator) {
+ common_flags += [ "-mios-simulator-version-min=$ios_deployment_target" ]
+ } else {
+ common_flags += [ "-miphoneos-version-min=$ios_deployment_target" ]
+ }
+
+ if (use_xcode_clang && enable_ios_bitcode && !use_ios_simulator) {
+ if (is_debug) {
+ common_flags += [ "-fembed-bitcode-marker" ]
+ } else {
+ common_flags += [ "-fembed-bitcode" ]
+ }
+ }
+
+ asmflags = common_flags
+ cflags = common_flags
+ ldflags = common_flags
+
+ if (use_clang_coverage) {
+ configs = [ "//build/config/coverage:default_coverage" ]
+ }
+}
+
+config("ios_executable_flags") {
+}
+
+config("ios_dynamic_flags") {
+ ldflags = [ "-Wl,-ObjC" ] # Always load Objective-C categories and class.
+}
+
+config("xctest_config") {
+ common_flags = [
+ "-F",
+ "$ios_sdk_platform_path/Developer/Library/Frameworks",
+ ]
+
+ cflags = common_flags
+ ldflags = common_flags
+
+ libs = [
+ "Foundation.framework",
+ "XCTest.framework",
+ ]
+}
+
+group("xctest") {
+ public_configs = [ ":xctest_config" ]
+}
diff --git a/deps/v8/build/config/ios/BuildInfo.plist b/deps/v8/build/config/ios/BuildInfo.plist
new file mode 100644
index 0000000000..3595e5aefb
--- /dev/null
+++ b/deps/v8/build/config/ios/BuildInfo.plist
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>BuildMachineOSBuild</key>
+ <string>${BUILD_MACHINE_OS_BUILD}</string>
+ <key>CFBundleSupportedPlatforms</key>
+ <array>
+ <string>${IOS_SUPPORTED_PLATFORM}</string>
+ </array>
+ <key>DTCompiler</key>
+ <string>${GCC_VERSION}</string>
+ <key>DTPlatformName</key>
+ <string>${IOS_PLATFORM_NAME}</string>
+ <key>DTPlatformVersion</key>
+ <string>${IOS_PLATFORM_VERSION}</string>
+ <key>DTPlatformBuild</key>
+ <string>${IOS_PLATFORM_BUILD}</string>
+ <key>DTSDKBuild</key>
+ <string>${IOS_SDK_BUILD}</string>
+ <key>DTSDKName</key>
+ <string>${IOS_SDK_NAME}</string>
+ <key>MinimumOSVersion</key>
+ <string>${IOS_DEPLOYMENT_TARGET}</string>
+ <key>DTXcode</key>
+ <string>${XCODE_VERSION}</string>
+ <key>DTXcodeBuild</key>
+ <string>${XCODE_BUILD}</string>
+ <key>UIDeviceFamily</key>
+ <array>
+ <integer>1</integer>
+ <integer>2</integer>
+ </array>
+</dict>
+</plist>
diff --git a/deps/v8/build/config/ios/Host-Info.plist b/deps/v8/build/config/ios/Host-Info.plist
new file mode 100644
index 0000000000..9f6f5deef9
--- /dev/null
+++ b/deps/v8/build/config/ios/Host-Info.plist
@@ -0,0 +1,126 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>NSAppTransportSecurity</key>
+ <dict>
+ <key>NSAllowsArbitraryLoads</key>
+ <true/>
+ </dict>
+ <key>UIRequiredDeviceCapabilities</key>
+ <array>
+ <string>armv7</string>
+ </array>
+ <key>UILaunchImages</key>
+ <array>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{320, 480}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{320, 568}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{375, 667}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{414, 736}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Landscape</string>
+ <key>UILaunchImageSize</key>
+ <string>{414, 736}</string>
+ </dict>
+ </array>
+ <key>UILaunchImages~ipad</key>
+ <array>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{768, 1024}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>Default</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Landscape</string>
+ <key>UILaunchImageSize</key>
+ <string>{768, 1024}</string>
+ </dict>
+ </array>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UISupportedInterfaceOrientations~ipad</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationPortraitUpsideDown</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+</dict>
+</plist>
diff --git a/deps/v8/build/config/ios/Module-Info.plist b/deps/v8/build/config/ios/Module-Info.plist
new file mode 100644
index 0000000000..13b67c473f
--- /dev/null
+++ b/deps/v8/build/config/ios/Module-Info.plist
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${IOS_BUNDLE_ID_PREFIX}.${MODULE_BUNDLE_ID:rfc1034identifier}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>BNDL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1</string>
+</dict>
+</plist>
diff --git a/deps/v8/build/config/ios/OWNERS b/deps/v8/build/config/ios/OWNERS
new file mode 100644
index 0000000000..0e726c6a2c
--- /dev/null
+++ b/deps/v8/build/config/ios/OWNERS
@@ -0,0 +1 @@
+file://build/config/mac/OWNERS
diff --git a/deps/v8/build/config/ios/asset_catalog.gni b/deps/v8/build/config/ios/asset_catalog.gni
new file mode 100644
index 0000000000..460cd96262
--- /dev/null
+++ b/deps/v8/build/config/ios/asset_catalog.gni
@@ -0,0 +1,148 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This template declares a bundle_data target that references an asset
+# catalog so that it is compiled to the asset catalog of the generated
+# bundle.
+#
+# The create_bundle target requires that all asset catalogs are part of an
+# .xcasset bundle. This requirement comes from actool that only receives
+# the path to the .xcasset bundle directory and not to the individual
+# .imageset directories.
+#
+# The requirement is a bit problematic as it prevents compiling only a
+# subset of the asset catalogs that are contained in a .xcassets. This
+# template fixes that by instead copying the content of the asset catalog
+# to a temporary .xcassets directory (below $root_out_dir) and defining a
+# bundle_data target that refers to those copies (this is efficient as the
+# "copy" is implemented by hardlinking if possible on macOS).
+#
+# Since the create_bundle target will only refer to the .xcassets directory,
+# an additional "action" target that runs a dummy script is defined. It
+# does nothing but pretend to generate the .xcassets directory (which is
+# really created as a side-effect of the "copy" step). This works around
+# the check in "gn" that all inputs below $root_out_dir have to be outputs
+# of another target with a public dependency path.
+#
+# This template also ensures that the files are only copied once when the
+# build targets multiple architectures at the same time (aka "fat build").
+#
+# Arguments
+#
+# sources:
+#       required, list of strings, paths to the files contained in the
+#       asset catalog directory; this must contain the Contents.json file
+#       and all the images referenced by it (not enforced by the template).
+#
+# asset_type:
+# required, string, type of the asset catalog, that is the extension
+# of the directory containing the images and the Contents.json file.
+#
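+# A hypothetical usage sketch (paths invented for illustration; the
+# directory name must carry the asset_type extension, as asserted below):
+#
+#   asset_catalog("launch_image") {
+#     asset_type = "launchimage"
+#     sources = [
+#       "resources/Default.launchimage/Contents.json",
+#       "resources/Default.launchimage/Default.png",
+#     ]
+#   }
+#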
+template("asset_catalog") {
+ assert(defined(invoker.sources) && invoker.sources != [],
+ "sources must be defined and not empty for $target_name")
+
+ assert(defined(invoker.asset_type) && invoker.asset_type != "",
+ "asset_type must be defined and not empty for $target_name")
+
+ if (current_toolchain != default_toolchain) {
+ group(target_name) {
+ public_deps = [
+ ":$target_name($default_toolchain)",
+ ]
+ }
+ } else {
+ _copy_target_name = target_name + "__copy"
+ _data_target_name = target_name
+
+ _sources = invoker.sources
+ _outputs = []
+
+ # The compilation of resources into Assets.car is enabled automatically
+ # by the "create_bundle" target if any of the "bundle_data" sources's
+ # path is in a .xcassets directory and matches one of the know asset
+ # catalog type.
+ _xcassets_dir = "$target_gen_dir/${target_name}.xcassets"
+ _output_dir = "$_xcassets_dir/" +
+ get_path_info(get_path_info(_sources[0], "dir"), "file")
+
+ foreach(_source, invoker.sources) {
+ _dir = get_path_info(_source, "dir")
+ _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ]
+
+ assert(get_path_info(_dir, "extension") == invoker.asset_type,
+ "$_source dirname must have .${invoker.asset_type} extension")
+ }
+
+ action(_copy_target_name) {
+ # Forward "deps", "public_deps" and "testonly" in case some of the
+ # source files are generated.
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+
+ script = "//build/config/ios/hardlink.py"
+
+ visibility = [ ":$_data_target_name" ]
+ sources = _sources
+ outputs = _outputs + [ _xcassets_dir ]
+
+ args = [
+ rebase_path(get_path_info(_sources[0], "dir"), root_out_dir),
+ rebase_path(_output_dir, root_out_dir),
+ ]
+ }
+
+ bundle_data(_data_target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "deps",
+ "outputs",
+ "public_deps",
+ "sources",
+ ])
+
+ sources = _outputs
+ outputs = [
+ "{{bundle_resources_dir}}/{{source_file_part}}",
+ ]
+ public_deps = [
+ ":$_copy_target_name",
+ ]
+ }
+ }
+}
+
+# These templates are specialisations of the asset_catalog template for
+# known asset catalog types (imageset, launchimage, appiconset).
+#
+# Arguments
+#
+# sources:
+#       required, list of strings, paths to the files contained in the
+#       asset catalog directory; this must contain the Contents.json file
+#       and all the images referenced by it (not enforced by the template).
+#
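+# For example, the imageset specialisation could be used as follows (paths
+# are hypothetical, for illustration only):
+#
+#   imageset("app_icon") {
+#     sources = [
+#       "resources/app_icon.imageset/Contents.json",
+#       "resources/app_icon.imageset/app_icon.png",
+#     ]
+#   }
+#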
+template("appiconset") {
+ asset_catalog(target_name) {
+ forward_variables_from(invoker, "*", [ "asset_type" ])
+ asset_type = "appiconset"
+ }
+}
+template("imageset") {
+ asset_catalog(target_name) {
+ forward_variables_from(invoker, "*", [ "asset_type" ])
+ asset_type = "imageset"
+ }
+}
+template("launchimage") {
+ asset_catalog(target_name) {
+ forward_variables_from(invoker, "*", [ "asset_type" ])
+ asset_type = "launchimage"
+ }
+}
diff --git a/deps/v8/build/config/ios/codesign.py b/deps/v8/build/config/ios/codesign.py
new file mode 100644
index 0000000000..64bd1293a7
--- /dev/null
+++ b/deps/v8/build/config/ios/codesign.py
@@ -0,0 +1,534 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import datetime
+import fnmatch
+import glob
+import os
+import plistlib
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def GetProvisioningProfilesDir():
+ """Returns the location of the installed mobile provisioning profiles.
+
+ Returns:
+ The path to the directory containing the installed mobile provisioning
+ profiles as a string.
+ """
+ return os.path.join(
+ os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+
+
+def LoadPlistFile(plist_path):
+ """Loads property list file at |plist_path|.
+
+ Args:
+ plist_path: path to the property list file to load.
+
+ Returns:
+ The content of the property list file as a python object.
+ """
+ return plistlib.readPlistFromString(subprocess.check_output([
+ 'xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path]))
+
+
+class Bundle(object):
+ """Wraps a bundle."""
+
+ def __init__(self, bundle_path):
+ """Initializes the Bundle object with data from bundle Info.plist file."""
+ self._path = bundle_path
+ self._data = LoadPlistFile(os.path.join(self._path, 'Info.plist'))
+
+ @property
+ def path(self):
+ return self._path
+
+ @property
+ def identifier(self):
+ return self._data['CFBundleIdentifier']
+
+ @property
+ def binary_path(self):
+ return os.path.join(self._path, self._data['CFBundleExecutable'])
+
+ def Validate(self, expected_mappings):
+ """Checks that keys in the bundle have the expected value.
+
+ Args:
+ expected_mappings: a dictionary of string to object, each mapping will
+ be looked up in the bundle data to check it has the same value (missing
+ values will be ignored)
+
+ Returns:
+      A dictionary of the keys whose values differ between expected_mappings
+      and the content of the bundle (i.e. errors), so that the caller can
+      format the error message. The dictionary will be empty if there are no
+      errors.
+ """
+ errors = {}
+ for key, expected_value in expected_mappings.iteritems():
+ if key in self._data:
+ value = self._data[key]
+ if value != expected_value:
+ errors[key] = (value, expected_value)
+ return errors
+
+
+class ProvisioningProfile(object):
+ """Wraps a mobile provisioning profile file."""
+
+ def __init__(self, provisioning_profile_path):
+ """Initializes the ProvisioningProfile with data from profile file."""
+ self._path = provisioning_profile_path
+ self._data = plistlib.readPlistFromString(subprocess.check_output([
+ 'xcrun', 'security', 'cms', '-D', '-u', 'certUsageAnyCA',
+ '-i', provisioning_profile_path]))
+
+ @property
+ def path(self):
+ return self._path
+
+ @property
+ def application_identifier_pattern(self):
+ return self._data.get('Entitlements', {}).get('application-identifier', '')
+
+ @property
+ def team_identifier(self):
+ return self._data.get('TeamIdentifier', [''])[0]
+
+ @property
+ def entitlements(self):
+ return self._data.get('Entitlements', {})
+
+ @property
+ def expiration_date(self):
+ return self._data.get('ExpirationDate', datetime.datetime.now())
+
+ def ValidToSignBundle(self, bundle_identifier):
+ """Checks whether the provisioning profile can sign bundle_identifier.
+
+ Args:
+ bundle_identifier: the identifier of the bundle that needs to be signed.
+
+ Returns:
+ True if the mobile provisioning profile can be used to sign a bundle
+ with the corresponding bundle_identifier, False otherwise.
+ """
+ return fnmatch.fnmatch(
+ '%s.%s' % (self.team_identifier, bundle_identifier),
+ self.application_identifier_pattern)
+
+ def Install(self, installation_path):
+ """Copies mobile provisioning profile info to |installation_path|."""
+ shutil.copy2(self.path, installation_path)
+
+
+class Entitlements(object):
+ """Wraps an Entitlement plist file."""
+
+ def __init__(self, entitlements_path):
+ """Initializes Entitlements object from entitlement file."""
+ self._path = entitlements_path
+ self._data = LoadPlistFile(self._path)
+
+ @property
+ def path(self):
+ return self._path
+
+ def ExpandVariables(self, substitutions):
+ self._data = self._ExpandVariables(self._data, substitutions)
+
+ def _ExpandVariables(self, data, substitutions):
+ if isinstance(data, str):
+ for key, substitution in substitutions.iteritems():
+ data = data.replace('$(%s)' % (key,), substitution)
+ return data
+
+ if isinstance(data, dict):
+ for key, value in data.iteritems():
+ data[key] = self._ExpandVariables(value, substitutions)
+ return data
+
+ if isinstance(data, list):
+ for i, value in enumerate(data):
+ data[i] = self._ExpandVariables(value, substitutions)
+
+ return data
+
+ def LoadDefaults(self, defaults):
+ for key, value in defaults.iteritems():
+ if key not in self._data:
+ self._data[key] = value
+
+ def WriteTo(self, target_path):
+ plistlib.writePlist(self._data, target_path)
+
+
+def FindProvisioningProfile(bundle_identifier, required):
+ """Finds mobile provisioning profile to use to sign bundle.
+
+  Args:
+    bundle_identifier: the identifier of the bundle to sign.
+    required: whether to exit with an error if no matching profile is found.
+
+ Returns:
+ The ProvisioningProfile object that can be used to sign the Bundle
+ object or None if no matching provisioning profile was found.
+ """
+ provisioning_profile_paths = glob.glob(
+ os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision'))
+
+ # Iterate over all installed mobile provisioning profiles and filter those
+ # that can be used to sign the bundle, ignoring expired ones.
+ now = datetime.datetime.now()
+ valid_provisioning_profiles = []
+ one_hour = datetime.timedelta(0, 3600)
+ for provisioning_profile_path in provisioning_profile_paths:
+ provisioning_profile = ProvisioningProfile(provisioning_profile_path)
+ if provisioning_profile.expiration_date - now < one_hour:
+ sys.stderr.write(
+ 'Warning: ignoring expired provisioning profile: %s.\n' %
+ provisioning_profile_path)
+ continue
+ if provisioning_profile.ValidToSignBundle(bundle_identifier):
+ valid_provisioning_profiles.append(provisioning_profile)
+
+ if not valid_provisioning_profiles:
+ if required:
+ sys.stderr.write(
+ 'Error: no mobile provisioning profile found for "%s".\n' %
+ bundle_identifier)
+ sys.exit(1)
+ return None
+
+ # Select the most specific mobile provisioning profile, i.e. the one with
+ # the longest application identifier pattern (prefer the one with the latest
+  # expiration date as a secondary criterion).
+ selected_provisioning_profile = max(
+ valid_provisioning_profiles,
+ key=lambda p: (len(p.application_identifier_pattern), p.expiration_date))
+
+ one_week = datetime.timedelta(7)
+ if selected_provisioning_profile.expiration_date - now < 2 * one_week:
+ sys.stderr.write(
+ 'Warning: selected provisioning profile will expire soon: %s' %
+ selected_provisioning_profile.path)
+ return selected_provisioning_profile
+
+
+def CodeSignBundle(bundle_path, identity, extra_args):
+ process = subprocess.Popen(['xcrun', 'codesign', '--force', '--sign',
+ identity, '--timestamp=none'] + list(extra_args) + [bundle_path],
+ stderr=subprocess.PIPE)
+ _, stderr = process.communicate()
+ if process.returncode:
+ sys.stderr.write(stderr)
+ sys.exit(process.returncode)
+ for line in stderr.splitlines():
+ if line.endswith(': replacing existing signature'):
+ # Ignore warning about replacing existing signature as this should only
+ # happen when re-signing system frameworks (and then it is expected).
+ continue
+ sys.stderr.write(line)
+ sys.stderr.write('\n')
+
+
+def InstallSystemFramework(framework_path, bundle_path, args):
+ """Install framework from |framework_path| to |bundle| and code-re-sign it."""
+ installed_framework_path = os.path.join(
+ bundle_path, 'Frameworks', os.path.basename(framework_path))
+
+ if os.path.isfile(framework_path):
+ shutil.copy(framework_path, installed_framework_path)
+ elif os.path.isdir(framework_path):
+ if os.path.exists(installed_framework_path):
+ shutil.rmtree(installed_framework_path)
+ shutil.copytree(framework_path, installed_framework_path)
+
+ CodeSignBundle(installed_framework_path, args.identity,
+ ['--deep', '--preserve-metadata=identifier,entitlements,flags'])
+
+
+def GenerateEntitlements(path, provisioning_profile, bundle_identifier):
+ """Generates an entitlements file.
+
+ Args:
+ path: path to the entitlements template file
+ provisioning_profile: ProvisioningProfile object to use, may be None
+ bundle_identifier: identifier of the bundle to sign.
+ """
+ entitlements = Entitlements(path)
+ if provisioning_profile:
+ entitlements.LoadDefaults(provisioning_profile.entitlements)
+ app_identifier_prefix = provisioning_profile.team_identifier + '.'
+ else:
+ app_identifier_prefix = '*.'
+ entitlements.ExpandVariables({
+ 'CFBundleIdentifier': bundle_identifier,
+ 'AppIdentifierPrefix': app_identifier_prefix,
+ })
+ return entitlements
+
+
+def GenerateBundleInfoPlist(bundle_path, plist_compiler, partial_plist):
+ """Generates the bundle Info.plist for a list of partial .plist files.
+
+ Args:
+ bundle_path: path to the bundle
+ plist_compiler: string, path to the Info.plist compiler
+ partial_plist: list of path to partial .plist files to merge
+ """
+
+ # Filter empty partial .plist files (this happens if an application
+  # does not need to compile any asset catalog, in which case
+ # the partial .plist file from the asset catalog compilation step is
+ # just a stamp file).
+ filtered_partial_plist = []
+ for plist in partial_plist:
+ plist_size = os.stat(plist).st_size
+ if plist_size:
+ filtered_partial_plist.append(plist)
+
+ # Invoke the plist_compiler script. It needs to be a python script.
+ subprocess.check_call([
+ 'python', plist_compiler, 'merge', '-f', 'binary1',
+ '-o', os.path.join(bundle_path, 'Info.plist'),
+ ] + filtered_partial_plist)
+
+
+class Action(object):
+ """Class implementing one action supported by the script."""
+
+ @classmethod
+ def Register(cls, subparsers):
+ parser = subparsers.add_parser(cls.name, help=cls.help)
+ parser.set_defaults(func=cls._Execute)
+ cls._Register(parser)
+
+
+class CodeSignBundleAction(Action):
+ """Class implementing the code-sign-bundle action."""
+
+ name = 'code-sign-bundle'
+ help = 'perform code signature for a bundle'
+
+ @staticmethod
+ def _Register(parser):
+ parser.add_argument(
+ '--entitlements', '-e', dest='entitlements_path',
+ help='path to the entitlements file to use')
+ parser.add_argument(
+ 'path', help='path to the iOS bundle to codesign')
+ parser.add_argument(
+ '--identity', '-i', required=True,
+ help='identity to use to codesign')
+ parser.add_argument(
+ '--binary', '-b', required=True,
+ help='path to the iOS bundle binary')
+ parser.add_argument(
+ '--framework', '-F', action='append', default=[], dest='frameworks',
+ help='install and resign system framework')
+ parser.add_argument(
+ '--disable-code-signature', action='store_true', dest='no_signature',
+ help='disable code signature')
+ parser.add_argument(
+ '--disable-embedded-mobileprovision', action='store_false',
+ default=True, dest='embedded_mobileprovision',
+ help='disable finding and embedding mobileprovision')
+ parser.add_argument(
+ '--platform', '-t', required=True,
+ help='platform the signed bundle is targeting')
+ parser.add_argument(
+ '--partial-info-plist', '-p', action='append', default=[],
+ help='path to partial Info.plist to merge to create bundle Info.plist')
+ parser.add_argument(
+ '--plist-compiler-path', '-P', action='store',
+ help='path to the plist compiler script (for --partial-info-plist)')
+ parser.set_defaults(no_signature=False)
+
+ @staticmethod
+ def _Execute(args):
+ if not args.identity:
+ args.identity = '-'
+
+ if args.partial_info_plist:
+ GenerateBundleInfoPlist(
+ args.path,
+ args.plist_compiler_path,
+ args.partial_info_plist)
+
+ bundle = Bundle(args.path)
+
+ # According to Apple documentation, the application binary must be the same
+ # as the bundle name without the .app suffix. See crbug.com/740476 for more
+ # information on what problem this can cause.
+ #
+ # To prevent this class of error, fail with an error if the binary name is
+ # incorrect in the Info.plist as it is not possible to update the value in
+ # Info.plist at this point (the file has been copied by a different target
+ # and ninja would consider the build dirty if it was updated).
+ #
+    # Also check that the name of the bundle is correct (this does not cause
+    # the build to be considered dirty, but still terminates the script in
+    # case of an incorrect bundle name).
+ #
+ # Apple documentation is available at:
+ # https://developer.apple.com/library/content/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html
+ bundle_name = os.path.splitext(os.path.basename(bundle.path))[0]
+ errors = bundle.Validate({
+ 'CFBundleName': bundle_name,
+ 'CFBundleExecutable': bundle_name,
+ })
+ if errors:
+ for key in sorted(errors):
+ value, expected_value = errors[key]
+ sys.stderr.write('%s: error: %s value incorrect: %s != %s\n' % (
+ bundle.path, key, value, expected_value))
+ sys.stderr.flush()
+ sys.exit(1)
+
+ # Delete existing embedded mobile provisioning.
+ embedded_provisioning_profile = os.path.join(
+ bundle.path, 'embedded.mobileprovision')
+ if os.path.isfile(embedded_provisioning_profile):
+ os.unlink(embedded_provisioning_profile)
+
+ # Delete existing code signature.
+ signature_file = os.path.join(args.path, '_CodeSignature', 'CodeResources')
+ if os.path.isfile(signature_file):
+ shutil.rmtree(os.path.dirname(signature_file))
+
+ # Install system frameworks if requested.
+ for framework_path in args.frameworks:
+ InstallSystemFramework(framework_path, args.path, args)
+
+ # Copy main binary into bundle.
+ if os.path.isfile(bundle.binary_path):
+ os.unlink(bundle.binary_path)
+ shutil.copy(args.binary, bundle.binary_path)
+
+ if args.no_signature:
+ return
+
+ codesign_extra_args = []
+
+ if args.embedded_mobileprovision:
+      # Find the mobile provisioning profile and embed it into the bundle (if
+      # a code signing identity has been provided, fail if no valid mobile
+      # provisioning profile is found).
+ provisioning_profile_required = args.identity != '-'
+ provisioning_profile = FindProvisioningProfile(
+ bundle.identifier, provisioning_profile_required)
+ if provisioning_profile and args.platform != 'iphonesimulator':
+ provisioning_profile.Install(embedded_provisioning_profile)
+
+ if args.entitlements_path is not None:
+ temporary_entitlements_file = \
+ tempfile.NamedTemporaryFile(suffix='.xcent')
+ codesign_extra_args.extend(
+ ['--entitlements', temporary_entitlements_file.name])
+
+ entitlements = GenerateEntitlements(
+ args.entitlements_path, provisioning_profile, bundle.identifier)
+ entitlements.WriteTo(temporary_entitlements_file.name)
+
+ CodeSignBundle(bundle.path, args.identity, codesign_extra_args)
+
+
+class CodeSignFileAction(Action):
+ """Class implementing code signature for a single file."""
+
+ name = 'code-sign-file'
+ help = 'code-sign a single file'
+
+ @staticmethod
+ def _Register(parser):
+ parser.add_argument(
+ 'path', help='path to the file to codesign')
+ parser.add_argument(
+ '--identity', '-i', required=True,
+ help='identity to use to codesign')
+ parser.add_argument(
+ '--output', '-o',
+ help='if specified, copy the file to that location before signing it')
+ parser.set_defaults(sign=True)
+
+ @staticmethod
+ def _Execute(args):
+ if not args.identity:
+ args.identity = '-'
+
+ install_path = args.path
+ if args.output:
+
+ if os.path.isfile(args.output):
+ os.unlink(args.output)
+ elif os.path.isdir(args.output):
+ shutil.rmtree(args.output)
+
+ if os.path.isfile(args.path):
+ shutil.copy(args.path, args.output)
+ elif os.path.isdir(args.path):
+ shutil.copytree(args.path, args.output)
+
+ install_path = args.output
+
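+ # --deep also signs any nested code, and --preserve-metadata keeps the
+ # existing identifier and entitlements when re-signing.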
+ CodeSignBundle(install_path, args.identity,
+ ['--deep', '--preserve-metadata=identifier,entitlements'])
+
+
+class GenerateEntitlementsAction(Action):
+ """Class implementing the generate-entitlements action."""
+
+ name = 'generate-entitlements'
+ help = 'generate entitlements file'
+
+ @staticmethod
+ def _Register(parser):
+ parser.add_argument(
+ '--entitlements', '-e', dest='entitlements_path',
+ help='path to the entitlements file to use')
+ parser.add_argument(
+ 'path', help='path to the entitlements file to generate')
+ parser.add_argument(
+ '--info-plist', '-p', required=True,
+ help='path to the bundle Info.plist')
+
+ @staticmethod
+ def _Execute(args):
+ info_plist = LoadPlistFile(args.info_plist)
+ bundle_identifier = info_plist['CFBundleIdentifier']
+ provisioning_profile = FindProvisioningProfile(bundle_identifier, False)
+ entitlements = GenerateEntitlements(
+ args.entitlements_path, provisioning_profile, bundle_identifier)
+ entitlements.WriteTo(args.path)
+
+
+def Main():
+ parser = argparse.ArgumentParser('codesign iOS bundles')
+ parser.add_argument('--developer_dir', required=False,
+ help='Path to Xcode.')
+ subparsers = parser.add_subparsers()
+
+ actions = [
+ CodeSignBundleAction,
+ CodeSignFileAction,
+ GenerateEntitlementsAction,
+ ]
+
+ for action in actions:
+ action.Register(subparsers)
+
+ args = parser.parse_args()
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+ args.func(args)
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/deps/v8/build/config/ios/dummy.py b/deps/v8/build/config/ios/dummy.py
new file mode 100644
index 0000000000..b23b7dab96
--- /dev/null
+++ b/deps/v8/build/config/ios/dummy.py
@@ -0,0 +1,15 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Empty script that does nothing and return success error code.
+
+This script is used by some gn targets that pretend creating some output
+but instead depend on another target creating the output indirectly (in
+general this output is a directory that is used as input by a bundle_data
+target).
+
+It ignores all parameters and terminate with a success error code. It
+does the same thing as the unix command "true", but gn can only invoke
+python scripts.
+"""
diff --git a/deps/v8/build/config/ios/entitlements.plist b/deps/v8/build/config/ios/entitlements.plist
new file mode 100644
index 0000000000..429762e3a3
--- /dev/null
+++ b/deps/v8/build/config/ios/entitlements.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>application-identifier</key>
+ <string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+ <key>keychain-access-groups</key>
+ <array>
+ <string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+ </array>
+</dict>
+</plist>
diff --git a/deps/v8/build/config/ios/find_signing_identity.py b/deps/v8/build/config/ios/find_signing_identity.py
new file mode 100644
index 0000000000..7add474b9c
--- /dev/null
+++ b/deps/v8/build/config/ios/find_signing_identity.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+import re
+
+def ListIdentities():
+ return subprocess.check_output([
+ 'xcrun',
+ 'security',
+ 'find-identity',
+ '-v',
+ '-p',
+ 'codesigning',
+ ])
+
+
+def FindValidIdentity(identity_description):
+ lines = list(map(str.strip, ListIdentities().splitlines()))
+ # Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
+ exp = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"]*)"')
+ for line in lines:
+ res = exp.match(line)
+ if res is None:
+ continue
+ if identity_description in res.group(2):
+ yield res.group(1)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser('codesign iOS bundles')
+ parser.add_argument(
+ '--developer_dir', required=False,
+ help='Path to Xcode.')
+ parser.add_argument(
+ '--identity-description', required=True,
+ help='Text description used to select the code signing identity.')
+ args = parser.parse_args()
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+ for identity in FindValidIdentity(args.identity_description):
+ print(identity)
diff --git a/deps/v8/build/config/ios/generate_umbrella_header.py b/deps/v8/build/config/ios/generate_umbrella_header.py
new file mode 100644
index 0000000000..8547e18aa7
--- /dev/null
+++ b/deps/v8/build/config/ios/generate_umbrella_header.py
@@ -0,0 +1,75 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an umbrella header for an iOS framework."""
+
+import argparse
+import datetime
+import os
+import re
+import string
+
+
+HEADER_TEMPLATE = string.Template('''\
+// Copyright $year The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file is auto-generated by //build/ios/config/generate_umbrella_header.py
+
+#ifndef $header_guard
+#define $header_guard
+
+$imports
+
+#endif // $header_guard
+''')
+
+
+def ComputeHeaderGuard(file_path):
+ """Computes the header guard for a file path.
+
+ Args:
+ file_path: The path to convert into an header guard.
+ Returns:
+ The header guard string for the file_path.
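+
+ For example, "ios/umbrella_header.h" becomes "IOS_UMBRELLA_HEADER_H_".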
+ """
+ return re.sub(r'[.+/\\]', r'_', file_path.upper()) + '_'
+
+
+def WriteUmbrellaHeader(output_path, imported_headers):
+ """Writes the umbrella header.
+
+ Args:
+ output_path: The path to the umbrella header.
+ imported_headers: A list of headers to #import in the umbrella header.
+ """
+ year = datetime.date.today().year
+ header_guard = ComputeHeaderGuard(output_path)
+ imports = '\n'.join([
+ '#import "%s"' % os.path.basename(header)
+ for header in sorted(imported_headers)
+ ])
+ with open(output_path, 'w') as output_file:
+ output_file.write(
+ HEADER_TEMPLATE.safe_substitute({
+ 'year': year,
+ 'header_guard': header_guard,
+ 'imports': imports,
+ }))
+
+
+def Main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--output-path', required=True, type=str,
+ help='Path to the generated umbrella header.')
+ parser.add_argument('imported_headers', type=str, nargs='+',
+ help='Headers to #import in the umbrella header.')
+ options = parser.parse_args()
+
+ return WriteUmbrellaHeader(options.output_path, options.imported_headers)
+
+
+if __name__ == '__main__':
+ Main()
diff --git a/deps/v8/build/config/ios/hardlink.py b/deps/v8/build/config/ios/hardlink.py
new file mode 100644
index 0000000000..91dbf62f98
--- /dev/null
+++ b/deps/v8/build/config/ios/hardlink.py
@@ -0,0 +1,69 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Recursively create hardlink to target named output."""
+
+
+import argparse
+import os
+import shutil
+
+
+def CreateHardlinkHelper(target, output):
+ """Recursively create a hardlink named output pointing to target.
+
+ Args:
+ target: path to an existing file or directory
+ output: path to the newly created hardlink
+
+ This function assumes that output does not exist but that the parent
+ directory containing output does. If those conditions are false, the
+ function will fail with an exception corresponding to an OS error.
+ """
+ if os.path.islink(target):
+ os.symlink(os.readlink(target), output)
+ elif not os.path.isdir(target):
+ try:
+ os.link(target, output)
+ except OSError:
+ # Fall back to copying when the hardlink fails, e.g. because target is
+ # on a different filesystem.
+ shutil.copy(target, output)
+ else:
+ os.mkdir(output)
+ for name in os.listdir(target):
+ CreateHardlinkHelper(
+ os.path.join(target, name),
+ os.path.join(output, name))
+
+
+def CreateHardlink(target, output):
+ """Recursively create a hardlink named output pointing to target.
+
+ Args:
+ target: path to an existing file or directory
+ output: path to the newly created hardlink
+
+ If output already exists, it is first removed. In all cases, the
+ parent directory containing output is created.
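+
+ For example (paths are illustrative):
+ CreateHardlink('out/Foo.app', 'out/variants/beta/Foo.app')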
+ """
+ if os.path.isdir(output):
+ shutil.rmtree(output)
+ elif os.path.exists(output):
+ os.unlink(output)
+
+ parent_dir = os.path.dirname(os.path.abspath(output))
+ if not os.path.isdir(parent_dir):
+ os.makedirs(parent_dir)
+
+ CreateHardlinkHelper(target, output)
+
+
+def Main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('target', help='path to the file or directory to link to')
+ parser.add_argument('output', help='name of the hardlink to create')
+ args = parser.parse_args()
+
+ CreateHardlink(args.target, args.output)
+
+
+if __name__ == '__main__':
+ Main()
diff --git a/deps/v8/build/config/ios/ios_sdk.gni b/deps/v8/build/config/ios/ios_sdk.gni
new file mode 100644
index 0000000000..f45629ae99
--- /dev/null
+++ b/deps/v8/build/config/ios/ios_sdk.gni
@@ -0,0 +1,167 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk_overrides.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # SDK path to use. When empty this will use the default SDK based on the
+ # value of use_ios_simulator.
+ ios_sdk_path = ""
+ ios_sdk_name = ""
+ ios_sdk_version = ""
+ ios_sdk_platform = ""
+ ios_sdk_platform_path = ""
+ xcode_version = ""
+ xcode_version_int = 0
+ xcode_build = ""
+ machine_os_build = ""
+
+ # The iOS Code signing identity to use
+ # TODO(GYP), TODO(sdfresne): Consider having a separate
+ # ios_enable_code_signing_flag=<bool> flag to make the invocation clearer.
+ ios_enable_code_signing = true
+ ios_code_signing_identity = ""
+ ios_code_signing_identity_description = "iPhone Developer"
+
+ # Prefix for the CFBundleIdentifier property of iOS bundles (corresponds to
+ # the "Organization Identifier" in Xcode). Code signing will fail if no
+ # mobile provisioning profile for the selected code signing identity
+ # supports that prefix.
+ ios_app_bundle_id_prefix = "org.chromium"
+
+ # If true, then allow using Xcode to automatically manage certificates. This
+ # requires loading a separate Xcode project and enabling automatically
+ # managed certificates. When true, all test applications will use the same
+ # bundle id to avoid running out of certificates when using a free account.
+ ios_automatically_manage_certs = false
+
+ # If non-empty, this list must contain valid cpu architectures, and the
+ # final build will be a multi-architecture build (aka fat build) supporting
+ # the main $target_cpu architecture and all of $additional_target_cpus.
+ #
+ # For example to build an application that will run on both arm64 and armv7
+ # devices, you would use the following in args.gn file when running "gn args":
+ #
+ # target_os = "ios"
+ # target_cpu = "arm64"
+ # additional_target_cpus = [ "arm" ]
+ #
+ # You can also pass the value via "--args" parameter for "gn gen" command by
+ # using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
+ additional_target_cpus = []
+}
+
+assert(custom_toolchain == "" || additional_target_cpus == [],
+ "cannot define both custom_toolchain and additional_target_cpus")
+
+use_ios_simulator = current_cpu == "x86" || current_cpu == "x64"
+
+ios_generic_test_bundle_id_suffix = "generic-unit-test"
+
+# Initialize additional_toolchains from additional_target_cpus. Assert here
+# that the list contains neither $target_cpu nor duplicates, as either would
+# cause weird errors during the build.
+additional_toolchains = []
+if (additional_target_cpus != []) {
+ foreach(_additional_target_cpu, additional_target_cpus) {
+ assert(_additional_target_cpu != target_cpu,
+ "target_cpu must not be listed in additional_target_cpus")
+
+ _toolchain = "//build/toolchain/mac:ios_clang_$_additional_target_cpu"
+ foreach(_additional_toolchain, additional_toolchains) {
+ assert(_toolchain != _additional_toolchain,
+ "additional_target_cpus must not contains duplicate values")
+ }
+
+ additional_toolchains += [ _toolchain ]
+ }
+}
+
+if (ios_sdk_path == "") {
+ # Compute default target.
+ if (use_ios_simulator) {
+ ios_sdk_name = "iphonesimulator"
+ ios_sdk_platform = "iPhoneSimulator"
+ } else {
+ ios_sdk_name = "iphoneos"
+ ios_sdk_platform = "iPhoneOS"
+ }
+
+ ios_sdk_info_args = []
+ if (!use_system_xcode) {
+ ios_sdk_info_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+ ios_sdk_info_args += [ ios_sdk_name ]
+ script_name = "//build/config/mac/sdk_info.py"
+ _ios_sdk_result = exec_script(script_name, ios_sdk_info_args, "scope")
+ ios_sdk_path = _ios_sdk_result.sdk_path
+ ios_sdk_version = _ios_sdk_result.sdk_version
+ ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path
+ ios_sdk_build = _ios_sdk_result.sdk_build
+ xcode_version = _ios_sdk_result.xcode_version
+ xcode_version_int = _ios_sdk_result.xcode_version_int
+ xcode_build = _ios_sdk_result.xcode_build
+ machine_os_build = _ios_sdk_result.machine_os_build
+ if (use_ios_simulator) {
+ # This is weird, but Xcode sets DTPlatformBuild to an empty field for
+ # simulator builds.
+ ios_platform_build = ""
+ } else {
+ ios_platform_build = ios_sdk_build
+ }
+}
+
+if (ios_enable_code_signing && !use_ios_simulator) {
+ find_signing_identity_args = [
+ "--identity-description",
+ ios_code_signing_identity_description,
+ ]
+ if (!use_system_xcode) {
+ find_signing_identity_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+
+ # If an identity is not provided, look for one on the host
+ if (ios_code_signing_identity == "") {
+ _ios_identities = exec_script("find_signing_identity.py",
+ find_signing_identity_args,
+ "list lines")
+ if (_ios_identities == []) {
+ print("Automatic code signing identity selection was enabled but could")
+ print("not find exactly one code signing identity matching")
+ print("$ios_code_signing_identity_description. Check that your keychain")
+ print("is accessible and that there is a valid code signing identity")
+ print("listed by `xcrun security find-identity -v -p codesigning`")
+ print("TIP: Simulator builds don't require code signing...")
+ assert(false)
+ } else {
+ _ios_identities_len = 0
+ foreach(_, _ios_identities) {
+ _ios_identities_len += 1
+ }
+
+ ios_code_signing_identity = _ios_identities[0]
+ if (_ios_identities_len != 1) {
+ print("Warning: Multiple codesigning identities match " +
+ "\"$ios_code_signing_identity_description\"")
+ foreach(_ios_identity, _ios_identities) {
+ _selected = ""
+ if (ios_code_signing_identity == _ios_identity) {
+ _selected = " (selected)"
+ }
+ print("Warning: - $_ios_identity$_selected")
+ }
+ print("Warning: Please use either ios_code_signing_identity or ")
+ print("Warning: ios_code_signing_identity_description variable to ")
+ print("Warning: control which identity is selected.")
+ print()
+ }
+ }
+ }
+}
diff --git a/deps/v8/build/config/ios/ios_sdk_overrides.gni b/deps/v8/build/config/ios/ios_sdk_overrides.gni
new file mode 100644
index 0000000000..5699ebe06c
--- /dev/null
+++ b/deps/v8/build/config/ios/ios_sdk_overrides.gni
@@ -0,0 +1,17 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains arguments that subprojects may choose to override. It
+# asserts that those overrides are used, to prevent unused args warnings.
+
+declare_args() {
+ # Version of iOS that we're targeting.
+ ios_deployment_target = "11.0"
+}
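+
+# For example, a project may override the default in its args.gn (the value
+# is illustrative):
+#
+#   ios_deployment_target = "12.0"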
+
+# Always assert that ios_deployment_target is used on non-iOS platforms to
+# prevent unused args warnings.
+if (!is_ios) {
+ assert(ios_deployment_target == "11.0" || true)
+}
diff --git a/deps/v8/build/config/ios/rules.gni b/deps/v8/build/config/ios/rules.gni
new file mode 100644
index 0000000000..4b5ffa02b3
--- /dev/null
+++ b/deps/v8/build/config/ios/rules.gni
@@ -0,0 +1,2047 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/mac/base_rules.gni")
+import("//build/config/mac/symbols.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Constants corresponding to the bundle type identifier for XCTest and XCUITest
+# targets.
+_ios_xcode_xctest_bundle_id = "com.apple.product-type.bundle.unit-test"
+_ios_xcode_xcuitest_bundle_id = "com.apple.product-type.bundle.ui-testing"
+
+# Invokes lipo on multiple arch-specific binaries to create a fat binary.
+#
+# Arguments
+#
+# arch_binary_target
+# name of the target generating the arch-specific binaries; they must
+# be named $target_out_dir/$toolchain_cpu/$arch_binary_output.
+#
+# arch_binary_output
+# (optional, defaults to the name of $arch_binary_target) base name of
+# the arch-specific binary generated by arch_binary_target.
+#
+# output_name
+# (optional, defaults to $target_name) base name of the target output,
+# the full path will be $target_out_dir/$output_name.
+#
+# configs
+# (optional) a list of configurations; this is used to check whether
+# the binary should be stripped when "enable_stripping" is true.
+#
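+# A minimal usage sketch (target names are illustrative):
+#
+#   lipo_binary("app_executable") {
+#     arch_binary_target = ":app_arch_executable"
+#     arch_binary_output = "app"
+#   }
+#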
+template("lipo_binary") {
+ assert(defined(invoker.arch_binary_target),
+ "arch_binary_target must be defined for $target_name")
+
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _all_target_cpu = [ current_cpu ] + additional_target_cpus
+ _all_toolchains = [ current_toolchain ] + additional_toolchains
+
+ _arch_binary_target = invoker.arch_binary_target
+ _arch_binary_output = get_label_info(_arch_binary_target, "name")
+ if (defined(invoker.arch_binary_output)) {
+ _arch_binary_output = invoker.arch_binary_output
+ }
+
+ action(_target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "arch_binary_output",
+ "arch_binary_target",
+ "configs",
+ "output_name",
+ ])
+
+ script = "//build/toolchain/mac/linker_driver.py"
+
+ # http://crbug.com/762840. Fix for bots running out of memory.
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ outputs = [
+ "$target_out_dir/$_output_name",
+ ]
+
+ deps = []
+ _index = 0
+ inputs = []
+ foreach(_cpu, _all_target_cpu) {
+ _toolchain = _all_toolchains[_index]
+ _index = _index + 1
+
+ inputs +=
+ [ get_label_info("$_arch_binary_target($_toolchain)",
+ "target_out_dir") + "/$_cpu/$_arch_binary_output" ]
+
+ deps += [ "$_arch_binary_target($_toolchain)" ]
+ }
+
+ args = []
+ if (!use_system_xcode) {
+ args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+ args += [
+ "xcrun",
+ "lipo",
+ "-create",
+ "-output",
+ rebase_path("$target_out_dir/$_output_name", root_build_dir),
+ ] + rebase_path(inputs, root_build_dir)
+
+ if (enable_dsyms) {
+ _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM"
+ outputs += [
+ "$_dsyms_output_dir/",
+ "$_dsyms_output_dir/Contents/Info.plist",
+ "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name",
+ ]
+ args += [ "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir) ]
+ }
+
+ if (enable_stripping) {
+ args += [ "-Wcrl,strip,-x,-S" ]
+ if (save_unstripped_output) {
+ outputs += [ "$root_out_dir/$_output_name.unstripped" ]
+ args += [ "-Wcrl,unstripped," +
+ rebase_path("$root_out_dir/.", root_build_dir) ]
+ }
+ }
+ }
+}
+
+# Wrapper around create_bundle taking care of code signature settings.
+#
+# Arguments
+#
+# product_type
+# string, product type for the generated Xcode project.
+#
+# bundle_gen_dir
+# (optional) directory where the bundle is generated; must be below
+# root_out_dir and defaults to root_out_dir if omitted.
+#
+# bundle_deps
+# (optional) list of additional dependencies.
+#
+# bundle_deps_filter
+# (optional) list of dependencies to filter (for more information
+# see "gn help bundle_deps_filter").
+#
+# bundle_extension
+# string, extension of the bundle, used to generate bundle name.
+#
+# bundle_binary_target
+# (optional) string, label of the target generating the bundle main
+# binary. This target and bundle_binary_path are mutually exclusive.
+#
+# bundle_binary_output
+# (optional) string, base name of the binary generated by the
+# bundle_binary_target target, defaults to the target name.
+#
+# bundle_binary_path
+# (optional) string, path to the bundle main binary. This target and
+# bundle_binary_target are mutually exclusive.
+#
+# output_name:
+# (optional) string, name of the generated application, if omitted,
+# defaults to the target_name.
+#
+# extra_system_frameworks
+# (optional) list of system frameworks to copy to the bundle.
+#
+# enable_code_signing
+# (optional) boolean, controls whether code signing is enabled;
+# defaults to ios_enable_code_signing if not defined.
+#
+# entitlements_path:
+# (optional) path to the template to use to generate the application
+# entitlements by performing variable substitutions, defaults to
+# //build/config/ios/entitlements.plist.
+#
+# entitlements_target:
+# (optional) label of the target generating the application
+# entitlements (must generate a single file as output); cannot be
+# defined if entitlements_path is set.
+#
+# disable_entitlements
+# (optional, defaults to false) boolean, controls whether entitlements will
+# be embedded in the application during signature. If false and no
+# entitlements are provided, default empty entitlements will be used.
+#
+# disable_embedded_mobileprovision
+# (optional, defaults to false) boolean, controls whether a mobile
+# provisioning profile will be embedded in the bundle. If true, any
+# existing embedded.mobileprovision will be deleted.
+#
+# xcode_extra_attributes
+# (optional) scope, extra attributes for Xcode projects.
+#
+# xcode_test_application_name:
+# (optional) string, name of the test application for Xcode unit or ui
+# test target.
+#
+# primary_info_plist:
+# (optional) path to Info.plist to merge with the $partial_info_plist
+# generated by the compilation of the asset catalog.
+#
+# partial_info_plist:
+# (optional) path to the partial Info.plist generated by the asset
+# catalog compiler; if defined $primary_info_plist must also be defined.
+#
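+# A minimal usage sketch (names are illustrative):
+#
+#   create_signed_bundle("MyFramework_bundle") {
+#     product_type = "com.apple.product-type.framework"
+#     bundle_extension = ".framework"
+#     bundle_binary_target = ":MyFramework_shared_library"
+#     bundle_binary_output = "MyFramework"
+#   }
+#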
+template("create_signed_bundle") {
+ assert(defined(invoker.product_type),
+ "product_type must be defined for $target_name")
+ assert(defined(invoker.bundle_extension),
+ "bundle_extension must be defined for $target_name")
+ assert(defined(invoker.bundle_binary_target) !=
+ defined(invoker.bundle_binary_path),
+ "Only one of bundle_binary_target or bundle_binary_path may be " +
+ "specified for $target_name")
+ assert(!defined(invoker.partial_info_plist) ||
+ defined(invoker.primary_info_plist),
+ "primary_info_plist must be defined when partial_info_plist is " +
+ "defined for $target_name")
+
+ if (defined(invoker.xcode_test_application_name)) {
+ assert(
+ invoker.product_type == _ios_xcode_xctest_bundle_id ||
+ invoker.product_type == _ios_xcode_xcuitest_bundle_id,
+ "xcode_test_application_name can be only defined for Xcode unit or ui test target.")
+ }
+
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ if (defined(invoker.bundle_binary_path)) {
+ _bundle_binary_path = invoker.bundle_binary_path
+ } else {
+ _bundle_binary_target = invoker.bundle_binary_target
+ _bundle_binary_output = get_label_info(_bundle_binary_target, "name")
+ if (defined(invoker.bundle_binary_output)) {
+ _bundle_binary_output = invoker.bundle_binary_output
+ }
+ _bundle_binary_path =
+ get_label_info(_bundle_binary_target, "target_out_dir") +
+ "/$_bundle_binary_output"
+ }
+
+ _bundle_gen_dir = root_out_dir
+ if (defined(invoker.bundle_gen_dir)) {
+ _bundle_gen_dir = invoker.bundle_gen_dir
+ }
+
+ _bundle_extension = invoker.bundle_extension
+
+ _enable_embedded_mobileprovision = true
+ if (defined(invoker.disable_embedded_mobileprovision)) {
+ _enable_embedded_mobileprovision = !invoker.disable_embedded_mobileprovision
+ }
+
+ _enable_entitlements = true
+ if (defined(invoker.disable_entitlements)) {
+ _enable_entitlements = !invoker.disable_entitlements
+ }
+
+ if (_enable_entitlements) {
+ if (!defined(invoker.entitlements_target)) {
+ _entitlements_path = "//build/config/ios/entitlements.plist"
+ if (defined(invoker.entitlements_path)) {
+ _entitlements_path = invoker.entitlements_path
+ }
+ } else {
+ assert(!defined(invoker.entitlements_path),
+ "Cannot define both entitlements_path and entitlements_target " +
+ "for $target_name")
+
+ _entitlements_target_outputs =
+ get_target_outputs(invoker.entitlements_target)
+ _entitlements_path = _entitlements_target_outputs[0]
+ }
+ }
+
+ _enable_code_signing = ios_enable_code_signing
+ if (defined(invoker.enable_code_signing)) {
+ _enable_code_signing = invoker.enable_code_signing
+ }
+
+ create_bundle(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "bundle_deps_filter",
+ "data_deps",
+ "deps",
+ "partial_info_plist",
+ "product_type",
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ "xcode_test_application_name",
+ ])
+
+ bundle_root_dir = "$_bundle_gen_dir/$_output_name$_bundle_extension"
+ bundle_contents_dir = bundle_root_dir
+ bundle_resources_dir = bundle_contents_dir
+ bundle_executable_dir = bundle_contents_dir
+
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+
+ xcode_extra_attributes = {
+ IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target
+
+ # If invoker has defined extra attributes, they override the defaults.
+ if (defined(invoker.xcode_extra_attributes)) {
+ forward_variables_from(invoker.xcode_extra_attributes, "*")
+ }
+ }
+
+ if (defined(invoker.bundle_binary_target)) {
+ public_deps += [ invoker.bundle_binary_target ]
+ }
+
+ if (defined(invoker.bundle_deps)) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += invoker.bundle_deps
+ }
+ if (!defined(deps)) {
+ deps = []
+ }
+
+ code_signing_script = "//build/config/ios/codesign.py"
+ code_signing_sources = [ _bundle_binary_path ]
+ if (_enable_entitlements) {
+ if (defined(invoker.entitlements_target)) {
+ deps += [ invoker.entitlements_target ]
+ }
+ code_signing_sources += [ _entitlements_path ]
+ }
+ code_signing_outputs = [ "$bundle_contents_dir/$_output_name" ]
+ if (_enable_code_signing) {
+ code_signing_outputs +=
+ [ "$bundle_contents_dir/_CodeSignature/CodeResources" ]
+ }
+ if (ios_code_signing_identity != "" && !use_ios_simulator &&
+ _enable_embedded_mobileprovision) {
+ code_signing_outputs +=
+ [ "$bundle_contents_dir/embedded.mobileprovision" ]
+ }
+
+ if (defined(invoker.extra_system_frameworks)) {
+ foreach(_framework, invoker.extra_system_frameworks) {
+ code_signing_outputs += [ "$bundle_contents_dir/Frameworks/" +
+ get_path_info(_framework, "file") ]
+ }
+ }
+
+ code_signing_args = []
+ if (!use_system_xcode) {
+ code_signing_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+ code_signing_args += [
+ "code-sign-bundle",
+ "-t=" + ios_sdk_name,
+ "-i=" + ios_code_signing_identity,
+ "-b=" + rebase_path(_bundle_binary_path, root_build_dir),
+ ]
+ if (_enable_entitlements) {
+ code_signing_args +=
+ [ "-e=" + rebase_path(_entitlements_path, root_build_dir) ]
+ }
+ if (!_enable_embedded_mobileprovision) {
+ code_signing_args += [ "--disable-embedded-mobileprovision" ]
+ }
+ code_signing_args += [ rebase_path(bundle_root_dir, root_build_dir) ]
+ if (!_enable_code_signing) {
+ code_signing_args += [ "--disable-code-signature" ]
+ }
+ if (defined(invoker.extra_system_frameworks)) {
+ # All frameworks in extra_system_frameworks are expected to be system
+ # frameworks whose paths are already absolute, so do not use
+ # rebase_path here.
+ foreach(_framework, invoker.extra_system_frameworks) {
+ code_signing_args += [ "-F=" + _framework ]
+ }
+ }
+ if (defined(invoker.partial_info_plist)) {
+ _partial_info_plists = [
+ invoker.primary_info_plist,
+ invoker.partial_info_plist,
+ ]
+
+ _plist_compiler_path = "//build/config/mac/plist_util.py"
+
+ code_signing_sources += _partial_info_plists
+ code_signing_sources += [ _plist_compiler_path ]
+ code_signing_outputs += [ "$bundle_contents_dir/Info.plist" ]
+
+ code_signing_args +=
+ [ "-P=" + rebase_path(_plist_compiler_path, root_build_dir) ]
+ foreach(_partial_info_plist, _partial_info_plists) {
+ code_signing_args +=
+ [ "-p=" + rebase_path(_partial_info_plist, root_build_dir) ]
+ }
+ }
+ }
+}
+
+# Generates Info.plist files for iOS apps and frameworks.
+#
+# Arguments
+#
+# info_plist:
+# (optional) string, path to the Info.plist file that will be used for
+# the bundle.
+#
+# info_plist_target:
+# (optional) string, if the info_plist is generated from an action,
+# rather than a regular source file, specify the target name in lieu
+# of info_plist. The two arguments are mutually exclusive.
+#
+# executable_name:
+# string, name of the generated target used for the product
+# and executable name as specified in the output Info.plist.
+#
+# extra_substitutions:
+# (optional) string array, 'key=value' pairs for extra fields which are
+# specified in a source Info.plist template.
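+#
+# A minimal usage sketch (names and paths are illustrative):
+#
+#   ios_info_plist("app_info_plist") {
+#     executable_name = "MyApp"
+#     info_plist = "//ios/app/Info.plist"
+#   }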
+template("ios_info_plist") {
+ assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+ "Only one of info_plist or info_plist_target may be specified in " +
+ target_name)
+
+ if (defined(invoker.info_plist)) {
+ _info_plist = invoker.info_plist
+ } else {
+ _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+ _info_plist = _info_plist_target_output[0]
+ }
+
+ info_plist(target_name) {
+ format = "binary1"
+ extra_substitutions = []
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions = invoker.extra_substitutions
+ }
+ extra_substitutions += [
+ "IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix",
+ "IOS_PLATFORM_BUILD=$ios_platform_build",
+ "IOS_PLATFORM_NAME=$ios_sdk_name",
+ "IOS_PLATFORM_VERSION=$ios_sdk_version",
+ "IOS_SDK_BUILD=$ios_sdk_build",
+ "IOS_SDK_NAME=$ios_sdk_name$ios_sdk_version",
+ "IOS_SUPPORTED_PLATFORM=$ios_sdk_platform",
+ ]
+ plist_templates = [
+ "//build/config/ios/BuildInfo.plist",
+ _info_plist,
+ ]
+ if (defined(invoker.info_plist_target)) {
+ deps = [
+ invoker.info_plist_target,
+ ]
+ }
+ forward_variables_from(invoker,
+ [
+ "executable_name",
+ "output_name",
+ "visibility",
+ "testonly",
+ ])
+ }
+}
+
+# Template to build an application bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on
+# iOS. As the template forwards the generation of the application executable
+# to an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+# output_name:
+# (optional) string, name of the generated application, if omitted,
+# defaults to the target_name.
+#
+# extra_substitutions:
+# (optional) list of strings in "key=value" format; each value will
+# be used as an additional variable substitution rule when generating
+# the application Info.plist.
+#
+# info_plist:
+# (optional) string, path to the Info.plist file that will be used for
+# the bundle.
+#
+# info_plist_target:
+# (optional) string, if the info_plist is generated from an action,
+# rather than a regular source file, specify the target name in lieu
+# of info_plist. The two arguments are mutually exclusive.
+#
+# entitlements_path:
+# (optional) path to the template to use to generate the application
+# entitlements by performing variable substitutions, defaults to
+# //build/config/ios/entitlements.plist.
+#
+# entitlements_target:
+# (optional) label of the target generating the application
+# entitlements (must generate a single file as output); cannot be
+# defined if entitlements_path is set.
+#
+# bundle_extension:
+# (optional) bundle extension including the dot, default to ".app".
+#
+# product_type
+# (optional) string, product type for the generated Xcode project,
+# default to "com.apple.product-type.application". Should generally
+# not be overridden.
+#
+# enable_code_signing
+# (optional) boolean, controls whether code signing is enabled;
+# defaults to ios_enable_code_signing if not defined.
+#
+# variants
+# (optional) list of scopes, each scope needs to define the attributes
+# "name" and "bundle_deps"; if defined and non-empty, then one bundle
+# named $target_out_dir/$variant/$output_name will be created for each
+# variant with the same binary but the correct bundle_deps, the bundle
+# at $target_out_dir/$output_name will be a copy of the first variant.
+#
+# For more information, see "gn help executable".
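+#
+# A minimal usage sketch (names and paths are illustrative):
+#
+#   ios_app_bundle("my_app") {
+#     info_plist = "//ios/my_app/Info.plist"
+#     sources = [ "main.mm" ]
+#     deps = [ "//ios/my_app:lib" ]
+#   }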
+template("ios_app_bundle") {
+ _output_name = target_name
+ _target_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _arch_executable_source = _target_name + "_arch_executable_sources"
+ _arch_executable_target = _target_name + "_arch_executable"
+ _lipo_executable_target = _target_name + "_executable"
+
+ if (defined(invoker.variants) && invoker.variants != []) {
+ _variants = []
+
+ foreach(_variant, invoker.variants) {
+ assert(defined(_variant.name) && _variant.name != "",
+ "name must be defined for all $target_name variants")
+
+ assert(defined(_variant.bundle_deps),
+ "bundle_deps must be defined for all $target_name variants")
+
+ _variants += [
+ {
+ name = _variant.name
+ bundle_deps = _variant.bundle_deps
+ target_name = "${_target_name}_variants_${_variant.name}"
+ bundle_gen_dir = "$root_out_dir/variants/${_variant.name}"
+ },
+ ]
+ }
+ } else {
+ # If no variants are passed to the template, use a fake variant with
+ # no name to avoid duplicating code. As no real variant can have an
+ # empty name, this makes it possible to tell whether a variant is
+ # fake or not.
+ _variants = [
+ {
+ name = ""
+ bundle_deps = []
+ target_name = _target_name
+ bundle_gen_dir = root_out_dir
+ },
+ ]
+ }
+
+ _default_variant = _variants[0]
+
+ if (current_toolchain != default_toolchain) {
+ # Mark _variants and _default_variant as used for the secondary toolchains
+ # to avoid the "Assignment had no effect" error from gn.
+ assert(_variants != [])
+ assert(_default_variant.target_name != "")
+ }
+
+ source_set(_arch_executable_source) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "bundle_extension",
+ "enable_code_signing",
+ "entitlements_path",
+ "entitlements_target",
+ "extra_substitutions",
+ "extra_system_frameworks",
+ "info_plist",
+ "info_plist_target",
+ "output_name",
+ "product_type",
+ "visibility",
+ ])
+
+ visibility = [ ":$_arch_executable_target" ]
+ }
+
+ if (current_toolchain == default_toolchain || use_ios_simulator) {
+ _generate_entitlements_target = _target_name + "_gen_entitlements"
+ _generate_entitlements_output =
+ get_label_info(":$_generate_entitlements_target($default_toolchain)",
+ "target_out_dir") + "/$_output_name.xcent"
+ }
+
+ executable(_arch_executable_target) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "bundle_extension",
+ "enable_code_signing",
+ "entitlements_path",
+ "entitlements_target",
+ "extra_substitutions",
+ "extra_system_frameworks",
+ "info_plist",
+ "info_plist_target",
+ "output_name",
+ "product_type",
+ "sources",
+ "visibility",
+ ])
+
+ visibility = [ ":$_lipo_executable_target($default_toolchain)" ]
+ if (current_toolchain != default_toolchain) {
+ visibility += [ ":$_target_name" ]
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_arch_executable_source" ]
+
+ if (!defined(libs)) {
+ libs = []
+ }
+ libs += [ "UIKit.framework" ]
+
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [
+ "-Xlinker",
+ "-rpath",
+ "-Xlinker",
+ "@executable_path/Frameworks",
+ "-Xlinker",
+ "-objc_abi_version",
+ "-Xlinker",
+ "2",
+ ]
+
+ if (use_ios_simulator) {
+ deps += [ ":$_generate_entitlements_target($default_toolchain)" ]
+
+ if (!defined(inputs)) {
+ inputs = []
+ }
+ inputs += [ _generate_entitlements_output ]
+
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [
+ "-Xlinker",
+ "-sectcreate",
+ "-Xlinker",
+ "__TEXT",
+ "-Xlinker",
+ "__entitlements",
+ "-Xlinker",
+ rebase_path(_generate_entitlements_output, root_build_dir),
+ ]
+ }
+
+ output_name = _output_name
+ output_prefix_override = true
+ output_dir = "$target_out_dir/$current_cpu"
+ }
+
+ if (current_toolchain != default_toolchain) {
+ # For fat builds, only the default toolchain will generate an application
+ # bundle. For the other toolchains, the template is only used for building
+ # the arch-specific binary, thus the default target is just a group().
+
+ group(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "visibility",
+ "testonly",
+ ])
+ public_deps = [
+ ":$_arch_executable_target",
+ ]
+ }
+ } else {
+ lipo_binary(_lipo_executable_target) {
+ forward_variables_from(invoker,
+ [
+ "configs",
+ "testonly",
+ ])
+
+ visibility = []
+ foreach(_variant, _variants) {
+ visibility += [ ":${_variant.target_name}" ]
+ }
+
+ output_name = _output_name
+ arch_binary_target = ":$_arch_executable_target"
+ arch_binary_output = _output_name
+ }
+
+ _generate_info_plist = target_name + "_generate_info_plist"
+ ios_info_plist(_generate_info_plist) {
+ forward_variables_from(invoker,
+ [
+ "extra_substitutions",
+ "info_plist",
+ "info_plist_target",
+ ])
+
+ executable_name = _output_name
+ }
+
+ if (current_toolchain == default_toolchain) {
+ if (!defined(invoker.entitlements_target)) {
+ _entitlements_path = "//build/config/ios/entitlements.plist"
+ if (defined(invoker.entitlements_path)) {
+ _entitlements_path = invoker.entitlements_path
+ }
+ } else {
+ assert(!defined(invoker.entitlements_path),
+ "Cannot define both entitlements_path and entitlements_target" +
+ "for $_target_name")
+
+ _entitlements_target_outputs =
+ get_target_outputs(invoker.entitlements_target)
+ _entitlements_path = _entitlements_target_outputs[0]
+ }
+
+ action(_generate_entitlements_target) {
+ _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist")
+ _info_plist_path = _gen_info_plist_outputs[0]
+
+ script = "//build/config/ios/codesign.py"
+ deps = [
+ ":$_generate_info_plist",
+ ]
+ if (defined(invoker.entitlements_target)) {
+ deps += [ invoker.entitlements_target ]
+ }
+ sources = [
+ _entitlements_path,
+ _info_plist_path,
+ ]
+ outputs = [
+ _generate_entitlements_output,
+ ]
+
+ args = []
+ if (!use_system_xcode) {
+ args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+ args += [
+ "generate-entitlements",
+ "-e=" + rebase_path(_entitlements_path, root_build_dir),
+ "-p=" + rebase_path(_info_plist_path, root_build_dir),
+ ] + rebase_path(outputs, root_build_dir)
+ }
+ }
+
+ _app_product_type = "com.apple.product-type.application"
+ _product_type = _app_product_type
+ if (defined(invoker.product_type)) {
+ _product_type = invoker.product_type
+ }
+
+ _app_bundle_extension = ".app"
+ _bundle_extension = _app_bundle_extension
+ if (defined(invoker.bundle_extension)) {
+ _bundle_extension = invoker.bundle_extension
+ }
+
+ # Only write PkgInfo for real applications, not application extensions
+ # (they have the same product type but a different extension).
+ _write_pkg_info = _product_type == _app_product_type &&
+ _bundle_extension == _app_bundle_extension
+
+ if (_write_pkg_info) {
+ _create_pkg_info = target_name + "_pkg_info"
+ action(_create_pkg_info) {
+ forward_variables_from(invoker, [ "testonly" ])
+ script = "//build/config/mac/write_pkg_info.py"
+ sources = get_target_outputs(":$_generate_info_plist")
+ outputs = [
+ # The output cannot be named PkgInfo as the name would not be unique if
+ # multiple ios_app_bundle targets are defined in the same BUILD.gn file.
+ # The file is renamed to the correct name in the bundle_data outputs.
+ "$target_gen_dir/$target_name",
+ ]
+ args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+ [ "--output" ] + rebase_path(outputs, root_build_dir)
+ deps = [
+ ":$_generate_info_plist",
+ ]
+ }
+
+ _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info"
+ bundle_data(_bundle_data_pkg_info) {
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = get_target_outputs(":$_create_pkg_info")
+ outputs = [
+ "{{bundle_resources_dir}}/PkgInfo",
+ ]
+ public_deps = [
+ ":$_create_pkg_info",
+ ]
+ }
+ }
+
+ foreach(_variant, _variants) {
+ create_signed_bundle(_variant.target_name) {
+ forward_variables_from(invoker,
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "data_deps",
+ "deps",
+ "enable_code_signing",
+ "entitlements_path",
+ "entitlements_target",
+ "extra_system_frameworks",
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+
+ output_name = _output_name
+ bundle_gen_dir = _variant.bundle_gen_dir
+ bundle_binary_target = ":$_lipo_executable_target"
+ bundle_binary_output = _output_name
+ bundle_extension = _bundle_extension
+ product_type = _product_type
+
+ _generate_info_plist_outputs =
+ get_target_outputs(":$_generate_info_plist")
+ primary_info_plist = _generate_info_plist_outputs[0]
+ partial_info_plist =
+ "$target_gen_dir/${_variant.target_name}_partial_info.plist"
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_generate_info_plist" ]
+
+ if (!defined(bundle_deps)) {
+ bundle_deps = []
+ }
+ if (_write_pkg_info) {
+ bundle_deps += [ ":$_bundle_data_pkg_info" ]
+ }
+ bundle_deps += _variant.bundle_deps
+
+ if (use_ios_simulator) {
+ if (!defined(data_deps)) {
+ data_deps = []
+ }
+ data_deps += [ "//testing/iossim" ]
+ }
+ }
+ }
+
+ if (_default_variant.name != "") {
+ _bundle_short_name = "$_output_name$_bundle_extension"
+ action(_target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
+
+ script = "//build/config/ios/hardlink.py"
+ public_deps = []
+ foreach(_variant, _variants) {
+ public_deps += [ ":${_variant.target_name}" ]
+ }
+
+ sources = [
+ "${_default_variant.bundle_gen_dir}/$_bundle_short_name",
+ ]
+ outputs = [
+ "$root_out_dir/$_bundle_short_name",
+ ]
+
+ args = rebase_path(sources, root_out_dir) +
+ rebase_path(outputs, root_out_dir)
+ }
+ }
+ }
+}
+
+set_defaults("ios_app_bundle") {
+ configs = default_executable_configs
+}
+
+# Template to build an application extension bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on
+# iOS. As the template forwards the generation of the application executable
+# to an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+# output_name:
+# (optional) string, name of the generated application, if omitted,
+# defaults to the target_name.
+#
+# extra_substitutions:
+# (optional) list of strings in "key=value" format; each value will
+# be used as an additional variable substitution rule when generating
+# the application Info.plist.
+#
+# info_plist:
+# (optional) string, path to the Info.plist file that will be used for
+# the bundle.
+#
+# info_plist_target:
+# (optional) string, if the info_plist is generated from an action,
+# rather than a regular source file, specify the target name in lieu
+# of info_plist. The two arguments are mutually exclusive.
+#
+# For more information, see "gn help executable".
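+#
+# A minimal usage sketch (names and paths are illustrative):
+#
+#   ios_appex_bundle("share_extension") {
+#     info_plist = "//ios/share_extension/Info.plist"
+#     sources = [ "share_view_controller.mm" ]
+#   }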
+template("ios_appex_bundle") {
+ ios_app_bundle(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "bundle_extension",
+ "product_type",
+ ])
+ bundle_extension = ".appex"
+ product_type = "com.apple.product-type.app-extension"
+
+ # Add linker flags required for an application extension (determined by
+ # inspecting the link command-line when using Xcode 9.0+).
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [
+ "-e",
+ "_NSExtensionMain",
+ "-fapplication-extension",
+ ]
+ }
+}
+
+set_defaults("ios_appex_bundle") {
+ configs = default_executable_configs
+}
+
+# Compile a xib or storyboard file and add it to a bundle_data so that it is
+# available at runtime in the bundle.
+#
+# Arguments
+#
+# source:
+# string, path of the xib or storyboard to compile.
+#
+# Forwards all variables to the bundle_data target.
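+#
+# A minimal usage sketch (the path is illustrative):
+#
+#   bundle_data_ib_file("launch_screen") {
+#     source = "resources/LaunchScreen.storyboard"
+#   }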
+template("bundle_data_ib_file") {
+ assert(defined(invoker.source), "source needs to be defined for $target_name")
+
+ _source_extension = get_path_info(invoker.source, "extension")
+ assert(_source_extension == "xib" || _source_extension == "storyboard",
+ "source must be a .xib or .storyboard for $target_name")
+
+ _target_name = target_name
+ if (_source_extension == "xib") {
+ _compile_ib_file = target_name + "_compile_xib"
+ _output_extension = "nib"
+ } else {
+ _compile_ib_file = target_name + "_compile_storyboard"
+ _output_extension = "storyboardc"
+ }
+
+ compile_ib_files(_compile_ib_file) {
+ sources = [
+ invoker.source,
+ ]
+ output_extension = _output_extension
+ visibility = [ ":$_target_name" ]
+ ibtool_flags = [
+ "--minimum-deployment-target",
+ ios_deployment_target,
+ "--auto-activate-custom-fonts",
+ "--target-device",
+ "iphone",
+ "--target-device",
+ "ipad",
+ ]
+ }
+
+ bundle_data(_target_name) {
+ forward_variables_from(invoker, "*", [ "source" ])
+
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+ public_deps += [ ":$_compile_ib_file" ]
+
+ sources = get_target_outputs(":$_compile_ib_file")
+
+ outputs = [
+ "{{bundle_resources_dir}}/{{source_file_part}}",
+ ]
+ }
+}
+
+# Compile a strings file and add it to a bundle_data so that it is available
+# at runtime in the bundle.
+#
+# Arguments
+#
+# source:
+# string, path of the strings file to compile.
+#
+# output:
+# string, path of the compiled file in the final bundle.
+#
+# Forwards all variables to the bundle_data target.
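+#
+# A minimal usage sketch (paths are illustrative):
+#
+#   bundle_data_strings("localizable_strings") {
+#     source = "resources/en.lproj/Localizable.strings"
+#     output = "{{bundle_resources_dir}}/en.lproj/Localizable.strings"
+#   }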
+template("bundle_data_strings") {
+ assert(defined(invoker.source), "source needs to be defined for $target_name")
+ assert(defined(invoker.output), "output needs to be defined for $target_name")
+
+ _source_extension = get_path_info(invoker.source, "extension")
+ assert(_source_extension == "strings",
+ "source must be a .strings for $target_name")
+
+ _target_name = target_name
+ _convert_target = target_name + "_compile_strings"
+
+ convert_plist(_convert_target) {
+ visibility = [ ":$_target_name" ]
+ source = invoker.source
+ output =
+ "$target_gen_dir/$_target_name/" + get_path_info(invoker.source, "file")
+ format = "binary1"
+ }
+
+ bundle_data(_target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "source",
+ "output",
+ ])
+
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+ public_deps += [ ":$_convert_target" ]
+
+ sources = get_target_outputs(":$_convert_target")
+
+ outputs = [
+ invoker.output,
+ ]
+ }
+}
+
+# Template to package a shared library into an iOS framework bundle.
+#
+# By default, the bundle target this template generates does not link the
+# resulting framework into anything that depends on it. If a dependency wants
+# a link-time (as well as build-time) dependency on the framework bundle,
+# depend against "$target_name+link". If only the build-time dependency is
+# required (e.g., for copying into another bundle), then use "$target_name".
+#
+# Arguments
+#
+# output_name:
+# (optional) string, name of the generated framework without the
+# .framework suffix. If omitted, defaults to target_name.
+#
+# public_headers:
+# (optional) list of paths to header files that need to be copied
+# into the framework bundle Headers subdirectory. If omitted or
+# empty then the Headers subdirectory is not created.
+#
+# sources
+# (optional) list of files. Needs to be defined and non-empty if
+# public_headers is defined and non-empty.
+#
+# enable_code_signing
+# (optional) boolean, controls whether code signing is enabled;
+# defaults to ios_enable_code_signing if not defined.
+#
+# This template provides two targets for the resulting framework bundle. The
+# link-time behavior varies depending on which of the two targets below is
+# added as a dependency:
+# - $target_name only adds a build-time dependency. Targets that depend on
+# it will not link against the framework.
+# - $target_name+link adds a build-time and link-time dependency. Targets
+# that depend on it will link against the framework.
+#
+# The build-time-only dependency is used for when a target needs to use the
+# framework either only for resources, or because the target loads it at run-
+# time, via dlopen() or NSBundle. The link-time dependency will cause the
+# dependee to have the framework loaded by dyld at launch.
+#
+# Example of build-time only dependency:
+#
+# framework_bundle("CoreTeleportation") {
+# sources = [ ... ]
+# }
+#
+# bundle_data("core_teleportation_bundle_data") {
+# deps = [ ":CoreTeleportation" ]
+# sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+# }
+#
+# app_bundle("GoatTeleporter") {
+# sources = [ ... ]
+# deps = [
+# ":core_teleportation_bundle_data",
+# ]
+# }
+#
+# The GoatTeleporter.app will not directly link against
+# CoreTeleportation.framework, but it will be included in the bundle's
+# Frameworks directory.
+#
+# Example of link-time dependency:
+#
+# framework_bundle("CoreTeleportation") {
+# sources = [ ... ]
+# ldflags = [
+# "-install_name",
+# "@executable_path/../Frameworks/$target_name.framework"
+# ]
+# }
+#
+# bundle_data("core_teleportation_bundle_data") {
+# deps = [ ":CoreTeleportation+link" ]
+# sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+# }
+#
+# app_bundle("GoatTeleporter") {
+# sources = [ ... ]
+# deps = [
+# ":core_teleportation_bundle_data",
+# ]
+# }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by shared library target.
+template("ios_framework_bundle") {
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _has_public_headers =
+ defined(invoker.public_headers) && invoker.public_headers != []
+
+ # Public configs are not propagated across toolchains (see crbug.com/675224)
+ # so some configs have to be defined for both the default_toolchain and all
+ # other toolchains when performing a fat build. Use "get_label_info" to construct
+ # the path since they need to be relative to the default_toolchain.
+
+ _default_toolchain_root_out_dir =
+ get_label_info("$_target_name($default_toolchain)", "root_out_dir")
+ _default_toolchain_target_gen_dir =
+ get_label_info("$_target_name($default_toolchain)", "target_gen_dir")
+
+ if (_has_public_headers) {
+ _framework_headers_target = _target_name + "_framework_headers"
+ _framework_headers_config = _target_name + "_framework_headers_config"
+ config(_framework_headers_config) {
+ # The link settings are inherited from the framework_bundle config.
+ cflags = [
+ "-F",
+ rebase_path("$_default_toolchain_root_out_dir/.", root_build_dir),
+ ]
+ }
+
+ _headers_map_config = _target_name + "_headers_map"
+ _header_map_filename =
+ "$_default_toolchain_target_gen_dir/$_output_name.headers.hmap"
+ config(_headers_map_config) {
+ visibility = [ ":$_target_name" ]
+ include_dirs = [ _header_map_filename ]
+ }
+ }
+
+ _arch_shared_library_source = _target_name + "_arch_shared_library_sources"
+ _arch_shared_library_target = _target_name + "_arch_shared_library"
+ _lipo_shared_library_target = _target_name + "_shared_library"
+ _link_target_name = _target_name + "+link"
+
+ _framework_public_config = _target_name + "_public_config"
+ config(_framework_public_config) {
+ # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
+ # and include_dirs to avoid duplicate values on the command-line.
+ visibility = [ ":$_target_name" ]
+ ldflags = [
+ "-F",
+ rebase_path("$_default_toolchain_root_out_dir/.", root_build_dir),
+ ]
+ lib_dirs = [ root_out_dir ]
+ libs = [ "$_output_name.framework" ]
+ }
+
+ source_set(_arch_shared_library_source) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "data_deps",
+ "enable_code_signing",
+ "extra_substitutions",
+ "info_plist",
+ "info_plist_target",
+ "output_name",
+ "visibility",
+ ])
+
+ visibility = [ ":$_arch_shared_library_target" ]
+
+ if (_has_public_headers) {
+ configs += [
+ ":$_framework_headers_config",
+ ":$_headers_map_config",
+ ]
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_framework_headers_target($default_toolchain)" ]
+ }
+ }
+
+ shared_library(_arch_shared_library_target) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "data_deps",
+ "enable_code_signing",
+ "extra_substitutions",
+ "info_plist",
+ "info_plist_target",
+ "output_name",
+ "sources",
+ "visibility",
+ ])
+
+ visibility = [ ":$_lipo_shared_library_target($default_toolchain)" ]
+ if (current_toolchain != default_toolchain) {
+ visibility += [ ":$_target_name" ]
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_arch_shared_library_source" ]
+ if (_has_public_headers) {
+ deps += [ ":$_framework_headers_target($default_toolchain)" ]
+ }
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [
+ "-Xlinker",
+ "-install_name",
+ "-Xlinker",
+ "@rpath/$_output_name.framework/$_output_name",
+ "-Xlinker",
+ "-objc_abi_version",
+ "-Xlinker",
+ "2",
+ ]
+
+ output_extension = ""
+ output_name = _output_name
+ output_prefix_override = true
+ output_dir = "$target_out_dir/$current_cpu"
+ }
+
+ if (current_toolchain != default_toolchain) {
+ # For fat builds, only the default toolchain will generate a framework
+ # bundle. For the other toolchains, the template is only used for building
+ # the arch-specific binary, thus the default target is just a group().
+
+ group(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "visibility",
+ "testonly",
+ ])
+ public_deps = [
+ ":$_arch_shared_library_target",
+ ]
+ }
+
+ group(_link_target_name) {
+ forward_variables_from(invoker,
+ [
+ "public_configs",
+ "visibility",
+ "testonly",
+ ])
+ public_deps = [
+ ":$_link_target_name($default_toolchain)",
+ ]
+
+ if (_has_public_headers) {
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
+ public_configs += [ ":$_framework_headers_config" ]
+ }
+ if (!defined(all_dependent_configs)) {
+ all_dependent_configs = []
+ }
+ all_dependent_configs += [ ":$_framework_public_config" ]
+ }
+
+ if (defined(invoker.bundle_deps)) {
+ assert(invoker.bundle_deps != [], "mark bundle_deps as used")
+ }
+ } else {
+ if (_has_public_headers) {
+ _public_headers = invoker.public_headers
+ _framework_root = "$root_out_dir/$_output_name.framework"
+
+ _compile_headers_map_target = _target_name + "_compile_headers_map"
+ action(_compile_headers_map_target) {
+ visibility = [ ":$_framework_headers_target" ]
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+ script = "//build/config/ios/write_framework_hmap.py"
+ outputs = [
+ _header_map_filename,
+ ]
+
+ # The header map generation only wants the list of headers, not all of
+ # the sources, so filter any non-header source files from "sources". This
+ # is less error prone than having the developer duplicate the list of all
+ # headers in addition to "sources".
+ set_sources_assignment_filter([
+ "*.c",
+ "*.cc",
+ "*.cpp",
+ "*.m",
+ "*.mm",
+ ])
+ sources = invoker.sources
+ set_sources_assignment_filter([])
+
+ args = [
+ rebase_path(_header_map_filename),
+ rebase_path(_framework_root, root_build_dir),
+ ] + rebase_path(sources, root_build_dir)
+ }
+
+ _create_module_map_target = _target_name + "_module_map"
+ action(_create_module_map_target) {
+ visibility = [ ":$_framework_headers_target" ]
+ script = "//build/config/ios/write_framework_modulemap.py"
+ outputs = [
+ "$_framework_root/Modules/module.modulemap",
+ ]
+ args = [ rebase_path("$_framework_root", root_build_dir) ]
+ }
+
+ _copy_public_headers_target = _target_name + "_copy_public_headers"
+ copy(_copy_public_headers_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "deps",
+ ])
+ visibility = [ ":$_framework_headers_target" ]
+ sources = _public_headers
+ outputs = [
+ "$_framework_root/Headers/{{source_file_part}}",
+ ]
+
+ # Do not use forward_variables_from for "public_deps" as
+ # we do not want to forward those dependencies.
+ if (defined(invoker.public_deps)) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += invoker.public_deps
+ }
+ }
+
+ group(_framework_headers_target) {
+ forward_variables_from(invoker, [ "testonly" ])
+ deps = [
+ ":$_compile_headers_map_target",
+ ":$_create_module_map_target",
+ ]
+ public_deps = [
+ ":$_copy_public_headers_target",
+ ]
+ }
+ }
+
+ lipo_binary(_lipo_shared_library_target) {
+ forward_variables_from(invoker,
+ [
+ "configs",
+ "testonly",
+ ])
+
+ visibility = [ ":$_target_name" ]
+ output_name = _output_name
+ arch_binary_target = ":$_arch_shared_library_target"
+ arch_binary_output = _output_name
+ }
+
+ _info_plist_target = _target_name + "_info_plist"
+ _info_plist_bundle = _target_name + "_info_plist_bundle"
+ ios_info_plist(_info_plist_target) {
+ visibility = [ ":$_info_plist_bundle" ]
+ executable_name = _output_name
+ forward_variables_from(invoker,
+ [
+ "extra_substitutions",
+ "info_plist",
+ "info_plist_target",
+ ])
+ }
+
+ bundle_data(_info_plist_bundle) {
+ visibility = [ ":$_target_name" ]
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [
+ "{{bundle_contents_dir}}/Info.plist",
+ ]
+ public_deps = [
+ ":$_info_plist_target",
+ ]
+ }
+
+ create_signed_bundle(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "bundle_deps",
+ "bundle_deps_filter",
+ "data_deps",
+ "deps",
+ "enable_code_signing",
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+
+ product_type = "com.apple.product-type.framework"
+ bundle_extension = ".framework"
+
+ output_name = _output_name
+ bundle_binary_target = ":$_lipo_shared_library_target"
+ bundle_binary_output = _output_name
+
+ # Frameworks do not have entitlements nor a mobileprovision because they
+ # use the ones from the bundle embedding them (.app or .appex), as they
+ # are just dynamic libraries with shared code.
+ disable_entitlements = true
+ disable_embedded_mobileprovision = true
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_info_plist_bundle" ]
+ }
+
+ group(_link_target_name) {
+ forward_variables_from(invoker,
+ [
+ "public_configs",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+ public_deps += [ ":$_target_name" ]
+
+ if (_has_public_headers) {
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
+ public_configs += [ ":$_framework_headers_config" ]
+ }
+ if (!defined(all_dependent_configs)) {
+ all_dependent_configs = []
+ }
+ all_dependent_configs += [ ":$_framework_public_config" ]
+ }
+
+ bundle_data(_target_name + "+bundle") {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ public_deps = [
+ ":$_target_name",
+ ]
+ sources = [
+ "$root_out_dir/$_output_name.framework",
+ ]
+ outputs = [
+ "{{bundle_resources_dir}}/Frameworks/$_output_name.framework",
+ ]
+ }
+ }
+}
+
+set_defaults("ios_framework_bundle") {
+ configs = default_shared_library_configs
+}
+
+# Template to build a xctest bundle that contains a loadable module for iOS.
+#
+# Arguments
+#
+# deps:
+# list of labels to depends on, these values are used to create the
+# loadable module.
+#
+# product_type:
+# string, product type for the generated Xcode project, use
+# "com.apple.product-type.bundle.unit-test" for unit test and
+# "com.apple.product-type.bundle.ui-testing" for UI testing.
+#
+# host_target:
+# string, name of the target that depends on the generated bundle, this
+# value is used to restrict visibilities.
+#
+# xcode_test_application_name:
+# string, name of the test application for Xcode unit or ui test target.
+#
+# output_name:
+# (optional) string, name of the generated application, if omitted,
+# defaults to the target_name.
+#
+# This template defines two targets: "${target_name}", which is the xctest
+# bundle, and "${target_name}_bundle", which is a bundle_data that wraps the
+# xctest bundle and that only the "${host_target}" can depend on.
+#
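+# Example (hypothetical labels; a sketch of a typical invocation, not taken
+# from an actual BUILD.gn file):
+#
+#   ios_xctest_bundle("base_unittests_module") {
+#     product_type = "com.apple.product-type.bundle.unit-test"
+#     host_target = "base_unittests"
+#     xcode_test_application_name = "base_unittests"
+#     deps = [ ":base_unittests_sources" ]
+#   }
+#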
+template("ios_xctest_bundle") {
+ assert(defined(invoker.deps), "deps must be defined for $target_name")
+ assert(defined(invoker.product_type),
+ "product_type must be defined for $target_name")
+ assert(invoker.product_type == _ios_xcode_xctest_bundle_id ||
+ invoker.product_type == _ios_xcode_xcuitest_bundle_id,
+ "product_type defined for $target_name is invalid.")
+ assert(defined(invoker.host_target),
+ "host_target must be defined for $target_name")
+ assert(defined(invoker.xcode_test_application_name),
+ "xcode_test_application_name must be defined for $target_name")
+
+ # Silence the "assignment had no effect" error for non-default toolchains,
+ # as the following variables are only used in the expansion of the
+ # template for the default toolchain.
+ assert(invoker.configs != [])
+ assert(invoker.host_target != target_name)
+ assert(invoker.xcode_test_application_name != target_name)
+
+ _target_name = target_name
+ _output_name = target_name
+
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _arch_loadable_module_source = _target_name + "_arch_loadable_module_source"
+ _arch_loadable_module_target = _target_name + "_arch_loadable_module"
+ _lipo_loadable_module_target = _target_name + "_loadable_module"
+
+ source_set(_arch_loadable_module_source) {
+ forward_variables_from(invoker, [ "deps" ])
+
+ testonly = true
+ visibility = [ ":$_arch_loadable_module_target" ]
+ }
+
+ loadable_module(_arch_loadable_module_target) {
+ testonly = true
+ visibility = [ ":$_lipo_loadable_module_target($default_toolchain)" ]
+ if (current_toolchain != default_toolchain) {
+ visibility += [ ":$_target_name" ]
+ }
+
+ deps = [
+ ":$_arch_loadable_module_source",
+ ]
+ configs += [ "//build/config/ios:xctest_config" ]
+
+ output_dir = "$target_out_dir/$current_cpu"
+ output_name = _output_name
+ output_prefix_override = true
+ output_extension = ""
+ }
+
+ if (current_toolchain != default_toolchain) {
+ # For fat builds, only the default toolchain will generate a test bundle.
+ # For the other toolchains, the template is only used for building the
+ # arch-specific binary, thus the default target is just a group().
+ group(_target_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+ testonly = true
+
+ public_deps = [
+ ":$_arch_loadable_module_target",
+ ]
+ }
+ } else {
+ _info_plist_target = _target_name + "_info_plist"
+ _info_plist_bundle = _target_name + "_info_plist_bundle"
+
+ ios_info_plist(_info_plist_target) {
+ testonly = true
+ visibility = [ ":$_info_plist_bundle" ]
+
+ info_plist = "//build/config/ios/Module-Info.plist"
+ executable_name = _output_name
+
+ if (ios_automatically_manage_certs) {
+ # Use a fixed bundle identifier for EarlGrey tests when using Xcode to
+ # manage the certificates as the number of free certs is limited.
+ extra_substitutions = [
+ "MODULE_BUNDLE_ID=gtest.${ios_generic_test_bundle_id_suffix}-module",
+ ]
+ } else {
+ extra_substitutions = [ "MODULE_BUNDLE_ID=gtest.$_output_name" ]
+ }
+ }
+
+ bundle_data(_info_plist_bundle) {
+ testonly = true
+ visibility = [ ":$_target_name" ]
+
+ public_deps = [
+ ":$_info_plist_target",
+ ]
+
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [
+ "{{bundle_contents_dir}}/Info.plist",
+ ]
+ }
+
+ lipo_binary(_lipo_loadable_module_target) {
+ forward_variables_from(invoker, [ "configs" ])
+
+ testonly = true
+ visibility = [ ":$_target_name" ]
+
+ output_name = _output_name
+ arch_binary_target = ":$_arch_loadable_module_target"
+ arch_binary_output = _output_name
+ }
+
+ _xctest_bundle = _target_name + "_bundle"
+ create_signed_bundle(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "enable_code_signing",
+ "product_type",
+ "xcode_test_application_name",
+ ])
+
+ testonly = true
+ visibility = [ ":$_xctest_bundle" ]
+
+ bundle_extension = ".xctest"
+
+ output_name = _output_name
+ bundle_binary_target = ":$_lipo_loadable_module_target"
+ bundle_binary_output = _output_name
+
+ # Test files need to be known to Xcode for proper indexing and discovery
+ # of test functions for XCTest, but the compilation is done via ninja, so
+ # this hack prevents Xcode from linking the object files itself.
+ xcode_extra_attributes = {
+ OTHER_LDFLAGS = "-help"
+ ONLY_ACTIVE_ARCH = "YES"
+ DEBUG_INFORMATION_FORMAT = "dwarf"
+
+ # For XCUITest, Xcode requires specifying the host application name via
+ # the TEST_TARGET_NAME attribute.
+ if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) {
+ TEST_TARGET_NAME = invoker.xcode_test_application_name
+ }
+
+ # For XCTest, Xcode requires specifying the host application path via
+ # both BUNDLE_LOADER and TEST_HOST attributes.
+ if (invoker.product_type == _ios_xcode_xctest_bundle_id) {
+ BUNDLE_LOADER = "\$(TEST_HOST)"
+ TEST_HOST =
+ "\$(BUILT_PRODUCTS_DIR)/${invoker.xcode_test_application_name}" +
+ ".app/${invoker.xcode_test_application_name}"
+ }
+ }
+
+ deps = [
+ ":$_info_plist_bundle",
+ ]
+ }
+
+ bundle_data(_xctest_bundle) {
+ forward_variables_from(invoker, [ "host_target" ])
+
+ testonly = true
+ visibility = [ ":$host_target" ]
+
+ public_deps = [
+ ":$_target_name",
+ ]
+ sources = [
+ "$root_out_dir/$_output_name.xctest",
+ ]
+ outputs = [
+ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest",
+ ]
+ }
+ }
+}
+
+set_defaults("ios_xctest_bundle") {
+ configs = default_shared_library_configs
+}
+
+# For Chrome on iOS we want to run XCTests for all our build configurations
+# (Debug, Release, ...). In addition, symbol visibility is configured to
+# private by default. To simplify testing with those constraints, our tests
+# are compiled into the TEST_HOST target instead of the .xctest bundle.
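+#
+# Example (hypothetical labels and paths; a sketch only):
+#
+#   ios_xctest_test("ios_chrome_unittests") {
+#     info_plist = "//ios/chrome/test/Info.plist"
+#     deps = [ ":ios_chrome_unittests_sources" ]
+#   }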
+template("ios_xctest_test") {
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _xctest_target = _target_name + "_module"
+ _xctest_output = _output_name + "_module"
+
+ _host_target = _target_name
+ _host_output = _output_name
+
+ _xctest_shell_source_target = _xctest_target + "shell_source"
+ source_set(_xctest_shell_source_target) {
+ sources = [
+ "//build/config/ios/xctest_shell.mm",
+ ]
+
+ configs += [ "//build/config/ios:xctest_config" ]
+ }
+
+ ios_xctest_bundle(_xctest_target) {
+ output_name = _xctest_output
+ product_type = _ios_xcode_xctest_bundle_id
+ host_target = _host_target
+ xcode_test_application_name = _host_output
+
+ deps = [
+ ":$_xctest_shell_source_target",
+ ]
+ }
+
+ ios_app_bundle(_host_target) {
+ forward_variables_from(invoker, "*", [ "testonly" ])
+
+ testonly = true
+ output_name = _host_output
+ configs += [ "//build/config/ios:xctest_config" ]
+
+ if (!defined(invoker.info_plist) && !defined(invoker.info_plist_target)) {
+ info_plist = "//build/config/ios/Host-Info.plist"
+ if (ios_automatically_manage_certs) {
+ # Use the same bundle identifier for EarlGrey tests as for unit tests
+ # when managing certificates as the number of free certs is limited.
+ if (!defined(extra_substitutions)) {
+ extra_substitutions = []
+ }
+ extra_substitutions +=
+ [ "EXECUTABLE_NAME=gtest.${ios_generic_test_bundle_id_suffix}" ]
+ }
+ }
+
+ # Xcode needs these two frameworks installed in the application (and
+ # signed) for the XCTest to run, so install them using
+ # extra_system_frameworks.
+ _ios_platform_library = "$ios_sdk_platform_path/Developer/Library"
+ extra_system_frameworks = [
+ "$_ios_platform_library/Frameworks/XCTest.framework",
+ "$ios_sdk_platform_path/Developer/usr/lib/libXCTestBundleInject.dylib",
+ ]
+
+ _xctest_bundle = _xctest_target + "_bundle"
+ if (current_toolchain == default_toolchain) {
+ if (!defined(bundle_deps)) {
+ bundle_deps = []
+ }
+ bundle_deps += [ ":$_xctest_bundle" ]
+ }
+
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [
+ "-Xlinker",
+ "-rpath",
+ "-Xlinker",
+ "@executable_path/Frameworks",
+ "-Xlinker",
+ "-rpath",
+ "-Xlinker",
+ "@loader_path/Frameworks",
+ ]
+ }
+}
+
+set_defaults("ios_xctest_test") {
+ configs = default_executable_configs
+}
+
+# Template to build a xcuitest test runner bundle.
+#
+# Xcode requires a test runner application with a copy of the XCTest dynamic
+# library bundle in it for the XCUITest to run. The test runner bundle is
+# created by copying the system bundle XCTRunner.app from the Xcode SDK,
+# tweaking its Info.plist, and embedding the xctest bundle; it needs to be
+# code signed in order to run on devices.
+#
+# Arguments
+#
+# xctest_bundle
+# string, name of the dependent xctest bundle target.
+#
+# output_name
+# (optional) string, name of the generated application, if omitted,
+# defaults to the target_name.
+#
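+# Example (hypothetical labels; a sketch matching how ios_xcuitest_test
+# below invokes this template):
+#
+#   ios_xcuitest_test_runner_bundle("chrome_xcuitests_runner") {
+#     output_name = "chrome_xcuitests-Runner"
+#     xctest_bundle = "chrome_xcuitests_module_bundle"
+#   }
+#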
+template("ios_xcuitest_test_runner_bundle") {
+ assert(defined(invoker.xctest_bundle),
+ "xctest_bundle must be defined for $target_name")
+
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _xctrunner_path =
+ "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app"
+
+ _info_plist_merge_plist = _target_name + "_info_plist_merge_plist"
+ _info_plist_target = _target_name + "_info_plist"
+ _info_plist_bundle = _target_name + "_info_plist_bundle"
+
+ action(_info_plist_merge_plist) {
+ testonly = true
+ script = "//build/config/mac/plist_util.py"
+
+ sources = [
+ "$_xctrunner_path/Info.plist",
+
+ # NOTE: The XCTRunnerAddition+Info.plist must come after the Info.plist
+ # because it overrides the values under "CFBundleIdentifier" and
+ # "CFBundleName".
+ "//ios/chrome/app/resources/XCTRunnerAddition+Info.plist",
+ ]
+
+ _output_name = "$target_gen_dir/${_target_name}_merged.plist"
+ outputs = [
+ _output_name,
+ ]
+ args = [
+ "merge",
+ "-f=xml1",
+ "-o=" + rebase_path(_output_name, root_build_dir),
+ ] + rebase_path(sources, root_build_dir)
+ }
+
+ ios_info_plist(_info_plist_target) {
+ testonly = true
+ visibility = [ ":$_info_plist_bundle" ]
+
+ executable_name = _output_name
+ info_plist_target = ":$_info_plist_merge_plist"
+
+ if (ios_automatically_manage_certs) {
+ # Use the same bundle identifier for XCUITest tests as for unit tests
+ # when managing certificates as the number of free certs is limited.
+ extra_substitutions =
+ [ "EXECUTABLE_NAME=gtest.${ios_generic_test_bundle_id_suffix}" ]
+ }
+ }
+
+ bundle_data(_info_plist_bundle) {
+ testonly = true
+ visibility = [ ":$_target_name" ]
+
+ public_deps = [
+ ":$_info_plist_target",
+ ]
+
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [
+ "{{bundle_contents_dir}}/Info.plist",
+ ]
+ }
+
+ _pkginfo_bundle = _target_name + "_pkginfo_bundle"
+ bundle_data(_pkginfo_bundle) {
+ testonly = true
+ visibility = [ ":$_target_name" ]
+
+ sources = [
+ "$_xctrunner_path/PkgInfo",
+ ]
+
+ outputs = [
+ "{{bundle_contents_dir}}/PkgInfo",
+ ]
+ }
+
+ _xctest_bundle = invoker.xctest_bundle
+ create_signed_bundle(_target_name) {
+ testonly = true
+
+ bundle_binary_path = "$_xctrunner_path/XCTRunner"
+ bundle_extension = ".app"
+ product_type = "com.apple.product-type.application"
+
+ output_name = _output_name
+
+ # Xcode needs the following frameworks installed in the application
+ # (and signed) for the XCUITest to run, so install them using
+ # extra_system_frameworks.
+ extra_system_frameworks = [
+ "$ios_sdk_platform_path/Developer/Library/Frameworks/XCTest.framework",
+ "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework",
+ ]
+
+ bundle_deps = []
+ if (defined(invoker.bundle_deps)) {
+ bundle_deps += invoker.bundle_deps
+ }
+ bundle_deps += [
+ ":$_info_plist_bundle",
+ ":$_pkginfo_bundle",
+ ":$_xctest_bundle",
+ ]
+ }
+}
+
+# Template to build a XCUITest that consists of two parts: the test runner
+# application bundle and the xctest dynamic library.
+#
+# Arguments
+#
+# deps:
+# list of labels to depends on, these values are used to create the
+# xctest dynamic library.
+#
+# xcode_test_application_name:
+# string, name of the test application for the ui test target.
+#
+# This template defines two targets: "${target_name}_module", which is the
+# xctest dynamic library, and "${target_name}_runner", which is the test
+# runner application bundle.
+#
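+# Example (hypothetical labels; a sketch only):
+#
+#   ios_xcuitest_test("chrome_xcuitests") {
+#     xcode_test_application_name = "chrome_public_app"
+#     deps = [ ":chrome_xcuitests_sources" ]
+#   }
+#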
+template("ios_xcuitest_test") {
+ assert(defined(invoker.deps), "deps must be defined for $target_name")
+ assert(defined(invoker.xcode_test_application_name),
+ "xcode_test_application_name must be defined for $target_name")
+
+ _xcuitest_target = target_name
+ _xcuitest_runner_target = _xcuitest_target + "_runner"
+ _xcuitest_module_target = _xcuitest_target + "_module"
+
+ group(_xcuitest_target) {
+ testonly = true
+
+ deps = [
+ ":$_xcuitest_runner_target",
+ ]
+ }
+
+ _xcuitest_module_output = _xcuitest_target
+ ios_xctest_bundle(_xcuitest_module_target) {
+ forward_variables_from(invoker, [ "xcode_test_application_name" ])
+
+ product_type = _ios_xcode_xcuitest_bundle_id
+ host_target = _xcuitest_runner_target
+ output_name = _xcuitest_module_output
+
+ deps = invoker.deps
+ }
+
+ _xcuitest_runner_output = _xcuitest_target + "-Runner"
+ ios_xcuitest_test_runner_bundle(_xcuitest_runner_target) {
+ output_name = _xcuitest_runner_output
+ xctest_bundle = _xcuitest_module_target + "_bundle"
+ forward_variables_from(invoker, [ "bundle_deps" ])
+ }
+}
+
+set_defaults("ios_xcuitest_test") {
+ configs = default_executable_configs
+}
diff --git a/deps/v8/build/config/ios/write_framework_hmap.py b/deps/v8/build/config/ios/write_framework_hmap.py
new file mode 100644
index 0000000000..8f6b1439d0
--- /dev/null
+++ b/deps/v8/build/config/ios/write_framework_hmap.py
@@ -0,0 +1,97 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import struct
+import sys
+
+def Main(args):
+ if len(args) < 4:
+ print >> sys.stderr, "Usage: %s output.hmap Foo.framework header1.h..." %\
+ (args[0])
+ return 1
+
+ (out, framework, all_headers) = args[1], args[2], args[3:]
+
+ framework_name = os.path.basename(framework).split('.')[0]
+ all_headers = map(os.path.abspath, all_headers)
+ filelist = {}
+ for header in all_headers:
+ filename = os.path.basename(header)
+ filelist[filename] = header
+ filelist[os.path.join(framework_name, filename)] = header
+ WriteHmap(out, filelist)
+ return 0
+
+
+def NextGreaterPowerOf2(x):
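+ # For example, NextGreaterPowerOf2(5) == 8 and NextGreaterPowerOf2(8) == 16.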
+ return 2**(x).bit_length()
+
+
+def WriteHmap(output_name, filelist):
+ """Generates a header map based on |filelist|.
+
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
+
+ The implementation below and the comment above come from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
+ magic = 1751998832
+ version = 1
+ _reserved = 0
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+ max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
+
+ out = open(output_name, 'wb')
+ out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+ count, capacity, max_value_length))
+
+ # Create empty hashmap buckets.
+ buckets = [None] * capacity
+ for file, path in filelist.items():
+ key = 0
+ for c in file:
+ key += ord(c.lower()) * 13
+
+ # Fill next empty bucket.
+ while buckets[key & capacity - 1] is not None:
+ key = key + 1
+ buckets[key & capacity - 1] = (file, path)
+
+ next_offset = 1
+ for bucket in buckets:
+ if bucket is None:
+ out.write(struct.pack('<LLL', 0, 0, 0))
+ else:
+ (file, path) = bucket
+ key_offset = next_offset
+ prefix_offset = key_offset + len(file) + 1
+ suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+ next_offset = suffix_offset + len(os.path.basename(path)) + 1
+ out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
+
+ # Pad byte since next offset starts at 1.
+ out.write(struct.pack('<x'))
+
+ for bucket in buckets:
+ if bucket is not None:
+ (file, path) = bucket
+ out.write(struct.pack('<%ds' % len(file), file))
+ out.write(struct.pack('<s', '\0'))
+ base = os.path.dirname(path) + os.sep
+ out.write(struct.pack('<%ds' % len(base), base))
+ out.write(struct.pack('<s', '\0'))
+ path = os.path.basename(path)
+ out.write(struct.pack('<%ds' % len(path), path))
+ out.write(struct.pack('<s', '\0'))
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
diff --git a/deps/v8/build/config/ios/write_framework_modulemap.py b/deps/v8/build/config/ios/write_framework_modulemap.py
new file mode 100644
index 0000000000..b6da571282
--- /dev/null
+++ b/deps/v8/build/config/ios/write_framework_modulemap.py
@@ -0,0 +1,26 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(framework):
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+ module_path = os.path.join(framework, 'Modules')
+ if not os.path.exists(module_path):
+ os.mkdir(module_path)
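+ # For a framework binary named "Foo", the template below produces:
+ #
+ #   framework module Foo {
+ #     umbrella header "Foo.h"
+ #
+ #     export *
+ #     module * { export * }
+ #   }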
+ module_template = 'framework module %s {\n' \
+ ' umbrella header "%s.h"\n' \
+ '\n' \
+ ' export *\n' \
+ ' module * { export * }\n' \
+ '}\n' % (binary, binary)
+
+ module_file = open(os.path.join(module_path, 'module.modulemap'), 'w')
+ module_file.write(module_template)
+ module_file.close()
+
+if __name__ == '__main__':
+ Main(sys.argv[1])
diff --git a/deps/v8/build/config/ios/xctest_shell.mm b/deps/v8/build/config/ios/xctest_shell.mm
new file mode 100644
index 0000000000..dcf5bad5e7
--- /dev/null
+++ b/deps/v8/build/config/ios/xctest_shell.mm
@@ -0,0 +1,19 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+#import <XCTest/XCTest.h>
+
+// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all
+// our build configurations (Debug, Release, ...). In addition, symbol
+// visibility is configured to private by default. To simplify testing with
+// those constraints, our tests are compiled into the TEST_HOST target instead
+// of the .xctest bundles, which all link against this single file (it is just
+// there to ensure that the bundle is not empty).
+
+@interface XCTestShellEmptyClass : NSObject
+@end
+
+@implementation XCTestShellEmptyClass
+@end
diff --git a/deps/v8/build/config/jumbo.gni b/deps/v8/build/config/jumbo.gni
new file mode 100644
index 0000000000..be90a7591f
--- /dev/null
+++ b/deps/v8/build/config/jumbo.gni
@@ -0,0 +1,313 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/split_static_library.gni") # When someone uses that target_type
+import("//build/toolchain/goma.gni")
+
+declare_args() {
+ # If true, use a jumbo build (files compiled together) to speed up
+ # compilation.
+ use_jumbo_build = false
+
+ # A list of build targets to exclude from jumbo builds, for optimal
+ # round trip time when frequently changing a set of cpp files. The
+ # targets can be just the short name (in which case it matches any
+ # target with that name), a directory prefixed with the root
+ # specifier //, or a full build target label.
+ #
+ # Example:
+ # These would all exclude the "browser" target in a file
+ # content/browser/BUILD.gn, and potentially more.
+ #
+ # jumbo_build_excluded = [ "browser" ]
+ # jumbo_build_excluded = [ "//content/browser" ]
+ # jumbo_build_excluded = [ "//content/browser:browser" ]
+ jumbo_build_excluded = []
+
+ # How many files to group on average. Smaller numbers give more
+ # parallelism, higher numbers give less total CPU usage. Higher
+ # numbers also give longer single-file recompilation times.
+ #
+ # Recommendations:
+ # Numbers higher than 100 do not reduce wall clock compile times
+ # even for 4 cores or fewer, so there is no reason to go above 100.
+ # Going from 50 to 100 with a 4 core CPU saves about 3% CPU time and
+ # 3% wall clock time in a tree with blink, v8 and content
+ # jumbofied. At the same time it increases the compile time for the
+ # largest jumbo chunks by 10-20% and reduces the chance to use all
+ # available CPU cores. So the default is set to 50 to balance
+ # high-core and low-core build performance. -1 means use the default,
+ # which varies depending on whether goma is enabled.
+ jumbo_file_merge_limit = -1
+}
+
+# Normal builds benefit from lots of jumbification
+jumbo_file_merge_default = 50
+
+# Goma builds benefit from more parallelism
+jumbo_file_merge_goma = 8
+
+# Use one of the targets jumbo_source_set, jumbo_static_library,
+# jumbo_split_static_library or jumbo_component to generate a target
+# which merges sources if possible to compile much faster.
+#
+# Special values.
+#
+# target_type
+# The kind of target to build. For example the string
+# "static_library".
+#
+# always_build_jumbo
+# If defined and set to true, use jumbo compilation even when it is
+# globally disabled. Otherwise it has no effect.
+#
+# never_build_jumbo
+# If defined and set to true, do not use jumbo compilation even if it is
+# globally enabled. Otherwise it has no effect.
+#
+# jumbo_excluded_sources
+# If set to a list of files, those files will not be merged with
+# the rest. This can be necessary if merging the files causes
+# compilation issues and fixing the issues is impractical.
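+#
+# Example (hypothetical labels and file names; a sketch only):
+#
+#   jumbo_static_library("browser") {
+#     sources = [ "a.cc", "b.cc", "c.cc" ]
+#     jumbo_excluded_sources = [ "c.cc" ]  # e.g. c.cc breaks when merged.
+#   }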
+template("internal_jumbo_target") {
+ use_jumbo_build_for_target = use_jumbo_build
+ if (defined(invoker.always_build_jumbo) && invoker.always_build_jumbo) {
+ use_jumbo_build_for_target = true
+ }
+ if (defined(invoker.never_build_jumbo) && invoker.never_build_jumbo) {
+ use_jumbo_build_for_target = false
+ }
+
+ foreach(excluded_target, jumbo_build_excluded) {
+ if (excluded_target == target_name ||
+ excluded_target == get_label_info(":" + target_name, "dir") ||
+ excluded_target ==
+ get_label_info(":" + target_name, "label_no_toolchain")) {
+ use_jumbo_build_for_target = false
+ }
+ }
+
+ excluded_sources = []
+ if (defined(invoker.jumbo_excluded_sources)) {
+ excluded_sources = invoker.jumbo_excluded_sources
+ }
+
+ if (defined(invoker.sources)) {
+ invoker_sources = invoker.sources
+ } else {
+ invoker_sources = []
+ }
+
+ gen_target_dir = invoker.target_gen_dir
+
+ not_needed([ "gen_target_dir" ]) # Prevent "unused variable".
+
+ if (use_jumbo_build_for_target) {
+ jumbo_files = []
+
+ # Split the sources list into chunks that are not excessively large
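+ # For example (hypothetical), with merge_limit = 2 and sources
+ # [ "a.cc", "b.cc", "c.cc" ], the loop below allocates <target>_jumbo_1.cc
+ # (for a.cc and b.cc) and <target>_jumbo_2.cc (for c.cc); the merge
+ # action then fills each jumbo file with the merged sources.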
+ current_file_index = 0
+ next_chunk_start = 0
+ next_chunk_number = 1
+ merge_limit = jumbo_file_merge_limit
+ if (merge_limit == -1) {
+ if (use_goma) {
+ merge_limit = jumbo_file_merge_goma
+ } else {
+ merge_limit = jumbo_file_merge_default
+ }
+ }
+ has_c_file = false
+ has_objective_c_file = false
+ sources_in_jumbo_files = []
+ assert(merge_limit > 0)
+ foreach(source_file, invoker_sources) {
+ source_ext = get_path_info(source_file, "extension")
+ is_source_file = true
+ if (source_ext == "c") {
+ has_c_file = true
+ } else if (source_ext == "mm") {
+ has_objective_c_file = true
+ } else if (source_ext == "cc" || source_ext == "cpp") {
+ if (current_file_index == next_chunk_start) {
+ jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_" +
+ next_chunk_number + ".cc" ]
+ next_chunk_number += 1
+ next_chunk_start += merge_limit
+ }
+ current_file_index += 1
+ } else {
+ is_source_file = false
+ }
+ if (is_source_file) {
+ sources_in_jumbo_files += [ source_file ]
+ }
+ }
+
+ if (jumbo_files == [] || current_file_index == 1) {
+ # Empty sources list or a sources list with only header files or
+ # at most one non-header file.
+ use_jumbo_build_for_target = false
+ not_needed([
+ "sources_in_jumbo_files",
+ "current_file_index",
+ "next_chunk_start",
+ "next_chunk_number",
+ ])
+ }
+
+ if (has_c_file) {
+ jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_c.c" ]
+ }
+ if (has_objective_c_file) {
+ jumbo_files += [ "$gen_target_dir/" + target_name + "_jumbo_mm.mm" ]
+ }
+ }
+
+ if (use_jumbo_build_for_target) {
+ merge_action_name = target_name + "__jumbo_merge"
+ sources_in_jumbo_files -= excluded_sources
+
+ # Create an action that calls a script that merges all the source files.
+ action(merge_action_name) {
+ script = "//build/config/merge_for_jumbo.py"
+ response_file_contents =
+ rebase_path(sources_in_jumbo_files, root_build_dir)
+ outputs = jumbo_files
+ args = [ "--outputs" ] + rebase_path(outputs, root_build_dir) +
+ [ "--file-list={{response_file_name}}" ]
+
+ # For the "gn analyze" step to work, gn needs to know about the
+ # original source files. They can't be in |sources| because then
+ # they will be compiled, so they have to be somewhere else where
+ # gn analyze looks. One alternative is the |data| list but that
+ # will affect test packaging with known bad effects on
+ # distributed testing. Putting them in this action's input list
+ # is the least bad place.
+ inputs = []
+ foreach(f, invoker_sources - excluded_sources) {
+ # Skip generated files and non-source files.
+ in_source_tree = string_replace(rebase_path(f),
+ rebase_path(root_out_dir),
+ "dummy") == rebase_path(f)
+ is_source_file = get_path_info(f, "extension") == "cc" ||
+ get_path_info(f, "extension") == "cpp" ||
+ get_path_info(f, "extension") == "c" ||
+ get_path_info(f, "extension") == "mm"
+ if (in_source_tree && is_source_file) {
+ inputs += [ f ]
+ }
+ }
+ }
+ } else {
+ # If the list subtraction triggers a gn error,
+ # jumbo_excluded_sources lists a file that is not in sources.
+ sources_after_exclusion = invoker_sources - excluded_sources
+ not_needed([ "sources_after_exclusion" ])
+ }
+
+ target_type = invoker.target_type
+ if (use_jumbo_build_for_target && target_type == "split_static_library") {
+ # Splitting is meaningless after jumbo merging (and impossible if
+ # split_count > len(jumbo_files)), so fall back to static_library.
+ target_type = "static_library"
+ not_needed(invoker, [ "split_count" ])
+ }
+
+ # Perform the actual operation, either on the original sources or
+ # the sources post-jumbo merging.
+ target(target_type, target_name) {
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ # Take everything else not handled above from the invoker.
+ variables_to_not_forward = [ "deps" ]
+ if (use_jumbo_build_for_target) {
+ deps += [ ":" + merge_action_name ]
+ variables_to_not_forward += [ "sources" ]
+ assert(jumbo_files != [])
+ set_sources_assignment_filter([]) # Prefiltered.
+ sources = invoker_sources - sources_in_jumbo_files + jumbo_files
+
+ # Change include_dirs to make sure that the jumbo file can find its
+ # #included files.
+ variables_to_not_forward += [ "include_dirs" ]
+ include_dirs = []
+ if (defined(invoker.include_dirs)) {
+ include_dirs = invoker.include_dirs
+ }
+ include_dirs += [ root_build_dir ]
+ }
+ forward_variables_from(invoker, "*", variables_to_not_forward)
+ }
+}
+
+# See documentation above by "internal_jumbo_target".
+template("jumbo_source_set") {
+ internal_jumbo_target(target_name) {
+ target_type = "source_set"
+ forward_variables_from(invoker, "*")
+ }
+}
+
+set_defaults("jumbo_source_set") {
+ # This sets the default list of configs when the jumbo_source_set target
+ # is defined. The default_compiler_configs comes from BUILDCONFIG.gn and
+ # is the list normally applied to static libraries and source sets.
+ configs = default_compiler_configs
+}
+
+# See documentation above by "internal_jumbo_target".
+template("jumbo_static_library") {
+ internal_jumbo_target(target_name) {
+ target_type = "static_library"
+ forward_variables_from(invoker, "*")
+ }
+}
+
+set_defaults("jumbo_static_library") {
+ # This sets the default list of configs when the jumbo_static_library target
+ # is defined. The default_compiler_configs comes from BUILDCONFIG.gn and
+ # is the list normally applied to static libraries and source sets.
+ configs = default_compiler_configs
+}
+
+# See documentation above by "internal_jumbo_target".
+template("jumbo_split_static_library") {
+ internal_jumbo_target(target_name) {
+ target_type = "split_static_library"
+ forward_variables_from(invoker, "*")
+ }
+}
+
+set_defaults("jumbo_split_static_library") {
+ # This sets the default list of configs when the
+ # jumbo_split_static_library target is defined. The
+ # default_compiler_configs comes from BUILDCONFIG.gn and is the list
+ # normally applied to static libraries and source sets.
+ configs = default_compiler_configs
+}
+
+# See documentation above by "internal_jumbo_target".
+template("jumbo_component") {
+ internal_jumbo_target(target_name) {
+ target_type = "component"
+ forward_variables_from(invoker, "*")
+ }
+}
+
+set_defaults("jumbo_component") {
+ # This sets the default list of configs when the jumbo_component
+ # target is defined. This code is a clone of set_defaults for the
+ # ordinary "component" template.
+ if (is_component_build) {
+ configs = default_shared_library_configs
+ if (is_android) {
+ configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+ }
+ } else {
+ configs = default_compiler_configs
+ }
+}
diff --git a/deps/v8/build/config/linux/BUILD.gn b/deps/v8/build/config/linux/BUILD.gn
new file mode 100644
index 0000000000..6ca9b6a934
--- /dev/null
+++ b/deps/v8/build/config/linux/BUILD.gn
@@ -0,0 +1,110 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+group("linux") {
+ visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Linux-only. This is not applied to Android, but is applied to ChromeOS.
+config("compiler") {
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Linux-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
+ # OS_LINUX and the like.
+ if (is_chromeos) {
+ defines = [ "OS_CHROMEOS" ]
+ }
+
+ if ((!is_chromeos || default_toolchain != "//build/toolchain/cros:target") &&
+ (!use_custom_libcxx || current_cpu == "mipsel")) {
+ libs = [ "atomic" ]
+ }
+}
+
+config("x11") {
+ libs = [
+ "X11",
+ "X11-xcb",
+ "xcb",
+ "Xcomposite",
+ "Xcursor",
+ "Xdamage",
+ "Xext",
+ "Xfixes",
+ "Xi",
+ "Xrender",
+ "Xtst",
+ ]
+}
+
+config("xcomposite") {
+ libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+ libs = [ "Xext" ]
+}
+
+config("xrandr") {
+ libs = [ "Xrandr" ]
+}
+
+config("xscrnsaver") {
+ libs = [ "Xss" ]
+}
+
+config("xfixes") {
+ libs = [ "Xfixes" ]
+}
+
+config("libcap") {
+ libs = [ "cap" ]
+}
+
+config("xi") {
+ libs = [ "Xi" ]
+}
+
+config("xtst") {
+ libs = [ "Xtst" ]
+}
+
+config("libresolv") {
+ libs = [ "resolv" ]
+}
+
+if (use_glib) {
+ pkg_config("glib") {
+ packages = [
+ "glib-2.0",
+ "gmodule-2.0",
+ "gobject-2.0",
+ "gthread-2.0",
+ ]
+ defines = [
+ "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_32",
+ "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26",
+ ]
+ }
+}
+
+# Ensures all exported symbols are added to the dynamic symbol table. This is
+# necessary to expose Chrome's custom operator new() and operator delete() (and
+# other memory-related symbols) to libraries. Otherwise, they might
+# (de)allocate memory on a different heap, which would spell trouble if pointers
+# to heap-allocated memory are passed over shared library boundaries.
+config("export_dynamic") {
+ ldflags = [ "-rdynamic" ]
+}
diff --git a/deps/v8/build/config/linux/OWNERS b/deps/v8/build/config/linux/OWNERS
new file mode 100644
index 0000000000..280ba478dc
--- /dev/null
+++ b/deps/v8/build/config/linux/OWNERS
@@ -0,0 +1 @@
+thomasanderson@chromium.org
diff --git a/deps/v8/build/config/linux/atk/BUILD.gn b/deps/v8/build/config/linux/atk/BUILD.gn
new file mode 100644
index 0000000000..afc811da11
--- /dev/null
+++ b/deps/v8/build/config/linux/atk/BUILD.gn
@@ -0,0 +1,59 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+# CrOS doesn't install GTK or any gnome packages.
+assert(!is_chromeos)
+
+# These packages should _only_ be expected when building for a target.
+assert(current_toolchain == default_toolchain)
+
+if (use_atk) {
+ assert(use_glib, "use_atk=true requires that use_glib=true")
+}
+
+pkg_config("atk_base") {
+ packages = [
+ "atk",
+ "atk-bridge-2.0",
+ ]
+ atk_lib_dir = exec_script(pkg_config_script,
+ pkg_config_args + [
+ "--libdir",
+ "atk",
+ ],
+ "string")
+ defines = [
+ "ATK_LIB_DIR=\"$atk_lib_dir\"",
+ "USE_ATK_BRIDGE",
+
+ # The AtkValue interface methods we implement and test have been deprecated
+ # in favor of new API. But that new API cannot be used until corresponding
+ # support has been added to AT-SPI2 and we stop supporting earlier versions
+ # of ATK (i.e. < 2.12).
+ "ATK_DISABLE_DEPRECATION_WARNINGS",
+ ]
+}
+
+# gn orders flags on a target before flags from configs. The default config
+# adds -Wall, and these flags have to be after -Wall -- so they need to
+# come from a config and can't be on the target directly.
+config("atk") {
+ configs = [ ":atk_base" ]
+
+ cflags = [
+ # G_DEFINE_TYPE automatically generates a *get_instance_private inline
+ # function since glib 2.37. That function is unused, so silence the
+ # warning about it.
+ "-Wno-unused-function",
+ ]
+
+ if (is_clang) {
+ # glib uses the pre-c++11 typedef-as-static_assert hack.
+ cflags += [ "-Wno-unused-local-typedef" ]
+ }
+}
diff --git a/deps/v8/build/config/linux/atspi2/BUILD.gn b/deps/v8/build/config/linux/atspi2/BUILD.gn
new file mode 100644
index 0000000000..988a995681
--- /dev/null
+++ b/deps/v8/build/config/linux/atspi2/BUILD.gn
@@ -0,0 +1,29 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+# These packages should _only_ be expected when building for a target.
+assert(current_toolchain == default_toolchain)
+
+if (use_atk) {
+ pkg_config("atspi2") {
+ packages = [ "atspi-2" ]
+ atspi_version = exec_script(pkg_config_script,
+ pkg_config_args + [
+ "atspi-2",
+ "--version-as-components",
+ ],
+ "value")
+ atspi_major_version = atspi_version[0]
+ atspi_minor_version = atspi_version[1]
+ atspi_micro_version = atspi_version[2]
+ defines = [
+ "ATSPI_MAJOR_VERSION=$atspi_major_version",
+ "ATSPI_MINOR_VERSION=$atspi_minor_version",
+ "ATSPI_MICRO_VERSION=$atspi_micro_version",
+ ]
+ }
+}
diff --git a/deps/v8/build/config/linux/dbus/BUILD.gn b/deps/v8/build/config/linux/dbus/BUILD.gn
new file mode 100644
index 0000000000..f11cf7101c
--- /dev/null
+++ b/deps/v8/build/config/linux/dbus/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(use_dbus)
+
+# Note: if your target also depends on //dbus, you don't need to add this
+# config (it will get added automatically if you depend on //dbus).
+pkg_config("dbus") {
+ packages = [ "dbus-1" ]
+}
diff --git a/deps/v8/build/config/linux/dri/BUILD.gn b/deps/v8/build/config/linux/dri/BUILD.gn
new file mode 100644
index 0000000000..cad883b76d
--- /dev/null
+++ b/deps/v8/build/config/linux/dri/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+pkg_config("dri") {
+ packages = [ "dri" ]
+ dri_driver_dir = exec_script(pkg_config_script,
+ pkg_config_args + [
+ "--dridriverdir",
+ "dri",
+ ],
+ "string")
+ defines = [ "DRI_DRIVER_DIR=\"$dri_driver_dir\"" ]
+}
diff --git a/deps/v8/build/config/linux/gtk/BUILD.gn b/deps/v8/build/config/linux/gtk/BUILD.gn
new file mode 100644
index 0000000000..d78f7407c1
--- /dev/null
+++ b/deps/v8/build/config/linux/gtk/BUILD.gn
@@ -0,0 +1,53 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/gtk/gtk.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk_internal_config") {
+ # Gtk requires gmodule, but on some misconfigured systems it is not
+ # listed as a dependency, so request it explicitly.
+ packages = [
+ "gmodule-2.0",
+ "gtk+-${gtk_version}.0",
+ "gthread-2.0",
+ ]
+}
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK are whitelisted on this target.
+group("gtk") {
+ visibility = [
+ "//chrome/test:interactive_ui_tests",
+ "//chrome/test:unit_tests",
+ "//examples:peerconnection_client",
+ "//gpu/gles2_conform_support:gles2_conform_test_windowless",
+ "//remoting/host/linux",
+ "//remoting/host/it2me:common",
+ "//remoting/host/it2me:remote_assistance_host",
+ "//remoting/host:common",
+ "//remoting/host/file_transfer",
+ "//remoting/host:remoting_me2me_host_static",
+ "//remoting/test:it2me_standalone_host_main",
+ "//webrtc/examples:peerconnection_client",
+ "//chrome/browser/ui/libgtkui:*",
+ ]
+
+ public_configs = [ ":gtk_internal_config" ]
+}
+
+# Depend on "gtkprint" to get this.
+pkg_config("gtkprint_internal_config") {
+ packages = [ "gtk+-unix-print-${gtk_version}.0" ]
+}
+
+group("gtkprint") {
+ visibility = [ "//chrome/browser/ui/libgtkui:*" ]
+ public_configs = [ ":gtkprint_internal_config" ]
+}
diff --git a/deps/v8/build/config/linux/gtk/gtk.gni b/deps/v8/build/config/linux/gtk/gtk.gni
new file mode 100644
index 0000000000..1e0e6ace0f
--- /dev/null
+++ b/deps/v8/build/config/linux/gtk/gtk.gni
@@ -0,0 +1,10 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # The (major) version of GTK to build against.
+ gtk_version = 3
+}
+
+assert(gtk_version >= 3 && gtk_version <= 4)
diff --git a/deps/v8/build/config/linux/libdrm/BUILD.gn b/deps/v8/build/config/linux/libdrm/BUILD.gn
new file mode 100644
index 0000000000..daebcfd3a4
--- /dev/null
+++ b/deps/v8/build/config/linux/libdrm/BUILD.gn
@@ -0,0 +1,33 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux)
+
+declare_args() {
+ # Controls whether the build should use the version of the libdrm
+ # library shipped with the system. In release builds of Chrome OS we
+ # use the system version, but when building on dev workstations we
+ # bundle it because Ubuntu doesn't ship a usable version.
+ # Chromecast will use this as well.
+ use_system_libdrm = false
+}
+
+if (use_system_libdrm) {
+ pkg_config("libdrm_config") {
+ packages = [ "libdrm" ]
+ }
+ group("libdrm") {
+ public_configs = [ ":libdrm_config" ]
+ }
+} else {
+ group("libdrm") {
+ public_deps = [
+ "//third_party/libdrm",
+ ]
+ }
+ config("libdrm_exynos_include_config") {
+ include_dirs = [ "//third_party/libdrm/src/exynos" ]
+ }
+}
diff --git a/deps/v8/build/config/linux/libffi/BUILD.gn b/deps/v8/build/config/linux/libffi/BUILD.gn
new file mode 100644
index 0000000000..a4041727b1
--- /dev/null
+++ b/deps/v8/build/config/linux/libffi/BUILD.gn
@@ -0,0 +1,9 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("libffi") {
+ packages = [ "libffi" ]
+}
diff --git a/deps/v8/build/config/linux/libva/BUILD.gn b/deps/v8/build/config/linux/libva/BUILD.gn
new file mode 100644
index 0000000000..f350a0d6f1
--- /dev/null
+++ b/deps/v8/build/config/linux/libva/BUILD.gn
@@ -0,0 +1,13 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+pkg_config("libva") {
+ packages = [ "libva" ]
+ # Do not use exec_script to check the version here. It is done with a
+ # static_assert instead.
+}
diff --git a/deps/v8/build/config/linux/nss/BUILD.gn b/deps/v8/build/config/linux/nss/BUILD.gn
new file mode 100644
index 0000000000..5788f3e5d6
--- /dev/null
+++ b/deps/v8/build/config/linux/nss/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (is_linux) {
+ # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
+ # library but the system NSS libraries. Non-Linux platforms using NSS use the
+ # hermetic one in //third_party/nss.
+ #
+ # Generally you should depend on //crypto:platform instead of using this
+ # config since that will properly pick up NSS or OpenSSL depending on
+ # platform and build config.
+ pkg_config("system_nss_no_ssl_config") {
+ packages = [ "nss" ]
+ extra_args = [
+ "-v",
+ "-lssl3",
+ ]
+ }
+}
diff --git a/deps/v8/build/config/linux/pangocairo/BUILD.gn b/deps/v8/build/config/linux/pangocairo/BUILD.gn
new file mode 100644
index 0000000000..ddcc754bbd
--- /dev/null
+++ b/deps/v8/build/config/linux/pangocairo/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pangocairo/pangocairo.gni")
+import("//build/config/linux/pkg_config.gni")
+
+if (use_pangocairo) {
+ pkg_config("pangocairo") {
+ packages = [ "pangocairo" ]
+
+ # We don't want pkg-config for pangocairo to explicitly request that
+ # FreeType gets linked, because we control which FreeType to link to.
+ extra_args = [
+ "-v",
+ "freetype",
+ ]
+ }
+}
diff --git a/deps/v8/build/config/linux/pangocairo/pangocairo.gni b/deps/v8/build/config/linux/pangocairo/pangocairo.gni
new file mode 100644
index 0000000000..ca99445b96
--- /dev/null
+++ b/deps/v8/build/config/linux/pangocairo/pangocairo.gni
@@ -0,0 +1,7 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ui.gni")
+
+use_pangocairo = is_linux && !use_ozone
diff --git a/deps/v8/build/config/linux/pkg-config.py b/deps/v8/build/config/linux/pkg-config.py
new file mode 100755
index 0000000000..e2bf7666b2
--- /dev/null
+++ b/deps/v8/build/config/linux/pkg-config.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+# depending on whether the systemroot is for a 32 or 64 bit architecture. They
+# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
+# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates this
+# variable to this script with the "--system_libdir <system_libdir>" flag. If no
+# flag is provided, then pkgconfig files are assumed to come from
+# <systemroot>/usr/lib/pkgconfig.
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
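+#
+# Example (a hypothetical invocation; shown only as a sketch):
+#   pkg-config.py -s /path/to/sysroot -a x64 -v '-lgthread.*' glib-2.0
+# might print something like:
+#   [["/path/to/sysroot/usr/include/glib-2.0"], [], ["glib-2.0"], []]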
+
+
+def SetConfigPath(options):
+ """Set the PKG_CONFIG_LIBDIR environment variable.
+
+ This takes into account any sysroot and architecture specification from the
+ options on the given command line.
+ """
+
+ sysroot = options.sysroot
+ assert sysroot
+
+ # Compute the library path name based on the architecture.
+ arch = options.arch
+ if sysroot and not arch:
+ print "You must specify an architecture via -a if using a sysroot."
+ sys.exit(1)
+
+ libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
+ libdir += ':' + sysroot + '/usr/share/pkgconfig'
+ os.environ['PKG_CONFIG_LIBDIR'] = libdir
+ return libdir
+
+
+def GetPkgConfigPrefixToStrip(options, args):
+ """Returns the prefix from pkg-config where packages are installed.
+
+ This returned prefix is the one that should be stripped from the beginning of
+ directory names to take into account sysroots.
+ """
+ # Some sysroots, like the Chromium OS ones, may generate paths that are not
+ # relative to the sysroot. For example,
+ # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+ # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+ # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
+ # To support this correctly, it's necessary to extract the prefix to strip
+ # from pkg-config's |prefix| variable.
+ prefix = subprocess.check_output([options.pkg_config,
+ "--variable=prefix"] + args, env=os.environ)
+ # check_output returns the value with a trailing newline; strip it, then
+ # compare the suffix (prefix[-4] was a single character and could never
+ # equal '/usr') and drop the trailing '/usr' as the comment describes.
+ prefix = prefix.strip()
+ if prefix[-4:] == '/usr':
+ return prefix[:-4]
+ return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+ """Returns true if the first argument matches any regular expression in the
+ given list."""
+ for regexp in list_of_regexps:
+ if regexp.search(flag) != None:
+ return True
+ return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+ """Rewrites a path by stripping the prefix and prepending the sysroot."""
+ if os.path.isabs(path) and not path.startswith(sysroot):
+ if path.startswith(strip_prefix):
+ path = path[len(strip_prefix):]
+ path = path.lstrip('/')
+ return os.path.join(sysroot, path)
+ else:
+ return path
+
+
+def main():
+ # If this is run on non-Linux platforms, just return nothing and indicate
+ # success. This allows us to "kind of emulate" a Linux build from other
+ # platforms.
+ if "linux" not in sys.platform:
+ print "[[],[],[],[],[]]"
+ return 0
+
+ parser = OptionParser()
+ parser.add_option('-d', '--debug', action='store_true')
+ parser.add_option('-p', action='store', dest='pkg_config', type='string',
+ default='pkg-config')
+ parser.add_option('-v', action='append', dest='strip_out', type='string')
+ parser.add_option('-s', action='store', dest='sysroot', type='string')
+ parser.add_option('-a', action='store', dest='arch', type='string')
+ parser.add_option('--system_libdir', action='store', dest='system_libdir',
+ type='string', default='lib')
+ parser.add_option('--atleast-version', action='store',
+ dest='atleast_version', type='string')
+ parser.add_option('--libdir', action='store_true', dest='libdir')
+ parser.add_option('--dridriverdir', action='store_true', dest='dridriverdir')
+ parser.add_option('--version-as-components', action='store_true',
+ dest='version_as_components')
+ (options, args) = parser.parse_args()
+
+ # Make a list of regular expressions to strip out.
+ strip_out = []
+ if options.strip_out != None:
+ for regexp in options.strip_out:
+ strip_out.append(re.compile(regexp))
+
+ if options.sysroot:
+ libdir = SetConfigPath(options)
+ if options.debug:
+ sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
+ prefix = GetPkgConfigPrefixToStrip(options, args)
+ else:
+ prefix = ''
+
+ if options.atleast_version:
+ # When asking for the return value, just run pkg-config and print the return
+ # value, no need to do other work.
+ if not subprocess.call([options.pkg_config,
+ "--atleast-version=" + options.atleast_version] +
+ args):
+ print "true"
+ else:
+ print "false"
+ return 0
+
+ if options.version_as_components:
+ cmd = [options.pkg_config, "--modversion"] + args
+ try:
+ version_string = subprocess.check_output(cmd)
+ except:
+ sys.stderr.write('Error from pkg-config.\n')
+ return 1
+ print json.dumps(list(map(int, version_string.strip().split("."))))
+ return 0
+
+
+ if options.libdir:
+ cmd = [options.pkg_config, "--variable=libdir"] + args
+ if options.debug:
+ sys.stderr.write('Running: %s\n' % cmd)
+ try:
+ libdir = subprocess.check_output(cmd)
+ except:
+ print "Error from pkg-config."
+ return 1
+ sys.stdout.write(libdir.strip())
+ return 0
+
+ if options.dridriverdir:
+ cmd = [options.pkg_config, "--variable=dridriverdir"] + args
+ if options.debug:
+ sys.stderr.write('Running: %s\n' % cmd)
+ try:
+ dridriverdir = subprocess.check_output(cmd)
+ except:
+ print "Error from pkg-config."
+ return 1
+ sys.stdout.write(dridriverdir.strip())
+ return 0
+
+ cmd = [options.pkg_config, "--cflags", "--libs"] + args
+ if options.debug:
+ sys.stderr.write('Running: %s\n' % ' '.join(cmd))
+
+ try:
+ flag_string = subprocess.check_output(cmd)
+ except:
+ sys.stderr.write('Could not run pkg-config.\n')
+ return 1
+
+ # For now just split on spaces to get the args out. This will break if
+ # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+ # to happen in practice.
+ all_flags = flag_string.strip().split(' ')
+
+
+ sysroot = options.sysroot
+ if not sysroot:
+ sysroot = ''
+
+ includes = []
+ cflags = []
+ libs = []
+ lib_dirs = []
+
+ for flag in all_flags[:]:
+ if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+ continue
+
+ if flag[:2] == '-l':
+ libs.append(RewritePath(flag[2:], prefix, sysroot))
+ elif flag[:2] == '-L':
+ lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+ elif flag[:2] == '-I':
+ includes.append(RewritePath(flag[2:], prefix, sysroot))
+ elif flag[:3] == '-Wl':
+ # Don't allow libraries to control ld flags. These should be specified
+ # only in build files.
+ pass
+ elif flag == '-pthread':
+ # Many libs specify "-pthread" which we don't need since we always include
+ # this anyway. Removing it here prevents a bunch of duplicate inclusions
+ # on the command line.
+ pass
+ else:
+ cflags.append(flag)
+
+  # Output a GN array: the first element is the include dirs, the second the
+  # cflags, the third the libs, and the fourth the lib_dirs. The JSON
+  # formatter prints GN-compatible lists when everything is a list of strings.
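+  # For example, a package whose pkg-config output is
+  # "-I/usr/include/glib-2.0 -pthread -lglib-2.0" would (illustratively,
+  # with no sysroot) print: [["/usr/include/glib-2.0"], [], ["glib-2.0"], []]
+  # (-pthread is dropped, and there are no -L flags or extra cflags).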
+ print json.dumps([includes, cflags, libs, lib_dirs])
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/config/linux/pkg_config.gni b/deps/v8/build/config/linux/pkg_config.gni
new file mode 100644
index 0000000000..428e44ac0a
--- /dev/null
+++ b/deps/v8/build/config/linux/pkg_config.gni
@@ -0,0 +1,128 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This is useful for
+# passing defines that the library expects users of its headers to set.
+#
+# Example:
+# pkg_config("mything") {
+# packages = [ "mything1", "mything2" ]
+# defines = [ "ENABLE_AWESOME" ]
+# }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v", "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+# ignore_libs = true
+
+declare_args() {
+ # A pkg-config wrapper to call instead of trying to find and call the right
+ # pkg-config directly. Wrappers like this are common in cross-compilation
+ # environments.
+ # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+ # the sysroot mechanism to find the right .pc files.
+ pkg_config = ""
+
+  # An optional pkg-config wrapper to use for tools built on the host.
+ host_pkg_config = ""
+
+  # CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+  # and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+  # depending on whether the systemroot is for a 32- or 64-bit architecture.
+  #
+  # When building under GYP, CrOS board builds specify the 'system_libdir'
+  # variable as part of the GYP_DEFINES provided by the CrOS emerge build or
+  # simple chrome build scheme. This variable permits controlling this for GN
+  # builds in similar fashion by setting the `system_libdir` variable in the
+  # build's args.gn file to 'lib' or 'lib64' as appropriate for the target
+  # architecture.
+ system_libdir = "lib"
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+pkg_config_args = []
+
+if (sysroot != "") {
+ # Pass the sysroot if we're using one (it requires the CPU arch also).
+ pkg_config_args += [
+ "-s",
+ rebase_path(sysroot),
+ "-a",
+ current_cpu,
+ ]
+}
+
+if (pkg_config != "") {
+ pkg_config_args += [
+ "-p",
+ pkg_config,
+ ]
+}
+
+# Only use the custom libdir when building with the target sysroot.
+if (target_sysroot != "" && sysroot == target_sysroot) {
+ pkg_config_args += [
+ "--system_libdir",
+ system_libdir,
+ ]
+}
+
+if (host_pkg_config != "") {
+ host_pkg_config_args = [
+ "-p",
+ host_pkg_config,
+ ]
+} else {
+ host_pkg_config_args = pkg_config_args
+}
+
+template("pkg_config") {
+ assert(defined(invoker.packages),
+ "Variable |packages| must be defined to be a list in pkg_config.")
+ config(target_name) {
+ if (host_toolchain == current_toolchain) {
+ args = host_pkg_config_args + invoker.packages
+ } else {
+ args = pkg_config_args + invoker.packages
+ }
+ if (defined(invoker.extra_args)) {
+ args += invoker.extra_args
+ }
+
+ pkgresult = exec_script(pkg_config_script, args, "value")
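+    # pkgresult is a list of the form [includes, cflags, libs, lib_dirs],
+    # as printed by pkg-config.py above.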
+ cflags = pkgresult[1]
+
+ foreach(include, pkgresult[0]) {
+ if (use_sysroot) {
+ # We want the system include paths to use -isystem instead of -I to
+ # suppress warnings in those headers.
+ include_relativized = rebase_path(include, root_build_dir)
+ cflags += [ "-isystem$include_relativized" ]
+ } else {
+ cflags += [ "-I$include" ]
+ }
+ }
+
+ if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+ libs = pkgresult[2]
+ lib_dirs = pkgresult[3]
+ }
+
+ forward_variables_from(invoker,
+ [
+ "defines",
+ "visibility",
+ ])
+ }
+}
diff --git a/deps/v8/build/config/locales.gni b/deps/v8/build/config/locales.gni
new file mode 100644
index 0000000000..21f06187a4
--- /dev/null
+++ b/deps/v8/build/config/locales.gni
@@ -0,0 +1,189 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android doesn't ship all locales in order to save space (but webview does).
+# http://crbug.com/369218
+android_chrome_omitted_locales = [
+ "bn",
+ "et",
+ "gu",
+ "kn",
+ "ml",
+ "mr",
+ "ms",
+ "ta",
+ "te",
+]
+
+# Chrome on iOS only ships with a subset of the locales supported by other
+# versions of Chrome, as the remaining locales are not supported by the
+# operating system (but for simplicity, the corresponding .pak files are
+# still generated).
+ios_unsupported_locales = [
+ "am",
+ "bn",
+ "et",
+ "fil",
+ "gu",
+ "kn",
+ "lv",
+ "ml",
+ "mr",
+ "sl",
+ "sw",
+ "ta",
+ "te",
+]
+
+# These lists are defined even when not building for Android or iOS, for the
+# sake of build/locale_tool.py. These asserts ensure that GN doesn't complain
+# about them being unused.
+assert(android_chrome_omitted_locales != [])
+assert(ios_unsupported_locales != [])
+
+# Note: keep in sync with below.
+locales = [
+ "am",
+ "ar",
+ "bg",
+ "bn",
+ "ca",
+ "cs",
+ "da",
+ "de",
+ "el",
+ "en-GB",
+ "en-US",
+ "es",
+ "et",
+ "fa",
+ "fi",
+ "fil",
+ "fr",
+ "gu",
+ "he",
+ "hi",
+ "hr",
+ "hu",
+ "id",
+ "it",
+ "ja",
+ "kn",
+ "ko",
+ "lt",
+ "lv",
+ "ml",
+ "mr",
+ "ms",
+ "nb",
+ "nl",
+ "pl",
+ "pt-PT",
+ "ro",
+ "ru",
+ "sk",
+ "sl",
+ "sr",
+ "sv",
+ "sw",
+ "ta",
+ "te",
+ "th",
+ "tr",
+ "uk",
+ "vi",
+ "zh-CN",
+ "zh-TW",
+]
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (!is_ios) {
+ locales += [
+ "es-419",
+ "pt-BR",
+ ]
+} else {
+ locales += [
+ "es-MX",
+ "pt",
+ ]
+
+ ios_packed_locales = locales - ios_unsupported_locales
+}
+
+locales_with_fake_bidi = locales + [ "fake-bidi" ]
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = [
+ "am",
+ "ar",
+ "bg",
+ "bn",
+ "ca",
+ "cs",
+ "da",
+ "de",
+ "el",
+ "en_GB",
+ "en",
+ "es",
+ "et",
+ "fa",
+ "fi",
+ "fil",
+ "fr",
+ "gu",
+ "he",
+ "hi",
+ "hr",
+ "hu",
+ "id",
+ "it",
+ "ja",
+ "kn",
+ "ko",
+ "lt",
+ "lv",
+ "ml",
+ "mr",
+ "ms",
+ "nb",
+ "nl",
+ "pl",
+ "pt_PT",
+ "ro",
+ "ru",
+ "sk",
+ "sl",
+ "sr",
+ "sv",
+ "sw",
+ "ta",
+ "te",
+ "th",
+ "tr",
+ "uk",
+ "vi",
+ "zh_CN",
+ "zh_TW",
+]
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (!is_ios) {
+ locales_as_mac_outputs += [
+ "es_419",
+ "pt_BR",
+ ]
+} else {
+ locales_as_mac_outputs += [
+ "es_MX",
+ "pt",
+ ]
+
+ ios_packed_locales_as_mac_outputs =
+ locales_as_mac_outputs - ios_unsupported_locales
+}
diff --git a/deps/v8/build/config/mac/BUILD.gn b/deps/v8/build/config/mac/BUILD.gn
new file mode 100644
index 0000000000..780f752060
--- /dev/null
+++ b/deps/v8/build/config/mac/BUILD.gn
@@ -0,0 +1,109 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+import("//build/config/sysroot.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+ # These flags are shared between the C compiler and linker.
+ common_mac_flags = []
+
+ # CPU architecture.
+ if (current_cpu == "x64") {
+ common_mac_flags += [
+ "-arch",
+ "x86_64",
+ ]
+ } else if (current_cpu == "x86") {
+ common_mac_flags += [
+ "-arch",
+ "i386",
+ ]
+ }
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (Defines are passed via the command line, and build systems rebuild things
+  # when their command line changes.) Nothing should ever read this define.
+ defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+ asmflags = common_mac_flags
+ cflags = common_mac_flags
+
+ # Without this, the constructors and destructors of a C++ object inside
+ # an Objective C struct won't be called, which is very bad.
+ cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+ ldflags = common_mac_flags
+
+ # Create a new read-only segment for protected memory. The default segments
+ # (__TEXT and __DATA) are mapped read-execute and read-write by default.
+ ldflags += [
+ "-segprot",
+ "PROTECTED_MEMORY",
+ "rw",
+ "r",
+ ]
+
+ if (save_unstripped_output) {
+ ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
+ }
+
+ if (export_libcxxabi_from_executables) {
+ ldflags += [ "-Wl,-undefined,dynamic_lookup" ]
+ }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Mac-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ common_flags = [
+ "-isysroot",
+ rebase_path(sysroot, root_build_dir),
+ "-mmacosx-version-min=$mac_deployment_target",
+ ]
+
+ asmflags = common_flags
+ cflags = common_flags
+ ldflags = common_flags
+
+ # Prevent Mac OS X AssertMacros.h (included by system header) from defining
+ # macros that collide with common names, like 'check', 'require', and
+ # 'verify'.
+ # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+ defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0" ]
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+ ldflags = [ "-Wl,-ObjC" ] # Always load Objective-C categories and classes.
+
+ if (is_component_build) {
+ ldflags += [
+ # Path for loading shared libraries for unbundled binaries.
+ "-Wl,-rpath,@loader_path/.",
+
+ # Path for loading shared libraries for bundled binaries. Get back from
+ # Binary.app/Contents/MacOS.
+ "-Wl,-rpath,@loader_path/../../..",
+ ]
+ }
+}
+
+# The ldflags referenced below are handled by
+# //build/toolchain/mac/linker_driver.py.
+# Remove this config if a target wishes to change the arguments passed to the
+# strip command during linking. This config by default strips all symbols
+# from a binary, but some targets may wish to specify an exports file to
+# preserve specific symbols.
+config("strip_all") {
+ if (enable_stripping) {
+ ldflags = [ "-Wcrl,strip,-x,-S" ]
+ }
+}
diff --git a/deps/v8/build/config/mac/BuildInfo.plist b/deps/v8/build/config/mac/BuildInfo.plist
new file mode 100644
index 0000000000..d32bf2ea7f
--- /dev/null
+++ b/deps/v8/build/config/mac/BuildInfo.plist
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>BuildMachineOSBuild</key>
+ <string>${BUILD_MACHINE_OS_BUILD}</string>
+ <key>DTCompiler</key>
+ <string>${GCC_VERSION}</string>
+ <key>DTSDKBuild</key>
+ <string>${MAC_SDK_BUILD}</string>
+ <key>DTSDKName</key>
+ <string>${MAC_SDK_NAME}</string>
+ <key>DTXcode</key>
+ <string>${XCODE_VERSION}</string>
+ <key>DTXcodeBuild</key>
+ <string>${XCODE_BUILD}</string>
+</dict>
+</plist>
diff --git a/deps/v8/build/config/mac/OWNERS b/deps/v8/build/config/mac/OWNERS
new file mode 100644
index 0000000000..14747a0025
--- /dev/null
+++ b/deps/v8/build/config/mac/OWNERS
@@ -0,0 +1,4 @@
+rsesek@chromium.org
+sdefresne@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/config/mac/base_rules.gni b/deps/v8/build/config/mac/base_rules.gni
new file mode 100644
index 0000000000..bcb34a166d
--- /dev/null
+++ b/deps/v8/build/config/mac/base_rules.gni
@@ -0,0 +1,308 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains rules that are shared between Mac and iOS.
+
+import("//build/toolchain/toolchain.gni")
+import("//build/config/mac/symbols.gni")
+
+if (is_mac) {
+ import("//build/config/mac/mac_sdk.gni")
+} else if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+}
+
+# Convert plist file to given format.
+#
+# Arguments
+#
+# source:
+# string, path to the plist file to convert
+#
+# output:
+# string, path to the converted plist, must be under $root_build_dir
+#
+# format:
+# string, the format to `plutil -convert` the plist to.
+template("convert_plist") {
+ assert(defined(invoker.source), "source must be defined for $target_name")
+ assert(defined(invoker.output), "output must be defined for $target_name")
+ assert(defined(invoker.format), "format must be defined for $target_name")
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "visibility",
+ "testonly",
+ "deps",
+ ])
+
+ script = "//build/config/mac/xcrun.py"
+ sources = [
+ invoker.source,
+ ]
+ outputs = [
+ invoker.output,
+ ]
+ args = []
+ if (!use_system_xcode) {
+ args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+ args += [
+ "plutil",
+ "-convert",
+ invoker.format,
+ "-o",
+ rebase_path(invoker.output, root_build_dir),
+ rebase_path(invoker.source, root_build_dir),
+ ]
+ }
+}
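+
+# Example (hypothetical target, for illustration only):
+#   convert_plist("foo_plist_binary") {
+#     source = "Foo.plist"
+#     output = "$target_gen_dir/Foo.plist"
+#     format = "binary1"
+#   }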
+
+# Template to merge multiple plist files and perform variable substitutions.
+#
+# Arguments
+#
+# plist_templates:
+# string array, paths to plist files which will be used for the bundle.
+#
+# format:
+# string, the format to `plutil -convert` the plist to when
+# generating the output.
+#
+# substitutions:
+# string array, 'key=value' pairs used to replace ${key} by value
+# when generating the output plist file.
+#
+# output_name:
+# string, name of the generated plist file.
+template("compile_plist") {
+ assert(defined(invoker.plist_templates),
+ "A list of template plist files must be specified for $target_name")
+ assert(defined(invoker.format),
+ "The plist format must be specified for $target_name")
+ assert(defined(invoker.substitutions),
+ "A list of key=value pairs must be specified for $target_name")
+ assert(defined(invoker.output_name),
+ "The name of the output file must be specified for $target_name")
+
+ _output_name = invoker.output_name
+ _merged_name = get_path_info(_output_name, "dir") + "/" +
+ get_path_info(_output_name, "name") + "_merged." +
+ get_path_info(_output_name, "extension")
+
+ _merge_target = target_name + "_merge"
+
+ action(_merge_target) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+
+ script = "//build/config/mac/plist_util.py"
+ sources = invoker.plist_templates
+ outputs = [
+ _merged_name,
+ ]
+ args = [
+ "merge",
+ "-f=" + invoker.format,
+ "-o=" + rebase_path(_merged_name, root_build_dir),
+ ] + rebase_path(invoker.plist_templates, root_build_dir)
+ }
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ script = "//build/config/mac/plist_util.py"
+ sources = [
+ _merged_name,
+ ]
+ outputs = [
+ _output_name,
+ ]
+ args = [
+ "substitute",
+ "-f=" + invoker.format,
+ "-o=" + rebase_path(_output_name, root_build_dir),
+ "-t=" + rebase_path(_merged_name, root_build_dir),
+ ]
+ foreach(_substitution, invoker.substitutions) {
+ args += [ "-s=$_substitution" ]
+ }
+ deps = [
+ ":$_merge_target",
+ ]
+ }
+}
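+
+# Example (hypothetical target, for illustration only):
+#   compile_plist("my_plist") {
+#     plist_templates = [ "Base.plist", "Extra.plist" ]
+#     format = "xml1"
+#     substitutions = [ "PRODUCT_NAME=MyApp" ]
+#     output_name = "$target_gen_dir/MyApp.plist"
+#   }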
+
+# Template to merge multiple .entitlements files performing variable
+# substitutions.
+#
+# Arguments
+#
+# entitlements_templates:
+# string array, paths to entitlements files which will be used for the
+# bundle.
+#
+# substitutions:
+# string array, 'key=value' pairs used to replace ${key} by value
+# when generating the output plist file.
+#
+# output_name:
+# string, name of the generated entitlements file.
+template("compile_entitlements") {
+ assert(defined(invoker.entitlements_templates),
+ "A list of template plist files must be specified for $target_name")
+
+ compile_plist(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "entitlements_templates",
+ "format",
+ "plist_templates",
+ ])
+
+ plist_templates = invoker.entitlements_templates
+
+ # Entitlements files are always encoded in xml1.
+ format = "xml1"
+
+    # Entitlements files use unsubstituted variables, so define substitutions
+    # to leave those variables untouched.
+ if (!defined(substitutions)) {
+ substitutions = []
+ }
+
+ substitutions += [
+ "AppIdentifierPrefix=\$(AppIdentifierPrefix)",
+ "CFBundleIdentifier=\$(CFBundleIdentifier)",
+ ]
+ }
+}
+
+# The base template used to generate Info.plist files for iOS and Mac apps and
+# frameworks.
+#
+# Arguments
+#
+# plist_templates:
+# string array, paths to plist files which will be used for the bundle.
+#
+# executable_name:
+# string, name of the generated target used for the product
+# and executable name as specified in the output Info.plist.
+#
+# format:
+# string, the format to `plutil -convert` the plist to when
+# generating the output.
+#
+# extra_substitutions:
+# (optional) string array, 'key=value' pairs for extra fields which are
+# specified in a source Info.plist template.
+#
+# output_name:
+# (optional) string, name of the generated plist file, default to
+# "$target_gen_dir/$target_name.plist".
+template("info_plist") {
+ assert(defined(invoker.executable_name),
+ "The executable_name must be specified for $target_name")
+ executable_name = invoker.executable_name
+
+ compile_plist(target_name) {
+ forward_variables_from(invoker,
+ [
+ "plist_templates",
+ "testonly",
+ "deps",
+ "visibility",
+ "format",
+ ])
+
+ if (defined(invoker.output_name)) {
+ output_name = invoker.output_name
+ } else {
+ output_name = "$target_gen_dir/$target_name.plist"
+ }
+
+ substitutions = [
+ "BUILD_MACHINE_OS_BUILD=$machine_os_build",
+ "EXECUTABLE_NAME=$executable_name",
+ "GCC_VERSION=com.apple.compilers.llvm.clang.1_0",
+ "PRODUCT_NAME=$executable_name",
+ "XCODE_BUILD=$xcode_build",
+ "XCODE_VERSION=$xcode_version",
+ ]
+ if (is_mac) {
+ substitutions += [
+ "MACOSX_DEPLOYMENT_TARGET=$mac_deployment_target",
+ "CHROMIUM_MIN_SYSTEM_VERSION=$mac_min_system_version",
+ ]
+ } else if (is_ios) {
+ substitutions += [ "IOS_DEPLOYMENT_TARGET=$ios_deployment_target" ]
+ }
+ if (defined(invoker.extra_substitutions)) {
+ substitutions += invoker.extra_substitutions
+ }
+ }
+}
+
+# Template to compile .xib and .storyboard files.
+#
+# Arguments
+#
+# sources:
+# list of string, sources to compile
+#
+# ibtool_flags:
+# (optional) list of string, additional flags to pass to the ibtool
+template("compile_ib_files") {
+ action_foreach(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ assert(defined(invoker.sources),
+ "sources must be specified for $target_name")
+ assert(defined(invoker.output_extension),
+ "output_extension must be specified for $target_name")
+
+ ibtool_flags = []
+ if (defined(invoker.ibtool_flags)) {
+ ibtool_flags = invoker.ibtool_flags
+ }
+
+ _output_extension = invoker.output_extension
+
+ script = "//build/config/mac/compile_ib_files.py"
+ sources = invoker.sources
+ outputs = [
+ "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
+ ]
+ args = [
+ "--input",
+ "{{source}}",
+ "--output",
+ rebase_path(
+ "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
+ root_build_dir),
+ ]
+ if (!use_system_xcode) {
+ args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+ }
+ args += ibtool_flags
+ }
+}
diff --git a/deps/v8/build/config/mac/compile_ib_files.py b/deps/v8/build/config/mac/compile_ib_files.py
new file mode 100644
index 0000000000..281e554e1d
--- /dev/null
+++ b/deps/v8/build/config/mac/compile_ib_files.py
@@ -0,0 +1,61 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import logging
+import os
+import re
+import subprocess
+import sys
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='A script to compile xib and storyboard.',
+ fromfile_prefix_chars='@')
+ parser.add_argument('-o', '--output', required=True,
+ help='Path to output bundle.')
+ parser.add_argument('-i', '--input', required=True,
+ help='Path to input xib or storyboard.')
+ parser.add_argument('--developer_dir', required=False,
+ help='Path to Xcode.')
+ args, unknown_args = parser.parse_known_args()
+
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+ ibtool_args = [
+ 'xcrun', 'ibtool',
+ '--errors', '--warnings', '--notices',
+ '--output-format', 'human-readable-text'
+ ]
+ ibtool_args += unknown_args
+ ibtool_args += [
+ '--compile',
+ os.path.abspath(args.output),
+ os.path.abspath(args.input)
+ ]
+
+ ibtool_section_re = re.compile(r'/\*.*\*/')
+ ibtool_re = re.compile(r'.*note:.*is clipping its content')
+ try:
+ stdout = subprocess.check_output(ibtool_args)
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise
+ current_section_header = None
+ for line in stdout.splitlines():
+ if ibtool_section_re.match(line):
+ current_section_header = line
+ elif not ibtool_re.match(line):
+ if current_section_header:
+ print(current_section_header)
+ current_section_header = None
+ print(line)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/config/mac/mac_sdk.gni b/deps/v8/build/config/mac/mac_sdk.gni
new file mode 100644
index 0000000000..1a6d170a7e
--- /dev/null
+++ b/deps/v8/build/config/mac/mac_sdk.gni
@@ -0,0 +1,112 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/mac/mac_sdk_overrides.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # The MACOSX_DEPLOYMENT_TARGET variable used when compiling. This partially
+ # controls the minimum supported version of macOS for Chromium by
+ # affecting the symbol availability rules. This may differ from
+ # mac_min_system_version when dropping support for older macOSes but where
+ # additional code changes are required to be compliant with the availability
+ # rules.
+ # Must be of the form x.x.x for Info.plist files.
+ mac_deployment_target = "10.10.0"
+
+  # The value of the LSMinimumSystemVersion in Info.plist files. This partially
+ # controls the minimum supported version of macOS for Chromium by
+ # affecting the Info.plist. This may differ from mac_deployment_target when
+ # dropping support for older macOSes. This should be greater than or equal to
+ # the mac_deployment_target version.
+ # Must be of the form x.x.x for Info.plist files.
+ mac_min_system_version = "10.10.0"
+
+ # Path to a specific version of the Mac SDK, not including a slash at the end.
+ # If empty, the path to the lowest version greater than or equal to
+ # mac_sdk_min is used.
+ mac_sdk_path = ""
+
+ # The SDK name as accepted by xcodebuild.
+ mac_sdk_name = "macosx"
+}
+
+# Check that the version of the macOS SDK used is the one requested when
+# building a version of Chrome shipped to users. Disable the check if building
+# for iOS, as the version of the macOS SDK used is not relevant for the tools
+# built for the host (they are not shipped); this is required as Chrome on iOS
+# is usually built with the latest version of Xcode, which may not ship with
+# the version of the macOS SDK used to build Chrome on Mac.
+# TODO(crbug.com/635745): the check for target_os should be replaced by a
+# check that current_toolchain is default_toolchain, and the file should
+# assert that current_os is "mac" once this file is no longer included by
+# iOS toolchains.
+_verify_sdk = is_chrome_branded && is_official_build && target_os != "ios"
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (!use_system_xcode) {
+ find_sdk_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+}
+if (_verify_sdk) {
+ find_sdk_args += [
+ "--verify",
+ mac_sdk_min,
+ "--sdk_path=" + mac_sdk_path,
+ ]
+} else {
+ find_sdk_args += [ mac_sdk_min ]
+}
+
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
+find_sdk_lines =
+ exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+ mac_sdk_path = find_sdk_lines[0]
+}
+
+script_name = "//build/config/mac/sdk_info.py"
+sdk_info_args = []
+if (!use_system_xcode) {
+ sdk_info_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+}
+sdk_info_args += [ mac_sdk_name ]
+
+_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
+xcode_version = _mac_sdk_result.xcode_version
+xcode_build = _mac_sdk_result.xcode_build
+machine_os_build = _mac_sdk_result.machine_os_build
+
+if (mac_sdk_version != mac_sdk_min &&
+ exec_script("//build/check_return_value.py",
+ [
+ "test",
+ xcode_version,
+ "-ge",
+ "0730",
+ ],
+ "value") != 1) {
+ print(
+ "********************************************************************************")
+ print(
+ " WARNING: The Mac OS X SDK is incompatible with the version of Xcode. To fix,")
+ print(
+ " either upgrade Xcode to the latest version or install the Mac OS X")
+ print(
+ " $mac_sdk_min SDK. For more information, see https://crbug.com/620127.")
+ print()
+ print(" Current SDK Version: $mac_sdk_version")
+ print(" Current Xcode Version: $xcode_version ($xcode_build)")
+ print(
+ "********************************************************************************")
+ assert(false, "SDK is incompatible with Xcode")
+}
diff --git a/deps/v8/build/config/mac/mac_sdk_overrides.gni b/deps/v8/build/config/mac/mac_sdk_overrides.gni
new file mode 100644
index 0000000000..55451f043f
--- /dev/null
+++ b/deps/v8/build/config/mac/mac_sdk_overrides.gni
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains arguments that subprojects may choose to override. It
+# asserts that those overrides are used, to prevent unused args warnings.
+
+_sdk_min_from_env = getenv("FORCE_MAC_SDK_MIN")
+declare_args() {
+ # Minimum supported version of the Mac SDK.
+ if (_sdk_min_from_env == "") {
+ mac_sdk_min = "10.13"
+ } else {
+ mac_sdk_min = _sdk_min_from_env
+ }
+}
diff --git a/deps/v8/build/config/mac/package_framework.py b/deps/v8/build/config/mac/package_framework.py
new file mode 100644
index 0000000000..75604094e4
--- /dev/null
+++ b/deps/v8/build/config/mac/package_framework.py
@@ -0,0 +1,60 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import errno
+import os
+import shutil
+import sys
+
+def Main():
+ parser = argparse.ArgumentParser(description='Create Mac Framework symlinks')
+ parser.add_argument('--framework', action='store', type=str, required=True)
+ parser.add_argument('--version', action='store', type=str)
+ parser.add_argument('--contents', action='store', type=str, nargs='+')
+ parser.add_argument('--stamp', action='store', type=str, required=True)
+ args = parser.parse_args()
+
+ VERSIONS = 'Versions'
+ CURRENT = 'Current'
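+
+  # For example, invoked (illustratively) as:
+  #   package_framework.py --framework Foo.framework --version A \
+  #       --contents Foo Resources --stamp foo.stamp
+  # this script creates:
+  #   Foo.framework/Versions/Current -> A
+  #   Foo.framework/Foo -> Versions/Current/Foo
+  #   Foo.framework/Resources -> Versions/Current/Resources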
+
+ # Ensure the Foo.framework/Versions/A/ directory exists and create the
+ # Foo.framework/Versions/Current symlink to it.
+ if args.version:
+ try:
+ os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0755)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise e
+ _Relink(os.path.join(args.version),
+ os.path.join(args.framework, VERSIONS, CURRENT))
+
+ # Establish the top-level symlinks in the framework bundle. The dest of
+ # the symlinks may not exist yet.
+ if args.contents:
+ for item in args.contents:
+ _Relink(os.path.join(VERSIONS, CURRENT, item),
+ os.path.join(args.framework, item))
+
+ # Write out a stamp file.
+ if args.stamp:
+ with open(args.stamp, 'w') as f:
+ f.write(str(args))
+
+ return 0
+
+
+def _Relink(dest, link):
+ """Creates a symlink to |dest| named |link|. If |link| already exists,
+ it is overwritten."""
+ try:
+ os.remove(link)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ shutil.rmtree(link)
+ os.symlink(dest, link)
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/deps/v8/build/config/mac/plist_util.py b/deps/v8/build/config/mac/plist_util.py
new file mode 100644
index 0000000000..bba0208a5b
--- /dev/null
+++ b/deps/v8/build/config/mac/plist_util.py
@@ -0,0 +1,226 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import plistlib
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import shlex
+
+
+# Xcode substitutes variables like ${PRODUCT_NAME} or $(PRODUCT_NAME) when
+# compiling Info.plist. It also supports modifiers like :identifier
+# or :rfc1034identifier. SUBSTITUTION_REGEXP_LIST is a list of regular
+# expressions matching a variable substitution pattern with an optional
+# modifier, while INVALID_CHARACTER_REGEXP matches all characters that are
+# not valid in an "identifier" value (used when applying the modifier).
+INVALID_CHARACTER_REGEXP = re.compile(r'[_/\s]')
+SUBSTITUTION_REGEXP_LIST = (
+ re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}'),
+ re.compile(r'\$\((?P<id>[^}]*?)(?P<modifier>:[^}]*)?\)'),
+)
+
+
+class SubstitutionError(Exception):
+ def __init__(self, key):
+ super(SubstitutionError, self).__init__()
+ self.key = key
+
+ def __str__(self):
+ return "SubstitutionError: {}".format(self.key)
+
+
+def InterpolateString(value, substitutions):
+ """Interpolates variable references into |value| using |substitutions|.
+
+ Inputs:
+ value: a string
+ substitutions: a mapping of variable names to values
+
+ Returns:
+ A new string with all variables references ${VARIABLES} replaced by their
+ value in |substitutions|. Raises SubstitutionError if a variable has no
+ substitution.
+ """
+ def repl(match):
+ variable = match.group('id')
+ if variable not in substitutions:
+ raise SubstitutionError(variable)
+    # Some values need to be identifiers, and thus the variable references may
+    # contain :modifier attributes indicating how they should be converted to
+    # identifiers ("identifier" replaces all invalid characters with '_' and
+    # "rfc1034identifier" replaces them with '-' to also make a valid URI).
+ modifier = match.group('modifier')
+ if modifier == ':identifier':
+ return INVALID_CHARACTER_REGEXP.sub('_', substitutions[variable])
+ elif modifier == ':rfc1034identifier':
+ return INVALID_CHARACTER_REGEXP.sub('-', substitutions[variable])
+ else:
+ return substitutions[variable]
+ for substitution_regexp in SUBSTITUTION_REGEXP_LIST:
+ value = substitution_regexp.sub(repl, value)
+ return value
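+
+# For example (illustrative):
+#   InterpolateString('${PRODUCT_NAME:identifier}', {'PRODUCT_NAME': 'My App'})
+# returns 'My_App', while the ':rfc1034identifier' modifier would give
+# 'My-App'.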
+
+
+def Interpolate(value, substitutions):
+ """Interpolates variable references into |value| using |substitutions|.
+
+ Inputs:
+ value: a value, can be a dictionary, list, string or other
+ substitutions: a mapping of variable names to values
+
+ Returns:
+ A new value with all variables references ${VARIABLES} replaced by their
+ value in |substitutions|. Raises SubstitutionError if a variable has no
+ substitution.
+ """
+ if isinstance(value, dict):
+ return {k: Interpolate(v, substitutions) for k, v in value.iteritems()}
+ if isinstance(value, list):
+ return [Interpolate(v, substitutions) for v in value]
+ if isinstance(value, str):
+ return InterpolateString(value, substitutions)
+ return value
+
+
+def LoadPList(path):
+ """Loads Plist at |path| and returns it as a dictionary."""
+ fd, name = tempfile.mkstemp()
+ try:
+ subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
+ with os.fdopen(fd, 'r') as f:
+ return plistlib.readPlist(f)
+ finally:
+ os.unlink(name)
+
+
+def SavePList(path, format, data):
+ """Saves |data| as a Plist to |path| in the specified |format|."""
+ fd, name = tempfile.mkstemp()
+ try:
+ # "plutil" does not replace the destination file but update it in place,
+ # so if more than one hardlink points to destination all of them will be
+ # modified. This is not what is expected, so delete destination file if
+ # it does exist.
+ if os.path.exists(path):
+ os.unlink(path)
+ with os.fdopen(fd, 'w') as f:
+ plistlib.writePlist(data, f)
+ subprocess.check_call(['plutil', '-convert', format, '-o', path, name])
+ finally:
+ os.unlink(name)
+
+
+def MergePList(plist1, plist2):
+ """Merges |plist1| with |plist2| recursively.
+
+  Creates a new dictionary representing a Property List (.plist) file by
+  merging the two dictionaries |plist1| and |plist2| recursively (only for
+  dictionary values). List values will be concatenated.
+
+ Args:
+ plist1: a dictionary representing a Property List (.plist) file
+ plist2: a dictionary representing a Property List (.plist) file
+
+ Returns:
+ A new dictionary representing a Property List (.plist) file by merging
+    |plist1| with |plist2|. If any value is a dictionary, they are merged
+    recursively, otherwise the |plist2| value is used. If values are lists,
+    they are concatenated.
+ """
+ result = plist1.copy()
+ for key, value in plist2.iteritems():
+ if isinstance(value, dict):
+ old_value = result.get(key)
+ if isinstance(old_value, dict):
+ value = MergePList(old_value, value)
+ if isinstance(value, list):
+ value = plist1.get(key, []) + plist2.get(key, [])
+ result[key] = value
+ return result
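+
+# For example (illustrative): MergePList({'A': [1], 'B': {'x': 1}},
+# {'A': [2], 'B': {'y': 2}}) returns {'A': [1, 2], 'B': {'x': 1, 'y': 2}}.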
+
+
+class Action(object):
+ """Class implementing one action supported by the script."""
+
+ @classmethod
+ def Register(cls, subparsers):
+ parser = subparsers.add_parser(cls.name, help=cls.help)
+ parser.set_defaults(func=cls._Execute)
+ cls._Register(parser)
+
+
+class MergeAction(Action):
+ """Class to merge multiple plist files."""
+
+ name = 'merge'
+ help = 'merge multiple plist files'
+
+ @staticmethod
+ def _Register(parser):
+ parser.add_argument(
+ '-o', '--output', required=True,
+ help='path to the output plist file')
+ parser.add_argument(
+ '-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
+ help='format of the plist file to generate')
+ parser.add_argument(
+ 'path', nargs="+",
+ help='path to plist files to merge')
+
+ @staticmethod
+ def _Execute(args):
+ data = {}
+ for filename in args.path:
+ data = MergePList(data, LoadPList(filename))
+ SavePList(args.output, args.format, data)
+
+
+class SubstituteAction(Action):
+ """Class implementing the variable substitution in a plist file."""
+
+ name = 'substitute'
+ help = 'perform pattern substitution in a plist file'
+
+ @staticmethod
+ def _Register(parser):
+ parser.add_argument(
+ '-o', '--output', required=True,
+ help='path to the output plist file')
+ parser.add_argument(
+ '-t', '--template', required=True,
+ help='path to the template file')
+ parser.add_argument(
+ '-s', '--substitution', action='append', default=[],
+ help='substitution rule in the format key=value')
+ parser.add_argument(
+ '-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
+ help='format of the plist file to generate')
+
+ @staticmethod
+ def _Execute(args):
+ substitutions = {}
+ for substitution in args.substitution:
+ key, value = substitution.split('=', 1)
+ substitutions[key] = value
+ data = Interpolate(LoadPList(args.template), substitutions)
+ SavePList(args.output, args.format, data)
+
+
+def Main():
+ parser = argparse.ArgumentParser(description='manipulate plist files')
+ subparsers = parser.add_subparsers()
+
+ for action in [MergeAction, SubstituteAction]:
+ action.Register(subparsers)
+
+ args = parser.parse_args()
+ args.func(args)
+
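+# Example invocations (illustrative):
+#   python plist_util.py merge -f=xml1 -o=out.plist a.plist b.plist
+#   python plist_util.py substitute -f=xml1 -o=out.plist -t=template.plist \
+#       -s=PRODUCT_NAME=MyApp
+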
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/deps/v8/build/config/mac/prepare_framework_version.py b/deps/v8/build/config/mac/prepare_framework_version.py
new file mode 100644
index 0000000000..5e8a53f20a
--- /dev/null
+++ b/deps/v8/build/config/mac/prepare_framework_version.py
@@ -0,0 +1,42 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+
+# Ensures that the current version matches the last-produced version, which is
+# stored in the version_file. If it does not, then the framework_root_dir is
+# obliterated.
+# Usage: python prepare_framework_version.py out/obj/version_file \
+# out/Framework.framework \
+# 'A'
+
+def PrepareFrameworkVersion(version_file, framework_root_dir, version):
+ # Test what the current framework version is. Stop if it is up-to-date.
+ try:
+ with open(version_file, 'r') as f:
+ current_version = f.read()
+ if current_version == version:
+ return
+ except IOError:
+ pass
+
+ # The framework version has changed, so clobber the framework.
+ if os.path.exists(framework_root_dir):
+ shutil.rmtree(framework_root_dir)
+
+ # Write out the new framework version file, making sure its containing
+ # directory exists.
+ dirname = os.path.dirname(version_file)
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname, 0700)
+
+ with open(version_file, 'w+') as f:
+ f.write(version)
+
+
+if __name__ == '__main__':
+ PrepareFrameworkVersion(sys.argv[1], sys.argv[2], sys.argv[3])
+ sys.exit(0)
diff --git a/deps/v8/build/config/mac/rules.gni b/deps/v8/build/config/mac/rules.gni
new file mode 100644
index 0000000000..d9f4b6e673
--- /dev/null
+++ b/deps/v8/build/config/mac/rules.gni
@@ -0,0 +1,676 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/mac/base_rules.gni")
+
+# Generates Info.plist files for Mac apps and frameworks.
+#
+# Arguments
+#
+# info_plist:
+# (optional) string, path to the Info.plist file that will be used for
+# the bundle.
+#
+# info_plist_target:
+# (optional) string, if the info_plist is generated from an action,
+# rather than a regular source file, specify the target name in lieu
+# of info_plist. The two arguments are mutually exclusive.
+#
+# executable_name:
+# string, name of the generated target used for the product
+# and executable name as specified in the output Info.plist.
+#
+# extra_substitutions:
+# (optional) string array, 'key=value' pairs for extra fields which are
+# specified in a source Info.plist template.
+template("mac_info_plist") {
+ assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+ "Only one of info_plist or info_plist_target may be specified in " +
+ target_name)
+
+ if (defined(invoker.info_plist)) {
+ _info_plist = invoker.info_plist
+ } else {
+ _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+ _info_plist = _info_plist_target_output[0]
+ }
+
+ info_plist(target_name) {
+ format = "xml1"
+ extra_substitutions = []
+ if (defined(invoker.extra_substitutions)) {
+ extra_substitutions = invoker.extra_substitutions
+ }
+ extra_substitutions += [
+ "MAC_SDK_BUILD=$mac_sdk_version",
+ "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
+ ]
+ plist_templates = [
+ "//build/config/mac/BuildInfo.plist",
+ _info_plist,
+ ]
+ if (defined(invoker.info_plist_target)) {
+ deps = [
+ invoker.info_plist_target,
+ ]
+ }
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "executable_name",
+ ])
+ }
+}
+
+# Template to compile and package Mac XIB files as bundle data.
+#
+# Arguments
+#
+# sources:
+#     list of string, sources to compile
+#
+# output_path:
+# (optional) string, the path to use for the outputs list in the
+# bundle_data step. If unspecified, defaults to bundle_resources_dir.
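+#
+# Example (hypothetical target, for illustration only):
+#   mac_xib_bundle_data("main_menu_xib") {
+#     sources = [ "MainMenu.xib" ]
+#   }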
+template("mac_xib_bundle_data") {
+ _target_name = target_name
+ _compile_target_name = _target_name + "_compile_ibtool"
+
+ compile_ib_files(_compile_target_name) {
+ forward_variables_from(invoker, [ "testonly" ])
+ visibility = [ ":$_target_name" ]
+ sources = invoker.sources
+ output_extension = "nib"
+ ibtool_flags = [
+ "--minimum-deployment-target",
+ mac_deployment_target,
+
+ # TODO(rsesek): Enable this once all the bots are on Xcode 7+.
+ # "--target-device",
+ # "mac",
+ ]
+ }
+
+ bundle_data(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+
+ public_deps = [
+ ":$_compile_target_name",
+ ]
+ sources = get_target_outputs(":$_compile_target_name")
+
+ _output_path = "{{bundle_resources_dir}}"
+ if (defined(invoker.output_path)) {
+ _output_path = invoker.output_path
+ }
+
+ outputs = [
+ "$_output_path/{{source_file_part}}",
+ ]
+ }
+}
+
+# Template to package a shared library into a Mac framework bundle.
+#
+# By default, the bundle target this template generates does not link the
+# resulting framework into anything that depends on it. If a dependency wants
+# a link-time (as well as build-time) dependency on the framework bundle,
+# depend against "$target_name+link". If only the build-time dependency is
+# required (e.g., for copying into another bundle), then use "$target_name".
+#
+# Arguments
+#
+# framework_version:
+# string, version of the framework. Typically this is a
+# single letter, like "A".
+#
+# framework_contents:
+# list of string, top-level items in the framework. This is
+# the list of symlinks to create in the .framework directory that link
+# into Versions/Current/.
+#
+# info_plist:
+# (optional) string, path to the Info.plist file that will be used for
+# the bundle.
+#
+# info_plist_target:
+# (optional) string, if the info_plist is generated from an action,
+# rather than a regular source file, specify the target name in lieu
+# of info_plist. The two arguments are mutually exclusive.
+#
+# output_name:
+# (optional) string, name of the generated framework without the
+# .framework suffix. If omitted, defaults to target_name.
+#
+# extra_substitutions:
+# (optional) string array, 'key=value' pairs for extra fields which are
+# specified in a source Info.plist template.
+#
+# This template provides two targets for the resulting framework bundle. The
+# link-time behavior varies depending on which of the two targets below is
+# added as a dependency:
+# - $target_name only adds a build-time dependency. Targets that depend on
+# it will not link against the framework.
+# - $target_name+link adds a build-time and link-time dependency. Targets
+# that depend on it will link against the framework.
+#
+# The build-time-only dependency is used for when a target needs to use the
+# framework either only for resources, or because the target loads it at run-
+# time, via dlopen() or NSBundle. The link-time dependency will cause the
+# dependee to have the framework loaded by dyld at launch.
+#
+# Example of build-time only dependency:
+#
+# mac_framework_bundle("CoreTeleportation") {
+# sources = [ ... ]
+# }
+#
+# bundle_data("core_teleportation_bundle_data") {
+# deps = [ ":CoreTeleportation" ]
+# sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+# }
+#
+# app_bundle("GoatTeleporter") {
+# sources = [ ... ]
+# deps = [
+# ":core_teleportation_bundle_data",
+# ]
+# }
+#
+# The GoatTeleporter.app will not directly link against
+# CoreTeleportation.framework, but it will be included in the bundle's
+# Frameworks directory.
+#
+# Example of link-time dependency:
+#
+# mac_framework_bundle("CoreTeleportation") {
+# sources = [ ... ]
+# ldflags = [
+# "-install_name",
+# "@executable_path/../Frameworks/$target_name.framework"
+# ]
+# }
+#
+# bundle_data("core_teleportation_bundle_data") {
+# deps = [ ":CoreTeleportation+link" ]
+# sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ]
+# }
+#
+# app_bundle("GoatTeleporter") {
+# sources = [ ... ]
+# deps = [
+# ":core_teleportation_bundle_data",
+# ]
+# }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by shared library target.
+template("mac_framework_bundle") {
+ assert(defined(invoker.deps),
+ "Dependencies must be specified for $target_name")
+ assert(invoker.framework_version != "", "framework_version is required")
+ assert(defined(invoker.framework_contents), "framework_contents is required")
+
+ _info_plist_target = target_name + "_info_plist"
+
+ mac_info_plist(_info_plist_target) {
+ executable_name = target_name
+ if (defined(invoker.output_name)) {
+ executable_name = invoker.output_name
+ }
+ forward_variables_from(invoker,
+ [
+ "extra_substitutions",
+ "info_plist",
+ "info_plist_target",
+ "testonly",
+ ])
+ }
+
+ _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+ bundle_data(_info_plist_bundle_data) {
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [
+ "{{bundle_resources_dir}}/Info.plist",
+ ]
+ public_deps = [
+ ":$_info_plist_target",
+ ]
+ }
+
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ # Create a file to track the build dependency on the framework_version and
+ # framework_contents variables.
+ _framework_toc = [
+ "Version=" + invoker.framework_version,
+ _output_name,
+ ] + invoker.framework_contents
+ _framework_contents = [ _output_name ] + invoker.framework_contents
+ _framework_toc_file = "$target_out_dir/${target_name}.toc"
+ write_file(_framework_toc_file, _framework_toc)
+
+ # Create local variables for referencing different parts of the bundle.
+ _framework_target = _target_name
+ _framework_name = _output_name + ".framework"
+ _framework_base_dir = "$root_out_dir/$_framework_name"
+ _framework_root_dir =
+ _framework_base_dir + "/Versions/${invoker.framework_version}"
+
+ # Clean the entire framework if the framework_version changes.
+ _version_file = "$target_out_dir/${target_name}_version"
+ exec_script("//build/config/mac/prepare_framework_version.py",
+ [
+ rebase_path(_version_file),
+ rebase_path(_framework_base_dir),
+ invoker.framework_version,
+ ])
+
+ # Create the symlinks.
+ _framework_package_target = target_name + "_package"
+ action(_framework_package_target) {
+ script = "//build/config/mac/package_framework.py"
+
+ # The TOC file never needs to be read, since its contents are the values
+ # of GN variables. It is only used to trigger this rule when the values
+ # change.
+ inputs = [
+ _framework_toc_file,
+ ]
+
+ _stamp_file = "$target_out_dir/run_${_framework_package_target}.stamp"
+ outputs = [
+ _stamp_file,
+ ]
+
+ visibility = [ ":$_framework_target" ]
+
+ args = [
+ "--framework",
+ rebase_path(_framework_base_dir, root_build_dir),
+ "--stamp",
+ rebase_path(_stamp_file, root_build_dir),
+ "--version",
+ invoker.framework_version,
+ "--contents",
+ ] + _framework_contents
+
+ # It is not possible to list _framework_contents as outputs, since
+ # ninja does not properly stat symbolic links.
+ # https://github.com/ninja-build/ninja/issues/1186
+ }
+
+ _link_shared_library_target = target_name + "_shared_library"
+ _shared_library_bundle_data = target_name + "_shared_library_bundle_data"
+
+ shared_library(_link_shared_library_target) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "assert_no_deps",
+ "bundle_deps",
+ "code_signing_enabled",
+ "data_deps",
+ "info_plist",
+ "info_plist_target",
+ "output_name",
+ "visibility",
+ ])
+ visibility = [ ":$_shared_library_bundle_data" ]
+ output_name = _output_name
+ output_prefix_override = true
+ output_extension = ""
+ output_dir = "$target_out_dir/$_link_shared_library_target"
+ }
+
+ bundle_data(_shared_library_bundle_data) {
+ visibility = [ ":$_framework_target" ]
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = [
+ "$target_out_dir/$_link_shared_library_target/$_output_name",
+ ]
+ outputs = [
+ "{{bundle_executable_dir}}/$_output_name",
+ ]
+ public_deps = [
+ ":$_link_shared_library_target",
+ ]
+ }
+
+ _framework_public_config = _target_name + "_public_config"
+ config(_framework_public_config) {
+ # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
+ # and include_dirs to avoid duplicate values on the command-line.
+ visibility = [ ":$_framework_target" ]
+ cflags = [
+ "-F",
+ rebase_path("$root_out_dir/.", root_build_dir),
+ ]
+ ldflags = [
+ "-F",
+ rebase_path("$root_out_dir/.", root_build_dir),
+ ]
+ lib_dirs = [ root_out_dir ]
+ libs = [ _framework_name ]
+ }
+
+ create_bundle(_framework_target) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+
+ if (defined(invoker.visibility)) {
+ visibility = invoker.visibility
+ visibility += [ ":$_target_name+link" ]
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_info_plist_bundle_data" ]
+
+ if (defined(invoker.bundle_deps)) {
+ deps += invoker.bundle_deps
+ }
+
+ if (!defined(public_deps)) {
+ public_deps = []
+ }
+ public_deps += [
+ ":$_framework_package_target",
+ ":$_shared_library_bundle_data",
+ ]
+
+ bundle_root_dir = _framework_base_dir
+ bundle_contents_dir = _framework_root_dir
+ bundle_resources_dir = "$bundle_contents_dir/Resources"
+ bundle_executable_dir = bundle_contents_dir
+ }
+
+ group(_target_name + "+link") {
+ forward_variables_from(invoker,
+ [
+ "public_configs",
+ "testonly",
+ "visibility",
+ ])
+ public_deps = [
+ ":$_target_name",
+ ]
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
+ public_configs += [ ":$_framework_public_config" ]
+ }
+}
+
+set_defaults("mac_framework_bundle") {
+ configs = default_shared_library_configs
+}
+
+# Template to create a Mac executable application bundle.
+#
+# Arguments
+#
+# package_type:
+# (optional) string, the product package type to create. Options are:
+# "app" to create a .app bundle (default)
+# "xpc" to create an .xpc service bundle
+#
+# info_plist:
+# (optional) string, path to the Info.plist file that will be used for
+# the bundle.
+#
+# info_plist_target:
+# (optional) string, if the info_plist is generated from an action,
+# rather than a regular source file, specify the target name in lieu
+# of info_plist. The two arguments are mutually exclusive.
+#
+# output_name:
+# (optional) string, name of the generated app without the
+# .app suffix. If omitted, defaults to target_name.
+#
+# extra_configs:
+# (optional) list of label, additional configs to apply to the
+# executable target.
+#
+# remove_configs:
+# (optional) list of label, default configs to remove from the target.
+#
+# extra_substitutions:
+# (optional) string array, 'key=value' pairs for extra fields which are
+# specified in a source Info.plist template.
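+#
+# Example (hypothetical target, for illustration only):
+#   mac_app_bundle("goat_teleporter") {
+#     output_name = "GoatTeleporter"
+#     sources = [ "main.mm" ]
+#     info_plist = "Info.plist"
+#     deps = [ ":teleporter_lib" ]
+#   }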
+template("mac_app_bundle") {
+ _target_name = target_name
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _package_type = "app"
+ if (defined(invoker.package_type)) {
+ _package_type = invoker.package_type
+ }
+
+ if (_package_type == "app") {
+ _output_extension = "app"
+ _product_type = "com.apple.product-type.application"
+ _write_pkg_info = true
+ } else if (_package_type == "xpc") {
+ _output_extension = "xpc"
+ _product_type = "com.apple.product-type.xpc-service"
+ _write_pkg_info = false
+ } else {
+ assert(false, "Unsupported packge_type: " + packge_type)
+ }
+
+ _executable_target = target_name + "_executable"
+ _executable_bundle_data = _executable_target + "_bundle_data"
+
+ _info_plist_target = target_name + "_info_plist"
+
+ mac_info_plist(_info_plist_target) {
+ executable_name = _output_name
+ forward_variables_from(invoker,
+ [
+ "extra_substitutions",
+ "info_plist",
+ "info_plist_target",
+ "testonly",
+ ])
+ }
+
+ if (_write_pkg_info) {
+ _pkg_info_target = target_name + "_pkg_info"
+
+ action(_pkg_info_target) {
+ forward_variables_from(invoker, [ "testonly" ])
+ script = "//build/config/mac/write_pkg_info.py"
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [
+ "$target_gen_dir/$_pkg_info_target",
+ ]
+ args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+ [ "--output" ] + rebase_path(outputs, root_build_dir)
+ deps = [
+ ":$_info_plist_target",
+ ]
+ }
+ }
+
+ executable(_executable_target) {
+ visibility = [ ":$_executable_bundle_data" ]
+ forward_variables_from(invoker,
+ "*",
+ [
+ "assert_no_deps",
+ "data_deps",
+ "info_plist",
+ "output_name",
+ "visibility",
+ ])
+ if (defined(extra_configs)) {
+ configs += extra_configs
+ }
+ if (defined(remove_configs)) {
+ configs -= remove_configs
+ }
+ output_name = _output_name
+ output_dir = "$target_out_dir/$_executable_target"
+ }
+
+ bundle_data(_executable_bundle_data) {
+ visibility = [ ":$_target_name" ]
+ forward_variables_from(invoker, [ "testonly" ])
+ sources = [
+ "$target_out_dir/$_executable_target/$_output_name",
+ ]
+ outputs = [
+ "{{bundle_executable_dir}}/$_output_name",
+ ]
+ public_deps = [
+ ":$_executable_target",
+ ]
+ }
+
+ _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+ bundle_data(_info_plist_bundle_data) {
+ forward_variables_from(invoker, [ "testonly" ])
+ visibility = [ ":$_target_name" ]
+ sources = get_target_outputs(":$_info_plist_target")
+ outputs = [
+ "{{bundle_contents_dir}}/Info.plist",
+ ]
+ public_deps = [
+ ":$_info_plist_target",
+ ]
+ }
+
+ if (_write_pkg_info) {
+ _pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
+
+ bundle_data(_pkg_info_bundle_data) {
+ forward_variables_from(invoker, [ "testonly" ])
+ visibility = [ ":$_target_name" ]
+ sources = get_target_outputs(":$_pkg_info_target")
+ outputs = [
+ "{{bundle_contents_dir}}/PkgInfo",
+ ]
+ public_deps = [
+ ":$_pkg_info_target",
+ ]
+ }
+ }
+
+ create_bundle(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "public_deps",
+ "testonly",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [
+ ":$_executable_bundle_data",
+ ":$_info_plist_bundle_data",
+ ]
+ if (_write_pkg_info) {
+ deps += [ ":$_pkg_info_bundle_data" ]
+ }
+ product_type = _product_type
+ bundle_root_dir = "$root_out_dir/${_output_name}.${_output_extension}"
+ bundle_contents_dir = "$bundle_root_dir/Contents"
+ bundle_resources_dir = "$bundle_contents_dir/Resources"
+ bundle_executable_dir = "$bundle_contents_dir/MacOS"
+ }
+}
+
+# Template to package a loadable_module into a .plugin bundle.
+#
+# This takes no extra arguments that differ from a loadable_module.
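+#
+# Example (hypothetical target, for illustration only):
+#   mac_plugin_bundle("my_plugin") {
+#     sources = [ "plugin.mm" ]
+#     deps = [ ":plugin_support" ]
+#   }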
+template("mac_plugin_bundle") {
+ assert(defined(invoker.deps),
+ "Dependencies must be specified for $target_name")
+
+ _target_name = target_name
+ _loadable_module_target = _target_name + "_loadable_module"
+ _loadable_module_bundle_data = _loadable_module_target + "_bundle_data"
+
+ _output_name = _target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ loadable_module(_loadable_module_target) {
+ visibility = [ ":$_loadable_module_bundle_data" ]
+ forward_variables_from(invoker,
+ "*",
+ [
+ "assert_no_deps",
+ "data_deps",
+ "output_name",
+ "visibility",
+ ])
+ output_dir = "$target_out_dir"
+ output_name = _output_name
+ }
+
+ bundle_data(_loadable_module_bundle_data) {
+ forward_variables_from(invoker, [ "testonly" ])
+ visibility = [ ":$_target_name" ]
+ sources = [
+ "$target_out_dir/${_output_name}.so",
+ ]
+ outputs = [
+ "{{bundle_executable_dir}}/$_output_name",
+ ]
+ public_deps = [
+ ":$_loadable_module_target",
+ ]
+ }
+
+ create_bundle(_target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":$_loadable_module_bundle_data" ]
+
+ bundle_root_dir = "$root_out_dir/$_output_name.plugin"
+ bundle_contents_dir = "$bundle_root_dir/Contents"
+ bundle_executable_dir = "$bundle_contents_dir/MacOS"
+ }
+}
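For orientation, a minimal invocation of the template above might look like the sketch below; the target and source names are hypothetical, not taken from this tree.

# Hypothetical usage of mac_plugin_bundle; "deps" is mandatory per the
# assert above, and output_name defaults to the target name.
mac_plugin_bundle("example_plugin") {
  output_name = "ExamplePlugin"  # produces ExamplePlugin.plugin
  sources = [ "example_plugin.mm" ]
  deps = [ ":example_plugin_support" ]
}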
diff --git a/deps/v8/build/config/mac/sdk_info.py b/deps/v8/build/config/mac/sdk_info.py
new file mode 100644
index 0000000000..46dcec870c
--- /dev/null
+++ b/deps/v8/build/config/mac/sdk_info.py
@@ -0,0 +1,97 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import doctest
+import itertools
+import os
+import subprocess
+import sys
+
+# This script prints information about the build system, the operating
+# system, and the iOS or Mac SDK, depending on the platform argument
+# ("iphonesimulator", "iphoneos" or "macosx").
+
+def SplitVersion(version):
+ """Splits the Xcode version to 3 values.
+
+ >>> list(SplitVersion('8.2.1.1'))
+ ['8', '2', '1']
+ >>> list(SplitVersion('9.3'))
+ ['9', '3', '0']
+ >>> list(SplitVersion('10.0'))
+ ['10', '0', '0']
+ """
+ version = version.split('.')
+ return itertools.islice(itertools.chain(version, itertools.repeat('0')), 0, 3)
+
+def FormatVersion(version):
+ """Converts Xcode version to a format required for DTXcode in Info.plist
+
+ >>> FormatVersion('8.2.1')
+ '0821'
+ >>> FormatVersion('9.3')
+ '0930'
+ >>> FormatVersion('10.0')
+ '1000'
+ """
+ major, minor, patch = SplitVersion(version)
+ return ('%2s%s%s' % (major, minor, patch)).replace(' ', '0')
+
+def FillXcodeVersion(settings):
+ """Fills the Xcode version and build number into |settings|."""
+ lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
+ settings['xcode_version'] = FormatVersion(lines[0].split()[-1])
+ settings['xcode_version_int'] = int(settings['xcode_version'], 10)
+ settings['xcode_build'] = lines[-1].split()[-1]
+
+
+def FillMachineOSBuild(settings):
+ """Fills OS build number into |settings|."""
+ settings['machine_os_build'] = subprocess.check_output(
+ ['sw_vers', '-buildVersion']).strip()
+
+
+def FillSDKPathAndVersion(settings, platform, xcode_version):
+ """Fills the SDK path and version for |platform| into |settings|."""
+ settings['sdk_path'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-path']).strip()
+ settings['sdk_version'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-version']).strip()
+ settings['sdk_platform_path'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-platform-path']).strip()
+ # TODO: unconditionally use --show-sdk-build-version once Xcode 7.2 or
+ # higher is required to build Chrome for iOS or OS X.
+ if xcode_version >= '0720':
+ settings['sdk_build'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-build-version']).strip()
+ else:
+ settings['sdk_build'] = settings['sdk_version']
+
+
+if __name__ == '__main__':
+ doctest.testmod()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--developer_dir", required=False)
+ args, unknownargs = parser.parse_known_args()
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+ if len(unknownargs) != 1:
+ sys.stderr.write(
+ 'usage: %s [iphoneos|iphonesimulator|macosx]\n' %
+ os.path.basename(sys.argv[0]))
+ sys.exit(1)
+
+ settings = {}
+ FillMachineOSBuild(settings)
+ FillXcodeVersion(settings)
+ FillSDKPathAndVersion(settings, unknownargs[0], settings['xcode_version'])
+
+ for key in sorted(settings):
+ value = settings[key]
+ if isinstance(value, str):
+ value = '"%s"' % value
+ print '%s=%s' % (key, value)
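The key="value" lines this script prints are shaped so that GN can parse them directly. A hedged sketch of how a .gni file could consume the output (the GN-side variable names are illustrative):

# Hypothetical consumer; exec_script's "scope" conversion parses the
# script's key="value" output lines into a GN scope.
_sdk_info = exec_script("//build/config/mac/sdk_info.py",
                        [ "macosx" ],
                        "scope")
mac_sdk_path = _sdk_info.sdk_path
xcode_version = _sdk_info.xcode_version
machine_os_build = _sdk_info.machine_os_build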
diff --git a/deps/v8/build/config/mac/symbols.gni b/deps/v8/build/config/mac/symbols.gni
new file mode 100644
index 0000000000..6166b123d1
--- /dev/null
+++ b/deps/v8/build/config/mac/symbols.gni
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+# This file declares arguments and configs that control whether dSYM debug
+# info is produced and whether build products are stripped.
+
+declare_args() {
+  # Produce dSYM files for targets that are configured to do so. dSYM
+  # generation is controlled globally, as it is a linker output (produced
+  # via //build/toolchain/mac/linker_driver.py). Enabling this results in
+  # all shared library, loadable module, and executable targets having a
+  # dSYM generated.
+ enable_dsyms = is_official_build || using_sanitizer
+
+ # Strip symbols from linked targets by default. If this is enabled, the
+ # //build/config/mac:strip_all config will be applied to all linked targets.
+ # If custom stripping parameters are required, remove that config from a
+ # linked target and apply custom -Wcrl,strip flags. See
+ # //build/toolchain/mac/linker_driver.py for more information.
+ enable_stripping = is_official_build
+}
+
+# Save unstripped copies of targets with a ".unstripped" suffix. This is
+# useful to preserve the original output when enable_stripping=true but
+# we're not actually generating real dSYMs.
+save_unstripped_output = enable_stripping && !enable_dsyms
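As an illustration of how these flags interact, the args.gn sketch below (values chosen for the example, not defaults) strips binaries without producing dSYMs, so save_unstripped_output evaluates to true and ".unstripped" copies are kept:

# Illustrative args.gn overrides:
enable_stripping = true  # apply //build/config/mac:strip_all
enable_dsyms = false     # skip dSYM generation
# => save_unstripped_output = true && !false = true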
diff --git a/deps/v8/build/config/mac/write_pkg_info.py b/deps/v8/build/config/mac/write_pkg_info.py
new file mode 100644
index 0000000000..3e2c3c930e
--- /dev/null
+++ b/deps/v8/build/config/mac/write_pkg_info.py
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import plist_util
+import sys
+
+# This script creates a PkgInfo file for an OS X .app bundle's plist.
+# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \
+# --output Foo.app/Contents/PkgInfo
+
+def Main():
+ parser = argparse.ArgumentParser(
+ description='A script to write PkgInfo files for .app bundles.')
+ parser.add_argument('--plist', required=True,
+ help='Path to the Info.plist for the .app.')
+ parser.add_argument('--output', required=True,
+ help='Path to the desired output file.')
+ args = parser.parse_args()
+
+ # Remove the output if it exists already.
+ if os.path.exists(args.output):
+ os.unlink(args.output)
+
+ plist = plist_util.LoadPList(args.plist)
+ package_type = plist['CFBundlePackageType']
+ if package_type != 'APPL':
+ raise ValueError('Expected CFBundlePackageType to be %s, got %s' % \
+        ('APPL', package_type))
+
+  # The format of PkgInfo is eight characters: the four-character bundle
+  # type followed by the four-character bundle signature. If the signature
+  # is missing, four '?' characters are used in its place.
+ signature_code = plist.get('CFBundleSignature', '????')
+ if len(signature_code) != 4:
+ raise ValueError('CFBundleSignature should be exactly four characters, ' +
+ 'got %s' % signature_code)
+
+ with open(args.output, 'w') as fp:
+ fp.write('%s%s' % (package_type, signature_code))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/deps/v8/build/config/mac/xcrun.py b/deps/v8/build/config/mac/xcrun.py
new file mode 100644
index 0000000000..1f8dc203b6
--- /dev/null
+++ b/deps/v8/build/config/mac/xcrun.py
@@ -0,0 +1,28 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='A script to execute a command via xcrun.')
+ parser.add_argument('--stamp', action='store', type=str,
+ help='Write a stamp file to this path on success.')
+ parser.add_argument('--developer_dir', required=False,
+ help='Path to Xcode.')
+ args, unknown_args = parser.parse_known_args()
+
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+  # check_call raises on a non-zero exit code, so rv is always 0 here.
+  rv = subprocess.check_call(['xcrun'] + unknown_args)
+ if rv == 0 and args.stamp:
+ if os.path.exists(args.stamp):
+ os.unlink(args.stamp)
+ open(args.stamp, 'w+').close()
+
+ sys.exit(rv)
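A sketch of how a GN action might drive this wrapper; the target name is hypothetical, and any arguments not consumed by the wrapper are passed straight through to xcrun:

# Hypothetical action; everything after the wrapper's own flags is
# forwarded to xcrun ("xcrun --sdk macosx --show-sdk-path" here).
action("sdk_path_stamp") {
  script = "//build/config/mac/xcrun.py"
  outputs = [ "$target_out_dir/sdk_path.stamp" ]
  args = [
    "--stamp",
    rebase_path(outputs[0], root_build_dir),
    "--sdk",
    "macosx",
    "--show-sdk-path",
  ]
}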
diff --git a/deps/v8/build/config/merge_for_jumbo.py b/deps/v8/build/config/merge_for_jumbo.py
new file mode 100755
index 0000000000..6d037a80eb
--- /dev/null
+++ b/deps/v8/build/config/merge_for_jumbo.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script creates a "jumbo" file which merges all incoming files
+for compiling.
+
+"""
+
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import argparse
+import hashlib
+import io
+import os
+
+def cut_ranges(boundaries):
+ # Given an increasing sequence of boundary indices, generate a sequence of
+ # non-overlapping ranges. The total range is inclusive of the first index
+ # and exclusive of the last index from the given sequence.
+ for start, stop in zip(boundaries, boundaries[1:]):
+ yield range(start, stop)
+
+
+def generate_chunk_stops(inputs, output_count, smart_merge=True):
+ # Note: In the comments below, unique numeric labels are assigned to files.
+ # Consider them as the sorted rank of the hash of each file path.
+ # Simple jumbo chunking generates uniformly sized chunks with the ceiling of:
+ # (output_index + 1) * input_count / output_count
+ input_count = len(inputs)
+ stops = [((i + 1) * input_count + output_count - 1) // output_count
+ for i in range(output_count)]
+ # This is disruptive at times because file insertions and removals can
+ # invalidate many chunks as all files are offset by one.
+ # For example, say we have 12 files in 4 uniformly sized chunks:
+ # 9, 4, 0; 7, 1, 11; 5, 10, 2; 6, 3, 8
+ # If we delete the first file we get:
+ # 4, 0, 7; 1, 11, 5; 10, 2, 6; 3, 8
+ # All of the chunks have new sets of inputs.
+
+ # With path-aware chunking, we start with the uniformly sized chunks:
+ # 9, 4, 0; 7, 1, 11; 5, 10, 2; 6, 3, 8
+ # First we find the smallest rank in each of the chunks. Their indices are
+ # stored in the |centers| list and in this example the ranks would be:
+ # 0, 1, 2, 3
+ # Then we find the largest rank between the centers. Their indices are stored
+ # in the |stops| list and in this example the ranks would be:
+ # 7, 11, 6
+ # These files mark the boundaries between chunks and these boundary files are
+ # often maintained even as files are added or deleted.
+ # In this example, 7, 11, and 6 are the first files in each chunk:
+ # 9, 4, 0; 7, 1; 11, 5, 10, 2; 6, 3, 8
+ # If we delete the first file and repeat the process we get:
+ # 4, 0; 7, 1; 11, 5, 10, 2; 6, 3, 8
+ # Only the first chunk has a new set of inputs.
+ if smart_merge:
+ # Starting with the simple chunks, every file is assigned a rank.
+ # This requires a hash function that is stable across runs.
+ hasher = lambda n: hashlib.md5(inputs[n].encode()).hexdigest()
+ # In each chunk there is a key file with lowest rank; mark them.
+ # Note that they will not easily change.
+ centers = [min(indices, key=hasher) for indices in cut_ranges([0] + stops)]
+ # Between each pair of key files there is a file with highest rank.
+ # Mark these to be used as border files. They also will not easily change.
+  # Forget the initial chunks and create new chunks by splitting the list at
+ # every border file.
+ stops = [max(indices, key=hasher) for indices in cut_ranges(centers)]
+ stops.append(input_count)
+ return stops
+
+
+def write_jumbo_files(inputs, outputs, written_input_set, written_output_set):
+ chunk_stops = generate_chunk_stops(inputs, len(outputs))
+
+ written_inputs = 0
+ for output_index, output_file in enumerate(outputs):
+ written_output_set.add(output_file)
+ if os.path.isfile(output_file):
+ with open(output_file, "r") as current:
+ current_jumbo_file = current.read()
+ else:
+ current_jumbo_file = None
+
+ out = io.StringIO()
+ out.write("/* This is a Jumbo file. Don't edit. */\n\n")
+ out.write("/* Generated with merge_for_jumbo.py. */\n\n")
+ input_limit = chunk_stops[output_index]
+ while written_inputs < input_limit:
+ filename = inputs[written_inputs]
+ written_inputs += 1
+ out.write("#include \"%s\"\n" % filename)
+ written_input_set.add(filename)
+ new_jumbo_file = out.getvalue()
+ out.close()
+
+ if new_jumbo_file != current_jumbo_file:
+ with open(output_file, "w") as out:
+ out.write(new_jumbo_file)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--outputs", nargs="+", required=True,
+ help='List of output files to split input into')
+ parser.add_argument("--file-list", required=True)
+ parser.add_argument("--verbose", action="store_true")
+ args = parser.parse_args()
+
+ lines = []
+ # If written with gn |write_file| each file is on its own line.
+ with open(args.file_list) as file_list_file:
+ lines = [line.strip() for line in file_list_file if line.strip()]
+ # If written with gn |response_file_contents| the files are space separated.
+ all_inputs = []
+ for line in lines:
+ all_inputs.extend(line.split())
+
+ written_output_set = set() # Just for double checking
+ written_input_set = set() # Just for double checking
+ for language_ext in (".cc", ".c", ".mm",):
+ if language_ext == ".cc":
+ ext_pattern = (".cc", ".cpp")
+ else:
+ ext_pattern = tuple([language_ext])
+
+ outputs = [x for x in args.outputs if x.endswith(ext_pattern)]
+ inputs = [x for x in all_inputs if x.endswith(ext_pattern)]
+
+ if not outputs:
+ assert not inputs
+ continue
+
+ write_jumbo_files(inputs, outputs, written_input_set, written_output_set)
+
+ assert set(args.outputs) == written_output_set, "Did not fill all outputs"
+ assert set(all_inputs) == written_input_set, "Did not use all inputs"
+ if args.verbose:
+ print("Generated %s (%d files) based on %s" % (
+ str(args.outputs), len(written_input_set), args.file_list))
+
+if __name__ == "__main__":
+ main()
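This script is driven by the jumbo build rules rather than invoked by hand. Assuming the usual Chromium-style arguments (an assumption, since the jumbo .gni rules are not part of this change), enabling it comes down to build args:

# Illustrative args.gn; these arguments are assumed from the Chromium
# jumbo build and are not defined anywhere in this change.
use_jumbo_build = true
jumbo_file_merge_limit = 50  # merge at most 50 sources per jumbo file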
diff --git a/deps/v8/build/config/mips.gni b/deps/v8/build/config/mips.gni
new file mode 100644
index 0000000000..6365088b14
--- /dev/null
+++ b/deps/v8/build/config/mips.gni
@@ -0,0 +1,67 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "mips*" contexts, where
+# MIPS code is being compiled. But they can also be relevant in other
+# contexts, when the code changes its behavior based on the CPU it is
+# generating code for.
+declare_args() {
+ # MIPS MultiMedia Instruction compilation flag.
+ mips_use_mmi = false
+}
+
+if (current_cpu == "mipsel" || v8_current_cpu == "mipsel" ||
+ current_cpu == "mips" || v8_current_cpu == "mips") {
+ declare_args() {
+ # MIPS arch variant. Possible values are:
+ # "r1"
+ # "r2"
+ # "r6"
+ # "loongson3"
+ mips_arch_variant = "r1"
+
+ # MIPS DSP ASE revision. Possible values are:
+ # 0: unavailable
+ # 1: revision 1
+ # 2: revision 2
+ mips_dsp_rev = 0
+
+ # MIPS SIMD Arch compilation flag.
+ mips_use_msa = false
+
+ # MIPS floating-point ABI. Possible values are:
+ # "hard": sets the GCC -mhard-float option.
+ # "soft": sets the GCC -msoft-float option.
+ mips_float_abi = "hard"
+
+ # MIPS32 floating-point register width. Possible values are:
+ # "fp32": sets the GCC -mfp32 option.
+ # "fp64": sets the GCC -mfp64 option.
+ # "fpxx": sets the GCC -mfpxx option.
+ mips_fpu_mode = "fp32"
+ }
+} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el" ||
+ current_cpu == "mips64" || v8_current_cpu == "mips64") {
+ # MIPS arch variant. Possible values are:
+ # "r2"
+ # "r6"
+ # "loongson3"
+ if (current_os == "android" || target_os == "android") {
+ declare_args() {
+ mips_arch_variant = "r6"
+
+ # MIPS SIMD Arch compilation flag.
+ mips_use_msa = true
+ }
+ } else {
+ declare_args() {
+ mips_arch_variant = "r2"
+
+ # MIPS SIMD Arch compilation flag.
+ mips_use_msa = false
+ }
+ }
+}
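For example, a hedged args.gn sketch selecting a MIPS32r6 target with MSA and 64-bit FPU registers (the values are illustrative, not recommendations):

# Illustrative args.gn for a 32-bit little-endian MIPS build:
target_cpu = "mipsel"
mips_arch_variant = "r6"
mips_use_msa = true
mips_fpu_mode = "fp64"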
diff --git a/deps/v8/build/config/nacl/BUILD.gn b/deps/v8/build/config/nacl/BUILD.gn
new file mode 100644
index 0000000000..d7b22ecf2c
--- /dev/null
+++ b/deps/v8/build/config/nacl/BUILD.gn
@@ -0,0 +1,143 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+
+# Native Client Definitions
+config("nacl_defines") {
+ if (is_linux || is_android || is_nacl) {
+ defines = [
+ "_POSIX_C_SOURCE=199506",
+ "_XOPEN_SOURCE=600",
+ "_GNU_SOURCE=1",
+ "__STDC_LIMIT_MACROS=1",
+ ]
+ } else if (is_win) {
+ defines = [ "__STDC_LIMIT_MACROS=1" ]
+ }
+
+ if (current_cpu == "pnacl" && !is_nacl_nonsfi) {
+ # TODO: Remove the following definition once NACL_BUILD_ARCH and
+ # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain.
+ defines += [ "NACL_BUILD_ARCH=pnacl" ]
+ }
+}
+
+config("nexe_defines") {
+ defines = [
+ "DYNAMIC_ANNOTATIONS_ENABLED=1",
+ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_",
+ ]
+}
+
+config("nacl_warnings") {
+ if (is_win) {
+ # Some NaCl code uses forward declarations of static const variables,
+ # with initialized definitions later on. (The alternative would be
+ # many, many more forward declarations of everything used in that
+ # const variable's initializer before the definition.) The Windows
+ # compiler is too stupid to notice that there is an initializer later
+ # in the file, and warns about the forward declaration.
+ cflags = [ "/wd4132" ]
+ }
+}
+
+# The base target that all targets in the NaCl build should depend on.
+# This allows configs to be modified for everything in the NaCl build, even when
+# the NaCl build is composed into the Chrome build. (GN has no functionality to
+# add flags to everything in //native_client, having a base target works around
+# that limitation.)
+source_set("nacl_base") {
+ public_configs = [
+ ":nacl_defines",
+ ":nacl_warnings",
+ ]
+ if (current_os == "nacl") {
+ public_configs += [ ":nexe_defines" ]
+ }
+}
+
+config("compiler") {
+ configs = []
+ cflags = []
+ ldflags = []
+ libs = []
+
+ if (is_clang && current_cpu != "pnacl") {
+ # -no-integrated-as is the default in nacl-clang for historical
+ # compatibility with inline assembly code and so forth. But there
+ # are no such cases in Chromium code, and -integrated-as is nicer in
+ # general. Moreover, the IRT must be built using LLVM's assembler
+ # on x86-64 to preserve sandbox base address hiding. Use it
+ # everywhere for consistency (and possibly quicker builds).
+ cflags += [ "-integrated-as" ]
+ }
+ if (is_nacl_nonsfi) {
+ cflags += [ "--pnacl-allow-translate" ]
+ ldflags += [
+ "--pnacl-allow-translate",
+ "--pnacl-allow-native",
+ "-Wl,--noirt",
+ "-Wt,--noirt",
+ "-Wt,--noirtshim",
+
+ # The clang driver automatically injects -lpthread when using libc++, but
+ # the toolchain doesn't have it yet. To get around this, use
+ # -nodefaultlibs and make each executable target depend on
+ # "//native_client/src/nonsfi/irt:nacl_sys_private".
+ "-nodefaultlibs",
+ ]
+ libs += [
+ "c++",
+ "m",
+ "c",
+ "pnaclmm",
+ ]
+ include_dirs = [ "//native_client/src/public/linux_syscalls" ]
+ }
+
+ asmflags = cflags
+}
+
+config("compiler_codegen") {
+ cflags = []
+
+ if (is_nacl_irt) {
+ cflags += [
+ # A debugger should be able to unwind IRT call frames. This is
+ # the default behavior on x86-64 and when compiling C++ with
+ # exceptions enabled; the change is for the benefit of x86-32 C.
+ # The frame pointer is unnecessary when unwind tables are used.
+ "-fasynchronous-unwind-tables",
+ "-fomit-frame-pointer",
+ ]
+
+ if (current_cpu == "x86") {
+ # The x86-32 IRT needs to be callable with an under-aligned
+ # stack; so we disable SSE instructions, which can fault on
+ # misaligned addresses. See
+ # https://code.google.com/p/nativeclient/issues/detail?id=3935
+ cflags += [
+ "-mstackrealign",
+ "-mno-sse",
+ ]
+ }
+ }
+
+ asmflags = cflags
+}
+
+config("irt_optimize") {
+ cflags = [
+ # Optimize for space, keep the IRT nexe small.
+ "-Os",
+
+ # These are omitted from non-IRT libraries to keep the libraries
+ # themselves small.
+ "-ffunction-sections",
+ "-fdata-sections",
+ ]
+
+ ldflags = [ "-Wl,--gc-sections" ]
+}
diff --git a/deps/v8/build/config/nacl/config.gni b/deps/v8/build/config/nacl/config.gni
new file mode 100644
index 0000000000..77e15fc51c
--- /dev/null
+++ b/deps/v8/build/config/nacl/config.gni
@@ -0,0 +1,55 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Native Client supports both the Newlib and Glibc C libraries, with
+  # Newlib as the default; set this to true to build against Glibc
+  # instead.
+ is_nacl_glibc = false
+}
+
+is_nacl_irt = false
+is_nacl_nonsfi = false
+
+nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86"
+
+if (is_nacl_glibc) {
+ if (current_cpu == "x86" || current_cpu == "x64") {
+ nacl_toolchain_package = "nacl_x86_glibc"
+ } else if (current_cpu == "arm") {
+ nacl_toolchain_package = "nacl_arm_glibc"
+ }
+} else {
+ nacl_toolchain_package = "pnacl_newlib"
+}
+
+if (current_cpu == "pnacl") {
+ _nacl_tuple = "pnacl"
+} else if (current_cpu == "x86" || current_cpu == "x64") {
+ _nacl_tuple = "x86_64-nacl"
+} else if (current_cpu == "arm") {
+ _nacl_tuple = "arm-nacl"
+} else if (current_cpu == "mipsel") {
+ _nacl_tuple = "mipsel-nacl"
+} else {
+  # In order to allow this file to be included unconditionally from build
+  # files that can't depend on //components/nacl/features.gni, we provide a
+  # dummy value that should be harmless if nacl isn't needed. If nacl *is*
+  # needed, this will result in a real error, indicating that people need
+  # to set the toolchain path correctly.
+}
+
+nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin"
+nacl_toolchain_tooldir =
+ "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}"
+nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-"
+
+nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu
+is_nacl_irt = current_toolchain == nacl_irt_toolchain
+
+# Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC
+# applications.
+nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi"
+is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain
diff --git a/deps/v8/build/config/nacl/rules.gni b/deps/v8/build/config/nacl/rules.gni
new file mode 100644
index 0000000000..9bb4ede89c
--- /dev/null
+++ b/deps/v8/build/config/nacl/rules.gni
@@ -0,0 +1,188 @@
+# Copyright 2015 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+
+# Generate a nmf file
+#
+# Native Client Manifest (nmf) is a JSON file that tells the browser where to
+# download and load Native Client application files and libraries.
+#
+# Variables:
+# executables: .nexe/.pexe/.bc executables to generate nmf for
+# lib_prefix: path to prepend to shared libraries in the nmf
+# nmf: the name and the path of the output file
+# nmfflags: additional flags for the nmf generator
+# stage_dependencies: directory for staging libraries
+template("generate_nmf") {
+ assert(defined(invoker.executables), "Must define executables")
+ assert(defined(invoker.nmf), "Must define nmf")
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "data_deps",
+ "executables",
+ "lib_prefix",
+ "nmf",
+ "nmfflags",
+ "public_deps",
+ "stage_dependencies",
+ "testonly",
+ "visibility",
+ ])
+ if (!defined(nmfflags)) {
+ nmfflags = []
+ }
+
+ # TODO(phosek): Remove this conditional once
+ # https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is
+ # resolved.
+ if (current_cpu == "pnacl") {
+ objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump")
+ } else {
+ objdump = rebase_path("${nacl_toolprefix}objdump")
+ }
+ if (host_os == "win") {
+ objdump += ".exe"
+ }
+
+ script = "//native_client_sdk/src/tools/create_nmf.py"
+ inputs = [
+ objdump,
+ ]
+ sources = executables
+ outputs = [
+ nmf,
+ ]
+ if (is_nacl_glibc) {
+ if (defined(stage_dependencies)) {
+ nmfflags += [ "--stage-dependencies=" +
+ rebase_path(stage_dependencies, root_build_dir) ]
+ lib_path = stage_dependencies
+ } else {
+ lib_path = root_build_dir
+ }
+ if (defined(lib_prefix)) {
+ nmfflags += [ "--lib-prefix=" + lib_prefix ]
+ lib_path += "/${lib_prefix}"
+ }
+
+ # Starts empty so the code below can use += everywhere.
+ data = []
+
+ nmfflags +=
+ [ "--library-path=" + rebase_path(root_out_dir, root_build_dir) ]
+
+ # NOTE: There is no explicit dependency for the lib directory
+ # (lib32 and lib64 for x86/x64) created in the product directory.
+ # They are created as a side-effect of nmf creation.
+ if (current_cpu != "x86" && current_cpu != "x64") {
+ nmfflags +=
+ [ "--library-path=" +
+ rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir) ]
+ if (current_cpu == "arm") {
+ data += [ "${lib_path}/libarm/" ]
+ } else {
+ data += [ "${lib_path}/lib/" ]
+ }
+ } else {
+ # For x86-32, the lib/ directory is called lib32/ instead.
+ if (current_cpu == "x86") {
+ nmfflags +=
+ [ "--library-path=" +
+ rebase_path("${nacl_toolchain_tooldir}/lib32", root_build_dir) ]
+ data += [ "${lib_path}/lib32/" ]
+ }
+
+ # x86-32 Windows needs to build both x86-32 and x86-64 NaCl
+ # binaries into the same nmf covering both architectures. That
+ # gets handled at a higher level (see the nacl_test_data template),
+ # so a single generate_nmf invocation gets both x86-32 and x86-64
+ # nexes listed in executables.
+ if (current_cpu == "x64" || target_os == "win") {
+ # For x86-64, the lib/ directory is called lib64/ instead
+ # when copied by create_nmf.py.
+ glibc_tc = "//build/toolchain/nacl:glibc"
+ assert(current_toolchain == "${glibc_tc}_${current_cpu}")
+ if (current_cpu == "x64") {
+ x64_out_dir = root_out_dir
+ } else {
+ x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)",
+ "root_out_dir")
+ }
+ nmfflags += [
+ "--library-path=" + rebase_path(x64_out_dir, root_build_dir),
+ "--library-path=" +
+ rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir),
+ ]
+ data += [ "${lib_path}/lib64/" ]
+ }
+ }
+ }
+ args = [
+ "--no-default-libpath",
+ "--objdump=" + rebase_path(objdump, root_build_dir),
+ "--output=" + rebase_path(nmf, root_build_dir),
+ ] + nmfflags + rebase_path(sources, root_build_dir)
+ if (is_nacl_glibc && current_cpu == "arm") {
+ deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ]
+ }
+ }
+}
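A minimal sketch of invoking this template; the target and file names are hypothetical:

# Hypothetical invocation; create_nmf.py receives the executables plus
# any extra nmfflags.
generate_nmf("hello_world_nmf") {
  nmf = "$root_build_dir/hello_world.nmf"
  executables = [ "$root_build_dir/hello_world.nexe" ]
  deps = [ ":hello_world_nexe" ]
}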
+
+# Generate a nmf file for Non-SFI tests
+#
+# Non-SFI tests use a different manifest format from regular Native Client
+# and as such require a different generator.
+#
+# Variables:
+# executable: Non-SFI .nexe executable to generate nmf for
+# nmf: the name and the path of the output file
+# nmfflags: additional flags for the nmf generator
+template("generate_nonsfi_test_nmf") {
+ assert(defined(invoker.executable), "Must define executable")
+ assert(defined(invoker.nmf), "Must define nmf")
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "data_deps",
+ "executable",
+ "nmf",
+ "testonly",
+ "public_deps",
+ "visibility",
+ ])
+
+ script = "//ppapi/tests/create_nonsfi_test_nmf.py"
+ sources = [
+ executable,
+ ]
+ outputs = [
+ nmf,
+ ]
+
+ # NOTE: We use target_cpu rather than current_cpu on purpose because
+ # current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI
+ # .nexe executable is always translated to run on the target machine.
+ if (target_cpu == "x86") {
+ arch = "x86-32"
+ } else if (target_cpu == "x64") {
+ arch = "x86-64"
+ } else {
+ arch = target_cpu
+ }
+ args = [
+ "--program=" + rebase_path(executable, root_build_dir),
+ "--arch=${arch}",
+ "--output=" + rebase_path(nmf, root_build_dir),
+ ]
+ if (defined(invoker.nmfflags)) {
+ args += invoker.nmfflags
+ }
+ }
+}
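And correspondingly for the Non-SFI variant (again with hypothetical names):

# Hypothetical invocation; note the singular "executable".
generate_nonsfi_test_nmf("hello_nonsfi_nmf") {
  nmf = "$root_build_dir/hello_nonsfi.nmf"
  executable = "$root_build_dir/hello_nonsfi.nexe"
  deps = [ ":hello_nonsfi_nexe" ]
}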
diff --git a/deps/v8/build/config/pch.gni b/deps/v8/build/config/pch.gni
new file mode 100644
index 0000000000..93bd2fedc3
--- /dev/null
+++ b/deps/v8/build/config/pch.gni
@@ -0,0 +1,12 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+
+declare_args() {
+ # Precompiled header file support is by default available,
+ # but for distributed build system uses (like goma) or when
+ # doing official builds.
+ enable_precompiled_headers = !is_official_build && !use_goma
+}
diff --git a/deps/v8/build/config/posix/BUILD.gn b/deps/v8/build/config/posix/BUILD.gn
new file mode 100644
index 0000000000..ee42d421b2
--- /dev/null
+++ b/deps/v8/build/config/posix/BUILD.gn
@@ -0,0 +1,75 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# This build configuration is used by both Fuchsia and POSIX systems.
+assert(is_posix || is_fuchsia)
+
+group("posix") {
+ visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Posix-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ asmflags = []
+ cflags = []
+ cflags_c = []
+ cflags_cc = []
+ cflags_objc = []
+ cflags_objcc = []
+ defines = []
+ ldflags = []
+
+ if (!is_mac && !is_ios && sysroot != "") {
+ # Pass the sysroot to all C compiler variants, the assembler, and linker.
+ sysroot_flags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
+ if (is_linux) {
+ # This is here so that all files get recompiled after a sysroot roll and
+ # when turning the sysroot on or off. (defines are passed via the command
+ # line, and build system rebuilds things when their commandline
+ # changes). Nothing should ever read this define.
+ sysroot_hash =
+ exec_script("//build/linux/sysroot_scripts/install-sysroot.py",
+ [ "--print-hash=$current_cpu" ],
+ "trim string",
+ [ "//build/linux/sysroot_scripts/sysroots.json" ])
+ defines += [ "CR_SYSROOT_HASH=$sysroot_hash" ]
+ }
+ asmflags += sysroot_flags
+
+ link_sysroot_flags =
+ [ "--sysroot=" + rebase_path(link_sysroot, root_build_dir) ]
+ ldflags += link_sysroot_flags
+
+ # When use_custom_libcxx=true, some -isystem flags get passed to
+ # cflags_cc to set up libc++ include paths. We want to make sure
+    # the sysroot includes take lower precedence than the libc++
+ # ones, so they must appear later in the command line. However,
+ # the gn reference states "These variant-specific versions of
+ # cflags* will be appended on the compiler command line after
+ # 'cflags'." Because of this, we must set the sysroot flags for
+ # all cflags variants instead of using 'cflags' directly.
+ cflags_c += sysroot_flags
+ cflags_cc += sysroot_flags
+ cflags_objc += sysroot_flags
+ cflags_objcc += sysroot_flags
+
+ # Need to get some linker flags out of the sysroot.
+ ld_paths = exec_script("sysroot_ld_path.py",
+ [
+ rebase_path("//build/linux/sysroot_ld_path.sh",
+ root_build_dir),
+ rebase_path(link_sysroot),
+ ],
+ "list lines")
+ foreach(ld_path, ld_paths) {
+ ld_path = rebase_path(ld_path, root_build_dir)
+ ldflags += [ "-L" + ld_path ]
+ }
+ }
+}
diff --git a/deps/v8/build/config/posix/sysroot_ld_path.py b/deps/v8/build/config/posix/sysroot_ld_path.py
new file mode 100644
index 0000000000..7056207a23
--- /dev/null
+++ b/deps/v8/build/config/posix/sysroot_ld_path.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
+
+# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
+# Python in this file.
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+ print "Need two arguments"
+ sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
+result = result.replace(" ", "\n")
+if result != "":
+ print result
diff --git a/deps/v8/build/config/python.gni b/deps/v8/build/config/python.gni
new file mode 100644
index 0000000000..81a1690076
--- /dev/null
+++ b/deps/v8/build/config/python.gni
@@ -0,0 +1,165 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a group() that lists Python sources as |data|.
+# Having such targets serves two purposes:
+# 1) Causes files to be included in runtime_deps, so that they are uploaded to
+# swarming when running tests remotely.
+# 2) Causes "gn analyze" to know about all Python inputs so that tests will be
+# re-run when relevant Python files change.
+#
+# All non-trivial Python scripts should use a "pydeps" file to track their
+# sources. To create a .pydep file for a target in //example:
+#
+# build/print_python_deps.py \
+# --root example \
+# --output example/$target_name.pydeps \
+# path/to/your/script.py
+#
+# Keep the .pydep file up-to-date by adding to //PRESUBMIT.py under one of:
+# _ANDROID_SPECIFIC_PYDEPS_FILES, _GENERIC_PYDEPS_FILES
+#
+# Variables
+# pydeps_file: Path to .pydeps file to read sources from (optional).
+# data: Additional files to include in data. E.g. non-.py files needed by the
+# library, or .py files that are conditionally / lazily imported.
+#
+# Example
+# python_library("my_library_py") {
+# pydeps_file = "my_library.pydeps"
+# data = [ "foo.dat" ]
+# }
+template("python_library") {
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "testonly",
+ "visibility",
+ ])
+
+ if (defined(invoker.pydeps_file)) {
+ _py_files = read_file(invoker.pydeps_file, "list lines")
+
+ # Filter out comments.
+ set_sources_assignment_filter([ "#*" ])
+ sources = _py_files
+
+ # Even though the .pydep file is not used at runtime, it must be added
+ # so that "gn analyze" will mark the target as changed when .py files
+ # are removed but none are added or modified.
+ data = sources + [ invoker.pydeps_file ]
+ } else {
+ data = []
+ }
+ if (defined(invoker.data)) {
+ data += invoker.data
+ }
+ }
+}
+
+# A template used for actions that execute a Python script, which has an
+# associated .pydeps file. In other words:
+#
+# - This is very similar to just an action(), except that |script| must point
+# to a Python script (e.g. "//build/.../foo.py") that has a corresponding
+# .pydeps file in the source tree (e.g. "//build/.../foo.pydeps").
+#
+# - The .pydeps file contains a list of python dependencies (imports really)
+# and is generated _manually_ by using a command like:
+#
+# build/print_python_deps.py --inplace build/android/gyp/foo.py
+#
+# Example
+# action_with_pydeps("create_foo") {
+# script = "myscript.py"
+# args = [...]
+# }
+template("action_with_pydeps") {
+  # Read the .pydeps file now. Note that this is done every time this
+  # template is called, but benchmarking shows no measurable impact on
+  # overall 'gn gen' speed.
+  _pydeps_file = invoker.script + "deps"  # e.g. "foo.py" -> "foo.pydeps"
+ _pydeps_raw = read_file(_pydeps_file, "list lines")
+
+ # Filter out comments.
+ set_sources_assignment_filter([ "#*" ])
+ sources = _pydeps_raw
+ _pydeps = sources
+ sources = []
+
+ action(target_name) {
+ # Forward all variables. Ensure that testonly and visibility are forwarded
+ # explicitly, since this performs recursive scope lookups, which is
+ # required to ensure their definition from scopes above the caller are
+ # properly handled. All other variables are forwarded with "*", which
+ # doesn't perform recursive lookups at all. See https://crbug.com/862232
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ forward_variables_from(invoker,
+ "*",
+ [
+ "testonly",
+ "visibility",
+ ])
+
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ # Dependencies are listed relative to the script directory, but inputs
+ # expects paths that are relative to the current BUILD.gn
+ _script_dir = get_path_info(_pydeps_file, "dir")
+ inputs += rebase_path(_pydeps, ".", _script_dir)
+ }
+}
+
+template("action_foreach_with_pydeps") {
+  _pydeps_file = invoker.script + "deps"  # e.g. "foo.py" -> "foo.pydeps"
+ _pydeps_raw = read_file(_pydeps_file, "list lines")
+
+ # Filter out comments.
+ # This is a bit convoluted to preserve the value of sources if defined.
+ _old_sources = []
+ if (defined(sources)) {
+ _old_sources = sources
+ }
+ set_sources_assignment_filter([ "#*" ])
+ sources = _pydeps_raw
+ _pydeps = sources
+ set_sources_assignment_filter([])
+ sources = _old_sources
+
+ action_foreach(target_name) {
+ # Forward all variables. Ensure that testonly and visibility are forwarded
+ # explicitly, since this performs recursive scope lookups, which is
+ # required to ensure their definition from scopes above the caller are
+ # properly handled. All other variables are forwarded with "*", which
+ # doesn't perform recursive lookups at all. See https://crbug.com/862232
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ forward_variables_from(invoker,
+ "*",
+ [
+ "testonly",
+ "visibility",
+ ])
+
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ # Dependencies are listed relative to the script directory, but inputs
+ # expects paths that are relative to the current BUILD.gn
+ _script_dir = get_path_info(script, "dir")
+ inputs += rebase_path(_pydeps, ".", _script_dir)
+ }
+}
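Unlike the two templates above, this one ships without a usage example in its comments; a hedged sketch follows (the script path and files are hypothetical, and the script must have a matching .pydeps file next to it):

# Hypothetical usage; behaves like action_foreach(), with the script's
# .pydeps entries added to inputs automatically.
action_foreach_with_pydeps("generate_outputs") {
  script = "//build/example/generate_output.py"  # needs generate_output.pydeps
  sources = [ "a.txt", "b.txt" ]
  outputs = [ "$target_out_dir/{{source_name_part}}.out" ]
  args = [
    "{{source}}",
    rebase_path("$target_out_dir", root_build_dir) +
        "/{{source_name_part}}.out",
  ]
}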
diff --git a/deps/v8/build/config/sanitizers/BUILD.gn b/deps/v8/build/config/sanitizers/BUILD.gn
new file mode 100644
index 0000000000..fd54262251
--- /dev/null
+++ b/deps/v8/build/config/sanitizers/BUILD.gn
@@ -0,0 +1,650 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+}
+
+# Contains the dependencies needed for sanitizers to link into executables and
+# shared_libraries.
+group("deps") {
+ if (using_sanitizer) {
+ public_configs = [
+ ":sanitizer_options_link_helper",
+
+ # Even when a target removes default_sanitizer_flags, it may be depending
+ # on a library that did not remove default_sanitizer_flags. Thus, we need
+ # to add the ldflags here as well as in default_sanitizer_flags.
+ ":default_sanitizer_ldflags",
+ ]
+ deps = [
+ ":options_sources",
+ ]
+ if (is_win) {
+ exe = ".exe"
+ } else {
+ exe = ""
+ }
+ data = [
+ "//tools/valgrind/asan/",
+ "$clang_base_path/bin/llvm-symbolizer${exe}",
+ ]
+ if (use_prebuilt_instrumented_libraries ||
+ use_locally_built_instrumented_libraries) {
+ deps += [ "//third_party/instrumented_libraries:deps" ]
+ }
+
+ # ASAN is supported on iOS but the runtime library depends on the compiler
+ # used (Chromium version of clang versus Xcode version of clang). Only copy
+ # the ASAN runtime on iOS if building with Chromium clang.
+ if (is_win || is_mac || (is_ios && !use_xcode_clang)) {
+ data_deps = [
+ ":copy_asan_runtime",
+ ]
+ }
+ if (is_mac || (is_ios && !use_xcode_clang)) {
+ public_deps = [
+ ":asan_runtime_bundle_data",
+ ]
+ }
+ }
+}
+
+if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && using_sanitizer) {
+ if (is_mac) {
+ _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib"
+ } else if (is_ios) {
+ _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib"
+ } else if (is_win && target_cpu == "x86") {
+ _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-i386.dll"
+ } else if (is_win && target_cpu == "x64") {
+ _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll"
+ }
+
+ _clang_rt_dso_full_path =
+ "$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path"
+
+ if (!is_ios) {
+ copy("copy_asan_runtime") {
+ sources = [
+ _clang_rt_dso_full_path,
+ ]
+ outputs = [
+ "$root_out_dir/{{source_file_part}}",
+ ]
+ }
+ } else {
+    # On iOS, the runtime library needs to be code signed (ad-hoc signature)
+    # starting with Xcode 8, so use an action instead of a copy on iOS.
+ action("copy_asan_runtime") {
+ script = "//build/config/ios/codesign.py"
+ sources = [
+ _clang_rt_dso_full_path,
+ ]
+ outputs = [
+ "$root_out_dir/" + get_path_info(sources[0], "file"),
+ ]
+ args = [
+ "code-sign-file",
+ "--identity=" + ios_code_signing_identity,
+ "--output=" + rebase_path(outputs[0], root_build_dir),
+ rebase_path(sources[0], root_build_dir),
+ ]
+ }
+ }
+
+ if (is_mac || is_ios) {
+ bundle_data("asan_runtime_bundle_data") {
+ sources = get_target_outputs(":copy_asan_runtime")
+ outputs = [
+ "{{bundle_executable_dir}}/{{source_file_part}}",
+ ]
+ public_deps = [
+ ":copy_asan_runtime",
+ ]
+ }
+ }
+}
+
+config("sanitizer_options_link_helper") {
+ if (is_mac || is_ios) {
+ ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ]
+ } else if (!is_win) {
+ ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
+ }
+}
+
+static_library("options_sources") {
+ # This is a static_library instead of a source_set, as it shouldn't be
+ # unconditionally linked into targets.
+ visibility = [
+ ":deps",
+ "//:gn_visibility",
+ ]
+ sources = [
+ "//build/sanitizers/sanitizer_options.cc",
+ ]
+
+ # Don't compile this target with any sanitizer code. It can be called from
+ # the sanitizer runtimes, so instrumenting these functions could cause
+ # recursive calls into the runtime if there is an error.
+ configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+ if (is_asan) {
+ if (!defined(asan_suppressions_file)) {
+ asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
+ }
+ sources += [ asan_suppressions_file ]
+ }
+
+ if (is_lsan) {
+ if (!defined(lsan_suppressions_file)) {
+ lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
+ }
+ sources += [ lsan_suppressions_file ]
+ }
+
+ if (is_tsan) {
+ if (!defined(tsan_suppressions_file)) {
+ tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"
+ }
+ sources += [ tsan_suppressions_file ]
+ }
+}
+
+# Applies linker flags necessary when either :deps or :default_sanitizer_flags
+# are used.
+config("default_sanitizer_ldflags") {
+ visibility = [
+ ":default_sanitizer_flags",
+ ":deps",
+ ]
+
+ if (is_posix || is_fuchsia) {
+ ldflags = []
+ if (is_asan) {
+ ldflags += [ "-fsanitize=address" ]
+ if (is_mac) {
+ # https://crbug.com/708707
+ ldflags += [ "-fno-sanitize-address-use-after-scope" ]
+ } else {
+ ldflags += [ "-fsanitize-address-use-after-scope" ]
+ }
+ }
+ if (is_hwasan) {
+ ldflags += [ "-fsanitize=hwaddress" ]
+ }
+ if (is_lsan) {
+ ldflags += [ "-fsanitize=leak" ]
+ }
+ if (is_tsan) {
+ ldflags += [ "-fsanitize=thread" ]
+ }
+ if (is_msan) {
+ ldflags += [ "-fsanitize=memory" ]
+ }
+ if (is_ubsan || is_ubsan_security) {
+ ldflags += [ "-fsanitize=undefined" ]
+ }
+ if (is_ubsan_null) {
+ ldflags += [ "-fsanitize=null" ]
+ }
+ if (is_ubsan_vptr) {
+ ldflags += [ "-fsanitize=vptr" ]
+ }
+
+ if (use_sanitizer_coverage) {
+ if (use_libfuzzer) {
+ ldflags += [ "-fsanitize=fuzzer-no-link" ]
+ if (is_mac) {
+ # TODO(crbug.com/926588): on macOS, dead code stripping does not work
+ # well with `pc-table` instrumentation enabled by `fuzzer-no-link`.
+ ldflags += [ "-fno-sanitize-coverage=pc-table" ]
+ }
+ } else {
+ ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
+ }
+ }
+
+ if (is_cfi && current_toolchain == default_toolchain) {
+ ldflags += [ "-fsanitize=cfi-vcall" ]
+ if (use_cfi_cast) {
+ ldflags += [
+ "-fsanitize=cfi-derived-cast",
+ "-fsanitize=cfi-unrelated-cast",
+ ]
+ }
+ if (use_cfi_icall) {
+ ldflags += [ "-fsanitize=cfi-icall" ]
+ }
+ if (use_cfi_diag) {
+ ldflags += [ "-fno-sanitize-trap=cfi" ]
+ if (use_cfi_recover) {
+ ldflags += [ "-fsanitize-recover=cfi" ]
+ }
+ }
+ }
+ } else if (is_win) {
+ # Windows directly calls link.exe instead of the compiler driver when
+ # linking. Hence, pass the runtime libraries instead of -fsanitize=address
+ # or -fsanitize=fuzzer.
+ if (is_asan && is_component_build) {
+ # In the static-library build, ASan libraries are different for
+ # executables and dlls, see link_executable and link_shared_library below.
+ # This here handles only the component build.
+ if (target_cpu == "x64") {
+ # Windows 64-bit.
+ libs = [
+ "clang_rt.asan_dynamic-x86_64.lib",
+ "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib",
+ ]
+ } else {
+ assert(target_cpu == "x86", "WinASan unsupported architecture")
+ libs = [
+ "clang_rt.asan_dynamic-i386.lib",
+ "clang_rt.asan_dynamic_runtime_thunk-i386.lib",
+ ]
+ }
+ }
+ if (use_libfuzzer) {
+ assert(target_cpu == "x64", "LibFuzzer unsupported architecture")
+ assert(!is_component_build,
+ "LibFuzzer only supports non-component builds on Windows")
+
+ # Incremental linking causes padding that messes up SanitizerCoverage.
+ # Don't do it.
+ ldflags = [ "/INCREMENTAL:NO" ]
+ }
+ }
+}
+
+config("common_sanitizer_flags") {
+ cflags = []
+
+ if (using_sanitizer) {
+ assert(is_clang, "sanitizers only supported with clang")
+ assert(!is_official_build, "sanitizers not supported in official builds")
+
+ cflags += [
+ # Column info in debug data confuses Visual Studio's debugger, so don't
+ # use this by default. However, clusterfuzz needs it for good
+ # attribution of reports to CLs, so turn it on there.
+ "-gcolumn-info",
+ ]
+
+ # Frame pointers are controlled in //build/config/compiler:default_stack_frames
+ }
+}
+
+config("asan_flags") {
+ cflags = []
+ if (is_asan) {
+ cflags += [ "-fsanitize=address" ]
+ if (!is_mac) {
+ cflags += [ "-fsanitize-address-use-after-scope" ]
+ } else {
+ # https://crbug.com/708707
+ cflags += [ "-fno-sanitize-address-use-after-scope" ]
+ }
+ if (!asan_globals) {
+ cflags += [
+ "-mllvm",
+ "-asan-globals=0",
+ ]
+ }
+ if (asan_use_blacklist) {
+ if (is_win) {
+ if (!defined(asan_win_blacklist_path)) {
+ asan_win_blacklist_path =
+ rebase_path("//tools/memory/asan/blacklist_win.txt",
+ root_build_dir)
+ }
+ cflags += [ "-fsanitize-blacklist=$asan_win_blacklist_path" ]
+ } else {
+ # TODO(rnk): Remove this as discussed in http://crbug.com/427202.
+ if (!defined(asan_blacklist_path)) {
+ asan_blacklist_path =
+ rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir)
+ }
+ cflags += [ "-fsanitize-blacklist=$asan_blacklist_path" ]
+ }
+ }
+ }
+}
+
+config("link_executable") {
+ if (is_asan && is_win && !is_component_build) {
+ if (target_cpu == "x64") {
+ ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ]
+ } else {
+ assert(target_cpu == "x86", "WinASan unsupported architecture")
+ ldflags = [ "-wholearchive:clang_rt.asan-i386.lib" ]
+ }
+ }
+}
+
+config("link_shared_library") {
+ if (is_asan && is_win && !is_component_build) {
+ if (target_cpu == "x64") {
+ libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ]
+ } else {
+ assert(target_cpu == "x86", "WinASan unsupported architecture")
+ libs = [ "clang_rt.asan_dll_thunk-i386.lib" ]
+ }
+ }
+}
+
+config("cfi_flags") {
+ cflags = []
+ if (is_cfi && current_toolchain == default_toolchain) {
+ if (!defined(cfi_blacklist_path)) {
+ cfi_blacklist_path =
+ rebase_path("//tools/cfi/blacklist.txt", root_build_dir)
+ }
+ cflags += [
+ "-fsanitize=cfi-vcall",
+ "-fsanitize-blacklist=$cfi_blacklist_path",
+ ]
+
+ if (use_cfi_cast) {
+ cflags += [
+ "-fsanitize=cfi-derived-cast",
+ "-fsanitize=cfi-unrelated-cast",
+ ]
+ }
+
+ if (use_cfi_icall) {
+ cflags += [ "-fsanitize=cfi-icall" ]
+ }
+
+ if (use_cfi_diag) {
+ cflags += [ "-fno-sanitize-trap=cfi" ]
+ if (is_win) {
+ cflags += [
+ "/Oy-",
+ "/Ob0",
+ ]
+ } else {
+ cflags += [
+ "-fno-inline-functions",
+ "-fno-inline",
+ "-fno-omit-frame-pointer",
+ "-O1",
+ ]
+ }
+ if (use_cfi_recover) {
+ cflags += [ "-fsanitize-recover=cfi" ]
+ }
+ }
+ }
+}
+
+# crbug.com/785442: Fix cfi-icall failures for code that casts pointer argument
+# types in function pointer type signatures.
+config("cfi_icall_generalize_pointers") {
+ if (is_clang && is_cfi && use_cfi_icall) {
+ cflags = [ "-fsanitize-cfi-icall-generalize-pointers" ]
+ }
+}
+
+config("cfi_icall_disable") {
+ if (is_clang && is_cfi && use_cfi_icall) {
+ cflags = [ "-fno-sanitize=cfi-icall" ]
+ }
+}
+
+config("coverage_flags") {
+ cflags = []
+ if (use_sanitizer_coverage) {
+ # Used by sandboxing code to allow coverage dump to be written on the disk.
+ defines = [ "SANITIZER_COVERAGE" ]
+
+ if (use_libfuzzer) {
+ cflags += [ "-fsanitize=fuzzer-no-link" ]
+ if (is_mac) {
+ # TODO(crbug.com/926588): on macOS, dead code stripping does not work
+ # well with `pc-table` instrumentation enabled by `fuzzer-no-link`.
+ cflags += [ "-fno-sanitize-coverage=pc-table" ]
+ }
+ } else {
+ cflags += [
+ "-fsanitize-coverage=$sanitizer_coverage_flags",
+ "-mllvm",
+ "-sanitizer-coverage-prune-blocks=1",
+ ]
+ if (current_cpu == "arm") {
+ # http://crbug.com/517105
+ cflags += [
+ "-mllvm",
+ "-sanitizer-coverage-block-threshold=0",
+ ]
+ }
+ }
+ }
+}
+
+config("hwasan_flags") {
+ if (is_hwasan) {
+ cflags = [ "-fsanitize=hwaddress" ]
+ }
+}
+
+config("lsan_flags") {
+ if (is_lsan) {
+ cflags = [ "-fsanitize=leak" ]
+ }
+}
+
+config("msan_flags") {
+ if (is_msan) {
+ assert(is_linux, "msan only supported on linux x86_64")
+ if (!defined(msan_blacklist_path)) {
+ msan_blacklist_path =
+ rebase_path("//tools/msan/blacklist.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=memory",
+ "-fsanitize-memory-track-origins=$msan_track_origins",
+ "-fsanitize-blacklist=$msan_blacklist_path",
+ ]
+ }
+}
+
+config("tsan_flags") {
+ if (is_tsan) {
+ assert(is_linux, "tsan only supported on linux x86_64")
+ if (!defined(tsan_blacklist_path)) {
+ tsan_blacklist_path =
+ rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=thread",
+ "-fsanitize-blacklist=$tsan_blacklist_path",
+ ]
+ }
+}
+
+config("ubsan_flags") {
+ cflags = []
+ if (is_ubsan) {
+ if (!defined(ubsan_blacklist_path)) {
+ ubsan_blacklist_path =
+ rebase_path("//tools/ubsan/blacklist.txt", root_build_dir)
+ }
+ cflags += [
+ # Yasm dies with an "Illegal instruction" error when bounds checking is
+ # enabled. See http://crbug.com/489901
+ # "-fsanitize=bounds",
+ "-fsanitize=float-divide-by-zero",
+ "-fsanitize=integer-divide-by-zero",
+ "-fsanitize=null",
+ "-fsanitize=object-size",
+ "-fsanitize=pointer-overflow",
+ "-fsanitize=return",
+ "-fsanitize=returns-nonnull-attribute",
+ "-fsanitize=shift-exponent",
+ "-fsanitize=signed-integer-overflow",
+ "-fsanitize=unreachable",
+ "-fsanitize=vla-bound",
+ "-fsanitize-blacklist=$ubsan_blacklist_path",
+ ]
+
+ # Chromecast ubsan builds fail to compile with these
+ # experimental flags, so only add them to non-chromecast ubsan builds.
+ if (!is_chromecast) {
+ cflags += [
+ # Employ the experimental PBQP register allocator to avoid slow
+ # compilation on files with too many basic blocks.
+ # See http://crbug.com/426271.
+ "-mllvm",
+ "-regalloc=pbqp",
+
+ # Speculatively use coalescing to slightly improve the code generated
+ # by PBQP regallocator. May increase compile time.
+ "-mllvm",
+ "-pbqp-coalescing",
+ ]
+ }
+ }
+}
+
+config("ubsan_no_recover") {
+ if (is_ubsan_no_recover) {
+ cflags = [ "-fno-sanitize-recover=undefined" ]
+ }
+}
+
+config("ubsan_security_flags") {
+ if (is_ubsan_security) {
+ if (!defined(ubsan_security_blacklist_path)) {
+ ubsan_security_blacklist_path =
+ rebase_path("//tools/ubsan/security_blacklist.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=function",
+ "-fsanitize=pointer-overflow",
+ "-fsanitize=shift",
+ "-fsanitize=signed-integer-overflow",
+ "-fsanitize=vla-bound",
+ "-fsanitize=vptr",
+ "-fsanitize-blacklist=$ubsan_security_blacklist_path",
+ ]
+ }
+}
+
+config("ubsan_null_flags") {
+ if (is_ubsan_null) {
+ cflags = [ "-fsanitize=null" ]
+ }
+}
+
+config("ubsan_vptr_flags") {
+ if (is_ubsan_vptr) {
+ if (!defined(ubsan_vptr_blacklist_path)) {
+ ubsan_vptr_blacklist_path =
+ rebase_path("//tools/ubsan/vptr_blacklist.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=vptr",
+ "-fsanitize-blacklist=$ubsan_vptr_blacklist_path",
+ ]
+ }
+}
+
+config("fuzzing_build_mode") {
+ if (use_fuzzing_engine && optimize_for_fuzzing) {
+ defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ]
+ }
+}
+
+all_sanitizer_configs = [
+ ":common_sanitizer_flags",
+ ":coverage_flags",
+ ":default_sanitizer_ldflags",
+ ":asan_flags",
+ ":cfi_flags",
+ ":hwasan_flags",
+ ":lsan_flags",
+ ":msan_flags",
+ ":tsan_flags",
+ ":ubsan_flags",
+ ":ubsan_no_recover",
+ ":ubsan_null_flags",
+ ":ubsan_security_flags",
+ ":ubsan_vptr_flags",
+ ":fuzzing_build_mode",
+]
+
+# This config is applied by default to all targets. It sets the compiler flags
+# for sanitizer usage, or, if no sanitizer is set, does nothing.
+#
+# This needs to be in a separate config so that targets can opt out of
+# sanitizers (by removing the config) if they desire. Even if a target
+# removes this config, executables & shared libraries should still depend on
+# :deps if any of their dependencies have not opted out of sanitizers.
+# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr.
+config("default_sanitizer_flags") {
+ configs = all_sanitizer_configs
+
+ if (use_sanitizer_configs_without_instrumentation) {
+ configs = []
+ }
+}
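Concretely, an opt-out might look like the hypothetical target below; note that it keeps the :deps dependency so the sanitizer runtime still links:

# Hypothetical target opting out of sanitizer instrumentation:
executable("uninstrumented_tool") {
  sources = [ "tool.cc" ]
  configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]

  # Still required: this target's dependencies may be instrumented, so
  # the sanitizer runtime must be linked in.
  deps = [ "//build/config/sanitizers:deps" ]
}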
+
+# This config is equivalent to default_sanitizer_flags, but it excludes
+# ubsan_vptr. This allows ubsan_vptr to be disabled selectively when needed,
+# in particular for third_party code that must be compiled without RTTI,
+# which ubsan_vptr requires.
+config("default_sanitizer_flags_but_ubsan_vptr") {
+ configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ]
+
+ if (use_sanitizer_configs_without_instrumentation) {
+ configs = []
+ }
+}
+
+config("default_sanitizer_flags_but_coverage") {
+ configs = all_sanitizer_configs - [ ":coverage_flags" ]
+
+ if (use_sanitizer_configs_without_instrumentation) {
+ configs = []
+ }
+}
+
+# This config is used by parts of the code that aren't targeted by fuzzers
+# and therefore don't need coverage instrumentation, and possibly won't need
+# sanitizer instrumentation either. The config also tells the compiler to
+# perform additional optimizations on the configured code, and it ensures
+# that linking that code into the rest of the binary (which is instrumented
+# with sanitizers) works. The config only does anything in a fuzzing build.
+config("not_fuzzed") {
+ if (use_fuzzing_engine) {
+ # Since we aren't instrumenting with coverage, code size is less of a
+ # concern, so use a more aggressive optimization level than
+ # optimize_for_fuzzing (-O1). When given multiple optimization flags, clang
+ # obeys the last one, so as long as this flag comes after -O1, it should work.
+ # Since this config will always be depended on after
+ # "//build/config/compiler:default_optimization" (which adds -O1 when
+ # optimize_for_fuzzing is true), -O2 should always be the second flag. Even
+ # though this sounds fragile, it isn't a big deal if it breaks, since proto
+      # fuzzers will still work; they will just be slightly slower.
+ cflags = [ "-O2" ]
+
+ # We need to include this config when we remove default_sanitizer_flags or
+ # else there will be linking errors. We would remove default_sanitizer_flags
+ # here as well, but gn doesn't permit this.
+ if (!is_msan) {
+ # We don't actually remove sanitization when MSan is being used so there
+ # is no need to add default_sanitizer_ldflags in that case
+ configs = [ ":default_sanitizer_ldflags" ]
+ }
+ }
+}
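A sketch of the intended pattern, with a hypothetical support library that is linked into fuzzers but not itself fuzzed:

# Hypothetical target: uninstrumented, re-optimized with -O2 via
# :not_fuzzed, but still linkable against instrumented code.
static_library("fuzzer_support") {
  sources = [ "support.cc" ]
  configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
  configs += [ "//build/config/sanitizers:not_fuzzed" ]
}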
diff --git a/deps/v8/build/config/sanitizers/OWNERS b/deps/v8/build/config/sanitizers/OWNERS
new file mode 100644
index 0000000000..0a25a01040
--- /dev/null
+++ b/deps/v8/build/config/sanitizers/OWNERS
@@ -0,0 +1,4 @@
+inferno@chromium.org
+metzman@chromium.org
+mmoroz@chromium.org
+ochang@chromium.org
diff --git a/deps/v8/build/config/sanitizers/sanitizers.gni b/deps/v8/build/config/sanitizers/sanitizers.gni
new file mode 100644
index 0000000000..8cff727191
--- /dev/null
+++ b/deps/v8/build/config/sanitizers/sanitizers.gni
@@ -0,0 +1,277 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Compile for Address Sanitizer to find memory bugs.
+ is_asan = false
+
+ # Compile for Hardware-Assisted Address Sanitizer to find memory bugs
+ # (android/arm64 only).
+ # See http://clang.llvm.org/docs/HardwareAssistedAddressSanitizerDesign.html
+ is_hwasan = false
+
+ # Compile for Leak Sanitizer to find leaks.
+ is_lsan = false
+
+ # Compile for Memory Sanitizer to find uninitialized reads.
+ is_msan = false
+
+ # Compile for Thread Sanitizer to find threading bugs.
+ is_tsan = false
+
+ # Compile for Undefined Behaviour Sanitizer to find various types of
+ # undefined behaviour (excludes vptr checks).
+ is_ubsan = false
+
+ # Halt the program if a problem is detected.
+ is_ubsan_no_recover = false
+
+ # Compile for Undefined Behaviour Sanitizer's null pointer checks.
+ is_ubsan_null = false
+
+ # Compile for Undefined Behaviour Sanitizer's vptr checks.
+ is_ubsan_vptr = false
+
+ # Track where uninitialized memory originates from. From fastest to slowest:
+ # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
+ # chain of stores leading from allocation site to use site.
+ msan_track_origins = 2
+
+ # Use dynamic libraries instrumented by one of the sanitizers instead of the
+ # standard system libraries. Set this flag to download prebuilt binaries from
+ # GCS.
+ use_prebuilt_instrumented_libraries = false
+
+ # Use dynamic libraries instrumented by one of the sanitizers instead of the
+ # standard system libraries. Set this flag to build the libraries from source.
+ use_locally_built_instrumented_libraries = false
+
+ # Compile with Control Flow Integrity to protect virtual calls and casts.
+ # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
+ #
+ # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds.
+ is_cfi = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
+ is_official_build
+
+ # Enable checks for bad casts: derived cast and unrelated cast.
+ # TODO(krasin): remove this, when we're ready to add these checks by default.
+ # https://crbug.com/626794
+ use_cfi_cast = false
+
+ # Enable checks for indirect function calls via a function pointer.
+ # TODO(pcc): remove this when we're ready to add these checks by default.
+ # https://crbug.com/701919
+ use_cfi_icall = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
+ is_official_build
+
+ # Print detailed diagnostics when Control Flow Integrity detects a violation.
+ use_cfi_diag = false
+
+ # Let Control Flow Integrity continue execution instead of crashing when
+ # printing diagnostics (use_cfi_diag = true).
+ use_cfi_recover = false
+
+ # Compile for fuzzing with LLVM LibFuzzer.
+ # See http://www.chromium.org/developers/testing/libfuzzer
+ use_libfuzzer = false
+
+ # Compile for fuzzing with AFL.
+ use_afl = false
+
+ # Enables core ubsan security features. Will later be removed once it matches
+ # is_ubsan.
+ is_ubsan_security = false
+
+ # Helper variable for testing builds with disabled libfuzzer.
+ # Not for client use.
+ disable_libfuzzer = false
+
+ # Optimize for coverage guided fuzzing (balance between speed and number of
+ # branches). Can also be used to remove non-determinism and other issues.
+ optimize_for_fuzzing = false
+
+ # Value for -fsanitize-coverage flag. Setting this causes
+ # use_sanitizer_coverage to be enabled.
+ # This flag is not used for libFuzzer (use_libfuzzer=true). Instead, we use:
+ # -fsanitize=fuzzer-no-link
+ # Default value when unset and use_fuzzing_engine=true:
+ # trace-pc-guard
+ # Default value when unset and use_sanitizer_coverage=true:
+ # trace-pc-guard,indirect-calls
+ sanitizer_coverage_flags = ""
+
+ # Use blacklists from tools/memory/asan when compiling with asan.
+ asan_use_blacklist = true
+
+ # When enabled, only relevant sanitizer defines are set, but compilation
+ # happens with no extra flags. This is useful in component builds when
+ # enabling sanitizers in only some of the components.
+ use_sanitizer_configs_without_instrumentation = false
+
+ # When true, seed corpora archives are built.
+ archive_seed_corpus = true
+}
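+
+# As an illustration (not part of the real arg handling), a typical ASan
+# release build might use an args.gn along these lines:
+#
+#   is_asan = true
+#   is_lsan = true
+#   is_debug = false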
+
+# Disable sanitizers for non-default toolchains.
+if (current_toolchain != default_toolchain) {
+ is_asan = false
+ is_cfi = false
+ is_hwasan = false
+ is_lsan = false
+ is_msan = false
+ is_tsan = false
+ is_ubsan = false
+ is_ubsan_null = false
+ is_ubsan_no_recover = false
+ is_ubsan_security = false
+ is_ubsan_vptr = false
+ msan_track_origins = 0
+ sanitizer_coverage_flags = ""
+ use_afl = false
+ use_cfi_diag = false
+ use_cfi_recover = false
+ use_libfuzzer = false
+ use_prebuilt_instrumented_libraries = false
+ use_locally_built_instrumented_libraries = false
+ use_sanitizer_coverage = false
+}
+
+# Whether we are doing a fuzzer build. Normally this should be checked instead
+# of checking "use_libfuzzer || use_afl" because developers often forget to
+# check for "use_afl".
+use_fuzzing_engine = use_libfuzzer || use_afl
+
+# Args that are in turn dependent on other args must be in a separate
+# declare_args block. User overrides are only applied at the end of a
+# declare_args block.
+declare_args() {
+ use_sanitizer_coverage =
+ !use_clang_coverage &&
+ (use_fuzzing_engine || sanitizer_coverage_flags != "")
+
+ # Detect overflow/underflow for global objects.
+ #
+ # Mac: http://crbug.com/352073
+ asan_globals = !is_mac
+}
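+
+# For example (illustrative arg names), a user override of "use_foo" is
+# visible to "use_foo_reporting" only because the latter is declared in a
+# second block:
+#
+#   declare_args() {
+#     use_foo = false
+#   }
+#   declare_args() {
+#     use_foo_reporting = use_foo
+#   }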
+
+if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
+ sanitizer_coverage_flags = "trace-pc-guard"
+} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
+ sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
+}
+
+# Whether we are linking against a sanitizer runtime library. Among other
+# things, this changes the default symbol level and other settings in order to
+# prepare to create stack traces "live" using the sanitizer runtime.
+using_sanitizer = is_asan || is_hwasan || is_lsan || is_tsan || is_msan ||
+ is_ubsan || is_ubsan_null || is_ubsan_vptr ||
+ is_ubsan_security || use_sanitizer_coverage || use_cfi_diag
+
+assert(!using_sanitizer || is_clang,
+ "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
+
+assert(!is_cfi || is_clang,
+ "is_cfi requires setting is_clang = true in 'gn args'")
+
+prebuilt_instrumented_libraries_available =
+ is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
+
+if (use_libfuzzer && is_linux) {
+ if (is_asan) {
+ # We do leak checking with libFuzzer on Linux. Set is_lsan for code that
+ # relies on LEAK_SANITIZER define to avoid false positives.
+ is_lsan = true
+ }
+ if (is_msan) {
+ use_prebuilt_instrumented_libraries = true
+ }
+}
+
+# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
+# same is possibly true for the other non-ASan sanitizers. But regardless of
+# whether it links, one would normally never run a sanitizer in debug mode.
+# Running in debug mode probably indicates you forgot to set the "is_debug =
+# false" flag in the build args. ASan seems to run fine in debug mode.
+#
+# If you find a use-case where you want to compile a sanitizer in debug mode
+# and have verified it works, ask brettw and we can consider removing it from
+# this condition. We may also be able to find another way to enable your case
+# without having people accidentally get broken builds by compiling an
+# unsupported or inadvisable configuration.
+#
+# For one-off testing, just comment this assertion out.
+assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
+ "Sanitizers should generally be used in release (set is_debug=false).")
+
+assert(!is_hwasan || (is_android && current_cpu == "arm64"),
+ "HWASan only supported on Android ARM64 builds.")
+
+assert(!is_msan || (is_linux && current_cpu == "x64"),
+ "MSan currently only works on 64-bit Linux and ChromeOS builds.")
+
+assert(!is_lsan || is_asan, "is_lsan = true requires is_asan = true also.")
+
+# ASAN build on Windows is not working in debug mode. Intercepting memory
+# allocation functions is hard on Windows and not yet implemented in LLVM.
+assert(!is_win || !is_debug || !is_asan,
+ "ASan on Windows doesn't work in debug (set is_debug=false).")
+
+# libFuzzer targets can fail to build or behave incorrectly when built without
+# ASAN on Windows.
+assert(!is_win || !use_libfuzzer || is_asan,
+ "use_libfuzzer on Windows requires setting is_asan = true")
+
+# Make sure that if we recover on detection (i.e. not crash), diagnostics are
+# printed.
+assert(!use_cfi_recover || use_cfi_diag,
+ "Only use CFI recovery together with diagnostics.")
+
+# TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
+# not supported by the Chromium mac_clang_x64 toolchain on iOS distribution.
+# The coverage works with iOS toolchain but it is broken when the mac
+# toolchain is used as a secondary one on iOS distribution. E.g., it should be
+# possible to build the "net" target for iOS with the sanitizer coverage
+# enabled.
+assert(
+ !(use_sanitizer_coverage && is_mac && target_os == "ios"),
+ "crbug.com/753445: use_sanitizer_coverage=true is not supported by the " +
+ "Chromium mac_clang_x64 toolchain on iOS distribution. Please set " +
+ "the argument value to false.")
+
+# Use these lists of configs to disable instrumenting code that is part of a
+# fuzzer, but which isn't being targeted (such as libprotobuf-mutator, *.pb.cc
+# and libprotobuf when they are built as part of a proto fuzzer). Adding to
+# or removing from these lists has no effect unless use_libfuzzer or use_afl
+# is passed as an argument to gn.
+not_fuzzed_remove_configs = []
+not_fuzzed_remove_nonasan_configs = []
+
+if (use_fuzzing_engine) {
+ # Removing coverage should always just work.
+ not_fuzzed_remove_configs += [ "//build/config/coverage:default_coverage" ]
+ not_fuzzed_remove_nonasan_configs +=
+ [ "//build/config/coverage:default_coverage" ]
+
+ if (!is_msan) {
+ # Allow sanitizer instrumentation to be removed if we are not using MSan
+ # since binaries cannot be partially instrumented with MSan.
+ not_fuzzed_remove_configs +=
+ [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+ # Certain parts of binaries must be instrumented with ASan if the rest of
+ # the binary is. For these, only remove non-ASan sanitizer instrumentation.
+ if (!is_asan) {
+ not_fuzzed_remove_nonasan_configs +=
+ [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+ assert(not_fuzzed_remove_nonasan_configs == not_fuzzed_remove_configs)
+ }
+ }
+}
diff --git a/deps/v8/build/config/sysroot.gni b/deps/v8/build/config/sysroot.gni
new file mode 100644
index 0000000000..701c66082e
--- /dev/null
+++ b/deps/v8/build/config/sysroot.gni
@@ -0,0 +1,101 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+ # The absolute path of the sysroot that is applied when compiling using
+ # the target toolchain.
+ target_sysroot = ""
+
+ # The absolute path to the directory containing Linux sysroot images.
+ target_sysroot_dir = "//build/linux"
+
+ use_sysroot = current_cpu == "x86" || current_cpu == "x64" ||
+ current_cpu == "arm" || current_cpu == "arm64" ||
+ current_cpu == "mipsel" || current_cpu == "mips64el"
+}
+
+if (current_os == target_os && current_cpu == target_cpu &&
+ target_sysroot != "") {
+ sysroot = target_sysroot
+} else if (is_android) {
+ import("//build/config/android/config.gni")
+
+ # Android uses unified headers, and thus a single compile time sysroot.
+ sysroot = "$android_ndk_root/sysroot"
+} else if (is_linux && use_sysroot) {
+ # By default build against a sysroot image downloaded from Cloud Storage
+ # during gclient runhooks.
+ if (current_cpu == "x64") {
+ sysroot = "$target_sysroot_dir/debian_sid_amd64-sysroot"
+ } else if (current_cpu == "x86") {
+ sysroot = "$target_sysroot_dir/debian_sid_i386-sysroot"
+ } else if (current_cpu == "mipsel") {
+ sysroot = "$target_sysroot_dir/debian_sid_mips-sysroot"
+ } else if (current_cpu == "mips64el") {
+ sysroot = "$target_sysroot_dir/debian_sid_mips64el-sysroot"
+ } else if (current_cpu == "arm") {
+ sysroot = "$target_sysroot_dir/debian_sid_arm-sysroot"
+ } else if (current_cpu == "arm64") {
+ sysroot = "$target_sysroot_dir/debian_sid_arm64-sysroot"
+ } else {
+ assert(false, "No linux sysroot for cpu: $target_cpu")
+ }
+
+ if (sysroot != "") {
+ _script_arch = current_cpu
+ if (_script_arch == "x86") {
+ _script_arch = "i386"
+ } else if (_script_arch == "x64") {
+ _script_arch = "amd64"
+ }
+ assert(
+ exec_script("//build/dir_exists.py",
+ [ rebase_path(sysroot) ],
+ "string") == "True",
+ "Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch")
+ }
+} else if (is_mac) {
+ import("//build/config/mac/mac_sdk.gni")
+ sysroot = mac_sdk_path
+} else if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+ sysroot = ios_sdk_path
+} else if (is_fuchsia) {
+ import("//build/config/fuchsia/config.gni")
+ if (current_cpu == "arm64" || current_cpu == "x64") {
+ sysroot = fuchsia_sdk + "/arch/$current_cpu/sysroot"
+ } else {
+ sysroot = ""
+ }
+} else {
+ sysroot = ""
+}
+
+if (is_android) {
+ # Android uses unified headers in NDK r16 and later, meaning that the
+ # compile time sysroot and link time sysroot are different.
+ link_sysroot = sysroot
+ if (current_cpu == "arm") {
+ link_sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+ } else if (current_cpu == "arm64") {
+ link_sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+ } else if (current_cpu == "x86") {
+ link_sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+ } else if (current_cpu == "x64") {
+ link_sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+ } else if (current_cpu == "mipsel") {
+ link_sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+ } else if (current_cpu == "mips64el") {
+ link_sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+ } else {
+ assert(false, "No android link sysroot for cpu: $target_cpu")
+ }
+} else {
+ link_sysroot = sysroot
+}
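+
+# Illustrative consumption (a sketch; the real flags live in the compiler
+# configs): toolchain code typically passes these variables to the compiler
+# and linker along these lines:
+#
+#   if (sysroot != "") {
+#     cflags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
+#     ldflags = [ "--sysroot=" + rebase_path(link_sysroot, root_build_dir) ]
+#   }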
diff --git a/deps/v8/build/config/ui.gni b/deps/v8/build/config/ui.gni
new file mode 100644
index 0000000000..547b42fb5c
--- /dev/null
+++ b/deps/v8/build/config/ui.gni
@@ -0,0 +1,60 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# These flags are ui-related so should eventually be moved to various places
+# in //ui/*.
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+import("//build/config/chromecast_build.gni")
+
+declare_args() {
+ # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+ # that does not require X11. Enabling this feature disables use of glib, x11,
+ # Pango, and Cairo.
+ use_ozone = is_chromeos || (is_chromecast && !is_android) || is_fuchsia
+
+ # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
+ # of a replacement for GDI or GTK.
+ use_aura = is_win || is_linux || is_fuchsia
+
+ # Whether we should use glib, a low level C utility library.
+ use_glib = is_linux
+}
+
+declare_args() {
+ # True means the UI is built using the "views" framework.
+ toolkit_views = (is_mac || is_win || is_chromeos || use_aura) &&
+ !is_chromecast && !is_fuchsia
+}
+
+# Additional dependent variables -----------------------------------------------
+#
+# These variables depend on other variables and can't be set externally.
+
+# Indicates if the UI toolkit depends on X11.
+use_x11 = is_linux && !use_ozone
+
+# Turn off glib if Ozone is enabled.
+if (use_ozone) {
+ use_glib = false
+}
+
+# Whether to use atk, the Accessibility ToolKit library
+use_atk = is_desktop_linux && use_x11
+
+# =============================================
+# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
diff --git a/deps/v8/build/config/v8_target_cpu.gni b/deps/v8/build/config/v8_target_cpu.gni
new file mode 100644
index 0000000000..305981f3fa
--- /dev/null
+++ b/deps/v8/build/config/v8_target_cpu.gni
@@ -0,0 +1,61 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+ # This arg is used when we want to tell the JIT-generating v8 code
+ # to generate code for an architecture that is different from the
+ # architecture that v8 will actually run on; we then run the generated
+ # code under an emulator. For example, we might run v8 on x86, but
+ # generate arm code and run that under emulation.
+ #
+ # This arg is defined here rather than in the v8 project because we want
+ # some of the common architecture-specific args (like arm_float_abi or
+ # mips_arch_variant) to be set to their defaults either if the current_cpu
+ # applies *or* if the v8_current_cpu applies.
+ #
+ # As described below, you can also specify the v8_target_cpu to use
+ # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the
+ # name after the normal toolchain.
+ #
+ # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"`
+ # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting
+ # `custom_toolchain` is more verbose but makes the toolchain that is
+ # (effectively) being used explicit.
+ #
+ # v8_target_cpu can only be used to target one architecture in a build,
+ # so if you wish to build multiple copies of v8 that are targeting
+ # different architectures, you will need to do something more
+ # complicated involving multiple toolchains along the lines of
+ # custom_toolchain, above.
+ v8_target_cpu = ""
+}
+
+if (v8_target_cpu == "") {
+ if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") {
+ v8_target_cpu = "arm64"
+ } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") {
+ v8_target_cpu = "arm"
+ } else if (current_toolchain ==
+ "//build/toolchain/linux:clang_x86_v8_mips64el") {
+ v8_target_cpu = "mips64el"
+ } else if (current_toolchain ==
+ "//build/toolchain/linux:clang_x86_v8_mipsel") {
+ v8_target_cpu = "mipsel"
+ } else if (is_msan) {
+ # If we're running under a sanitizer and configure v8 to generate
+ # code that will be run under a simulator, then the generated code
+ # also gets the benefits of the sanitizer.
+ v8_target_cpu = "arm64"
+ } else {
+ v8_target_cpu = target_cpu
+ }
+}
+
+declare_args() {
+ # This argument is declared here so that it can be overridden in toolchains.
+ # It should never be explicitly set by the user.
+ v8_current_cpu = v8_target_cpu
+}
diff --git a/deps/v8/build/config/win/BUILD.gn b/deps/v8/build/config/win/BUILD.gn
new file mode 100644
index 0000000000..b5a58459ac
--- /dev/null
+++ b/deps/v8/build/config/win/BUILD.gn
@@ -0,0 +1,558 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/timestamp.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_win)
+
+declare_args() {
+ # Turn this on to have the linker output extra timing information.
+ win_linker_timing = false
+
+ # possible values for target_winuwp_version:
+ # "10" - Windows UWP 10
+ # "8.1" - Windows RT 8.1
+ # "8.0" - Windows RT 8.0
+ target_winuwp_version = "10"
+
+ # possible values:
+ # "app" - Windows Store Applications
+ # "phone" - Windows Phone Applications
+ # "system" - Windows Drivers and Tools
+ # "server" - Windows Server Applications
+ # "desktop" - Windows Desktop Applications
+ target_winuwp_family = "app"
+
+ # Set this to use clang-style diagnostics format instead of MSVC-style, which
+ # is useful in e.g. Emacs compilation mode.
+ # E.g.:
+ # Without this, clang emits a diagnostic message like this:
+ # foo/bar.cc(12,34): error: something went wrong
+ # and with this switch, clang emits it like this:
+ # foo/bar.cc:12:34: error: something went wrong
+ use_clang_diagnostics_format = false
+
+ # Use absolute file paths in the compiler diagnostics and (for non-clang)
+ # __FILE__ macro if needed. Note that enabling this makes your build
+ # dependent on your checkout's path, which means you won't get to use goma's
+ # global cache, and without clang your builds won't be deterministic.
+ msvc_use_absolute_paths = false
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Windows-only.
+config("compiler") {
+ if (current_cpu == "x86") {
+ asmflags = [
+ # When /safeseh is specified, the linker will only produce an image if it
+ # can also produce a table of the image's safe exception handlers. This
+ # table specifies for the operating system which exception handlers are
+ # valid for the image. Note that /SAFESEH isn't accepted on the command
+ # line, only /safeseh. This is only accepted by ml.exe, not ml64.exe.
+ "/safeseh",
+ ]
+ }
+
+ cflags = [
+ "/Gy", # Enable function-level linking.
+ "/FS", # Preserve previous PDB behavior.
+ "/bigobj", # Some of our files are bigger than the regular limits.
+ "/utf-8", # Assume UTF-8 by default to avoid code page dependencies.
+ ]
+
+ # Force C/C++ mode for the given GN detected file type. This is necessary
+ # for precompiled headers where the same source file is compiled in both
+ # modes.
+ cflags_c = [ "/TC" ]
+ cflags_cc = [ "/TP" ]
+
+ cflags += [
+ # Work around crbug.com/526851, bug in VS 2015 RTM compiler.
+ "/Zc:sizedDealloc-",
+ ]
+
+ if (msvc_use_absolute_paths) {
+ # Pass /FC flag to the compiler if needed.
+ cflags += [ "/FC" ]
+
+ # Print absolute paths in diagnostics. There is no precedent for doing this
+ # on Linux/Mac (GCC doesn't support it), but MSVC does this with /FC and
+ # Windows developers rely on it (crbug.com/636109) so only do this on
+ # Windows.
+ # TODO(thakis): This comment no longer really makes sense after
+ # https://chromium-review.googlesource.com/c/chromium/src/+/558871/
+ # See if we can remove msvc_use_absolute_paths. See also discussion in
+ # https://reviews.llvm.org/D23816
+ if (is_clang) {
+ cflags += [ "-fdiagnostics-absolute-paths" ]
+ }
+ }
+
+ if (is_clang) {
+ # Don't look for includes in %INCLUDE%.
+ cflags += [ "/X" ]
+
+ # Tell clang which version of MSVC to emulate.
+ cflags += [ "-fmsc-version=1911" ]
+
+ # Emit table of address-taken functions for Control-Flow Guard (CFG). We
+ # don't emit the CFG checks themselves, but this enables the functions to
+ # be called by code that is built with those checks enabled, such as system
+ # libraries.
+ cflags += [ "/guard:cf,nochecks" ]
+
+ if (is_component_build) {
+ cflags += [
+ # Do not export inline member functions. This makes component builds
+ # faster. This is similar to -fvisibility-inlines-hidden.
+ "/Zc:dllexportInlines-",
+ ]
+ }
+
+ if (current_cpu == "x86") {
+ cflags += [ "-m32" ]
+ } else if (current_cpu == "x64") {
+ cflags += [ "-m64" ]
+ } else if (current_cpu == "arm64") {
+ cflags += [ "--target=arm64-windows" ]
+ } else {
+ assert(false, "unknown current_cpu " + current_cpu)
+ }
+
+ if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
+ "True") {
+ cflags += [
+ # cmd.exe doesn't understand ANSI escape codes by default,
+ # so only enable them if something emulating them is around.
+ "-fansi-escape-codes",
+ ]
+ }
+
+ if (use_clang_diagnostics_format) {
+ cflags += [ "/clang:-fdiagnostics-format=clang" ]
+ }
+
+ # Clang runtime libraries, such as the sanitizer runtimes, live here.
+ lib_dirs = [ "$clang_base_path/lib/clang/$clang_version/lib/windows" ]
+ }
+
+ if (use_lld && !use_thin_lto && (is_clang || !use_goma)) {
+ # /Brepro lets the compiler not write the mtime field in the .obj output.
+ # link.exe /incremental relies on this field to work correctly, but lld
+ # never looks at this timestamp, so it's safe to pass this flag with
+ # lld and get more deterministic compiler output in return.
+ # In LTO builds, the compiler doesn't write .obj files containing mtimes,
+ # so /Brepro is ignored there.
+ cflags += [ "/Brepro" ]
+ }
+
+ ldflags = []
+
+ if (use_lld) {
+ # lld defaults to writing the current time in the pe/coff header.
+ # For build reproducibility, pass an explicit timestamp. See
+ # build/compute_build_timestamp.py for how the timestamp is chosen.
+ # (link.exe also writes the current time, but it doesn't have a flag to
+ # override that behavior.)
+ ldflags += [ "/TIMESTAMP:" + build_timestamp ]
+ }
+
+ if (!is_debug && !is_component_build) {
+ # Enable standard linker optimizations like GC (/OPT:REF) and ICF in static
+ # release builds. These are implied by /PROFILE below, but /PROFILE is
+ # incompatible with /debug:fastlink.
+ # Release builds always want these optimizations, so enable them explicitly.
+ ldflags += [
+ "/OPT:REF",
+ "/OPT:ICF",
+ "/INCREMENTAL:NO",
+ "/FIXED:NO",
+ ]
+
+ if (use_lld) {
+ # String tail merging leads to smaller binaries, but they don't compress
+ # as well, leading to increased mini_installer size (crbug.com/838449).
+ ldflags += [ "/OPT:NOLLDTAILMERGE" ]
+ }
+
+ # TODO(siggi): Is this of any use anymore?
+ # /PROFILE ensures that the PDB file contains FIXUP information (growing the
+ # PDB file by about 5%) but does not otherwise alter the output binary. It
+ # is enabled opportunistically for builds where it is not prohibited (not
+ # supported when incrementally linking, or using /debug:fastlink).
+ if (!is_win_fastlink) {
+ ldflags += [ "/PROFILE" ]
+ }
+ }
+
+ # arflags apply only to static_libraries. The normal linker configs are only
+ # set for executable and shared library targets so arflags must be set
+ # elsewhere. Since this is relatively contained, we just apply them in this
+ # more general config and they will only have an effect on static libraries.
+ arflags = [
+ # "No public symbols found; archive member will be inaccessible." This
+ # means that one or more object files in the library can never be
+ # pulled in to targets that link to this library. It's just a warning that
+ # the source file is a no-op.
+ "/ignore:4221",
+ ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Windows-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ cflags = []
+ cflags_cc = []
+
+ # Defines that set up the CRT.
+ defines = [
+ "__STD_C",
+ "_CRT_RAND_S",
+ "_CRT_SECURE_NO_DEPRECATE",
+ "_SCL_SECURE_NO_DEPRECATE",
+ ]
+
+ # Defines that set up the Windows SDK.
+ defines += [
+ "_ATL_NO_OPENGL",
+ "_WINDOWS",
+ "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+ "PSAPI_VERSION=2",
+ "WIN32",
+ "_SECURE_ATL",
+ ]
+
+ # This is required for ATL to use XP-safe versions of its functions.
+ # TODO(thakis): We no longer support XP; try removing this.
+ defines += [ "_USING_V110_SDK71_" ]
+
+ if (current_os == "winuwp") {
+ # When targeting Windows Runtime, certain compiler/linker flags are
+ # necessary.
+ defines += [
+ "WINUWP",
+ "__WRL_NO_DEFAULT_LIB__",
+ ]
+ if (target_winuwp_family == "app") {
+ defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PC_APP" ]
+ } else if (target_winuwp_family == "phone") {
+ defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" ]
+ } else if (target_winuwp_family == "system") {
+ defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SYSTEM" ]
+ } else if (target_winuwp_family == "server") {
+ defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SERVER" ]
+ } else {
+ defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
+ }
+ cflags_cc += [
+ "/ZW",
+ "/EHsc",
+ ]
+
+ # This warning is given because the linker cannot tell the difference
+ # between consuming WinRT APIs and authoring WinRT within static
+ # libraries, so the warning is always emitted. Since consuming WinRT APIs
+ # within a library is legitimate but authoring WinRT APIs is not allowed,
+ # this warning is disabled to ignore the legitimate consumption of WinRT
+ # APIs within static library builds.
+ arflags = [ "/IGNORE:4264" ]
+
+ if (target_winuwp_version == "10") {
+ defines += [ "WIN10=_WIN32_WINNT_WIN10" ]
+ } else if (target_winuwp_version == "8.1") {
+ defines += [ "WIN8_1=_WIN32_WINNT_WINBLUE" ]
+ } else if (target_winuwp_version == "8.0") {
+ defines += [ "WIN8=_WIN32_WINNT_WIN8" ]
+ }
+ } else {
+ # When not targeting Windows Runtime, make sure the WINAPI family is set
+ # to desktop.
+ defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
+ }
+}
+
+# Chromium supports running on Windows 7, but if these constants are set to
+# Windows 7, then newer APIs aren't made available by the Windows SDK.
+# So we set this to Windows 10 and then are careful to check at runtime
+# to only call newer APIs when they're available.
+# Some third-party libraries assume that these defines set what version of
+# Windows is available at runtime. Targets using these libraries need to
+# manually override this config for their compiles.
+config("winver") {
+ defines = [
+ "NTDDI_VERSION=NTDDI_WIN10_RS2",
+
+ # We can't say `=_WIN32_WINNT_WIN10` here because some files do
+ # `#if WINVER < 0x0600` without including windows.h before,
+ # and then _WIN32_WINNT_WIN10 isn't yet known to be 0x0A00.
+ "_WIN32_WINNT=0x0A00",
+ "WINVER=0x0A00",
+ ]
+}
+
+# Linker flags for Windows SDK setup; this is applied only to EXEs and DLLs.
+config("sdk_link") {
+ assert(current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm" ||
+ current_cpu == "arm64",
+ "Only supports x64, x86, arm and arm64 CPUs")
+ if (current_cpu == "x64") {
+ ldflags = [ "/MACHINE:X64" ]
+ } else if (current_cpu == "x86") {
+ ldflags = [
+ "/MACHINE:X86",
+ "/SAFESEH", # Not compatible with x64 so use only for x86.
+ "/largeaddressaware",
+ ]
+ } else if (current_cpu == "arm") {
+ ldflags = [ "/MACHINE:ARM" ]
+ } else if (current_cpu == "arm64") {
+ ldflags = [ "/MACHINE:ARM64" ]
+ }
+
+ vcvars_toolchain_data = exec_script("../../toolchain/win/setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ current_os,
+ current_cpu,
+ "none",
+ ],
+ "scope")
+
+ vc_lib_path = vcvars_toolchain_data.vc_lib_path
+ if (defined(vcvars_toolchain_data.vc_lib_atlmfc_path)) {
+ vc_lib_atlmfc_path = vcvars_toolchain_data.vc_lib_atlmfc_path
+ }
+ vc_lib_um_path = vcvars_toolchain_data.vc_lib_um_path
+
+ lib_dirs = [
+ "$vc_lib_um_path",
+ "$vc_lib_path",
+ ]
+ if (defined(vc_lib_atlmfc_path)) {
+ lib_dirs += [ "$vc_lib_atlmfc_path" ]
+ }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets who want different library configurations can remove this and specify
+# their own.
+config("common_linker_setup") {
+ ldflags = [
+ "/FIXED:NO",
+ "/ignore:4199",
+ "/ignore:4221",
+ "/NXCOMPAT",
+ "/DYNAMICBASE",
+ ]
+
+ if (win_linker_timing) {
+ ldflags += [
+ "/time",
+ "/verbose:incr",
+ ]
+ }
+}
+
+config("cfi_linker") {
+ # Control Flow Guard (CFG)
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx
+ # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG can’t be
+ # turned on either.
+ # CFG seems to lead to random corruption with incremental linking so turn off
+ # CFG in component builds. https://crbug.com/812421
+ # ASan and CFG lead to slow process startup. Chromium's test runner uses
+ # lots of child processes, so this means things are really slow. Disable CFG
+ # for now. https://crbug.com/846966
+ if (!is_debug && !is_component_build && !is_asan) {
+ # Turn on CFG, except for longjmp because it relies on compiler support
+ # which clang doesn't have yet.
+ ldflags = [ "/guard:cf,nolongjmp" ]
+ }
+}
+
+# CRT --------------------------------------------------------------------------
+
+# Configures how the runtime library (CRT) is going to be used.
+# See https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx for a reference of
+# what each value does.
+config("default_crt") {
+ if (is_component_build) {
+ # Component mode: dynamic CRT. Since the library is shared, it requires
+ # exceptions or will give errors about things not matching, so keep
+ # exceptions on.
+ configs = [ ":dynamic_crt" ]
+ } else {
+ if (current_os == "winuwp") {
+ # https://blogs.msdn.microsoft.com/vcblog/2014/06/10/the-great-c-runtime-crt-refactoring/
+ # contains a details explanation of what is happening with the Windows
+ # CRT in Visual Studio releases related to Windows store applications.
+ configs = [ ":dynamic_crt" ]
+ } else {
+ # Desktop Windows: static CRT.
+ configs = [ ":static_crt" ]
+ }
+ }
+}
+
+# Use this to force use of the release CRT when building perf-critical build
+# tools that need to be fully optimized even in debug builds, for those times
+# when the debug CRT is part of the bottleneck. This also avoids *implicitly*
+# defining _DEBUG.
+config("release_crt") {
+ if (is_component_build) {
+ cflags = [ "/MD" ]
+ } else {
+ cflags = [ "/MT" ]
+ }
+}
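+
+# Illustrative usage (hypothetical target):
+#
+#   executable("perf_critical_tool") {
+#     configs -= [ "//build/config/win:default_crt" ]
+#     configs += [ "//build/config/win:release_crt" ]
+#   }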
+
+config("dynamic_crt") {
+ if (is_debug) {
+ # This pulls in the DLL debug CRT and defines _DEBUG
+ cflags = [ "/MDd" ]
+ } else {
+ cflags = [ "/MD" ]
+ }
+}
+
+config("static_crt") {
+ if (is_debug) {
+ # This pulls in the static debug CRT and defines _DEBUG
+ cflags = [ "/MTd" ]
+ } else {
+ cflags = [ "/MT" ]
+ }
+}
+
+# Subsystem --------------------------------------------------------------------
+
+# This is appended to the subsystem to specify a minimum version.
+if (current_cpu == "x64") {
+ # The number after the comma is the minimum required OS version.
+ # 5.02 = Windows Server 2003.
+ subsystem_version_suffix = ",5.02"
+} else if (current_cpu == "arm64") {
+ # Windows ARM64 requires Windows 10.
+ subsystem_version_suffix = ",10.0"
+} else {
+ # 5.01 = Windows XP.
+ subsystem_version_suffix = ",5.01"
+}
+
+config("console") {
+ ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
+}
+config("windowed") {
+ ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+incremental_linking_on_switch = [ "/INCREMENTAL" ]
+if ((!is_debug && !is_component_build) || !use_lld) {
+ incremental_linking_off_switch = [ "/INCREMENTAL:NO" ]
+}
+if (use_lld) {
+ incremental_linking_on_switch += [ "/OPT:NOREF" ]
+}
+
+# Enable incremental linking for debug builds and all component builds - any
+# builds where performance is not job one.
+if (is_debug || is_component_build) {
+ default_incremental_linking_switch = incremental_linking_on_switch
+} else {
+ default_incremental_linking_switch = incremental_linking_off_switch
+}
+
+# Applies incremental linking or not depending on the current configuration.
+config("default_incremental_linking") {
+ ldflags = default_incremental_linking_switch
+}
+
+# Explicitly turn incremental linking on or off.
+config("incremental_linking") {
+ ldflags = incremental_linking_on_switch
+}
+config("no_incremental_linking") {
+ # Projects disable incremental linking to work around ilk file issues with
+ # link.exe. lld doesn't use ilk files and doesn't really have an incremental
+ # link mode; the only effect of the flag is that the .lib file timestamp isn't
+ # updated if the .lib doesn't change.
+ if (!use_lld) {
+ ldflags = incremental_linking_off_switch
+ }
+}
+
+# Some large modules can't handle incremental linking in some situations. This
+# config should be applied to large modules to turn off incremental linking
+# when it won't work.
+config("default_large_module_incremental_linking") {
+ if (use_lld || symbol_level == 0 ||
+ (current_cpu == "x86" && is_component_build)) {
+ # In these configurations, ilk file sizes stay low enough that we can
+ # link incrementally.
+ ldflags = default_incremental_linking_switch
+ } else {
+ ldflags = incremental_linking_off_switch
+ }
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
+config("unicode") {
+ defines = [
+ "_UNICODE",
+ "UNICODE",
+ ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accommodating" version of windows.h.
+config("lean_and_mean") {
+ defines = [ "WIN32_LEAN_AND_MEAN" ]
+}
+
+# Nominmax --------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+ defines = [ "NOMINMAX" ]
+}
+
+# Generating order files -------------------------------------------------------
+
+config("default_cygprofile_instrumentation") {
+ if (generate_order_files) {
+ assert(is_clang, "cygprofile instrumentation only works with clang")
+ assert(is_official_build, "order files should be made w/ official builds")
+ assert(!is_chrome_branded, "order files could leak internal symbol names")
+ configs = [ ":cygprofile_instrumentation" ]
+ }
+}
+
+config("cygprofile_instrumentation") {
+ cflags = [
+ "-Xclang",
+ "-finstrument-functions-after-inlining",
+ ]
+}
diff --git a/deps/v8/build/config/win/console_app.gni b/deps/v8/build/config/win/console_app.gni
new file mode 100644
index 0000000000..cac2ef5d73
--- /dev/null
+++ b/deps/v8/build/config/win/console_app.gni
@@ -0,0 +1,18 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+ # If true, builds as a console app (rather than a windowed app), which allows
+ # logging to be printed to the user. This will cause a terminal window to pop
+ # up when the executable is not run from the command line, so should only be
+ # used for development. Only has an effect on Windows builds.
+ win_console_app = false
+}
+
+if (is_win && is_asan) {
+ # AddressSanitizer build should be a console app since it writes to stderr.
+ win_console_app = true
+}
diff --git a/deps/v8/build/config/win/manifest.gni b/deps/v8/build/config/win/manifest.gni
new file mode 100644
index 0000000000..b18a4a1412
--- /dev/null
+++ b/deps/v8/build/config/win/manifest.gni
@@ -0,0 +1,112 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# HOW MANIFESTS WORK IN THE GN BUILD
+#
+# Use the windows_manifest template to declare a manifest generation step.
+# This will combine all listed .manifest files. To link this manifest, just
+# depend on the manifest target from your executable or shared library.
+#
+# This will define an empty placeholder target on non-Windows platforms so
+# the manifest declarations and dependencies do not need to be inside of OS
+# conditionals.
+#
+# A binary can depend on only one manifest target, but the manifest target
+# can depend on many individual .manifest files which will be merged. As a
+# result, only executables and shared libraries should depend on manifest
+# targets. If you want to add a manifest to a component, put the dependency
+# behind a "if (is_component_build)" conditional.
+#
+# Generally you will just want the defaults for the Chrome build. In this case
+# the binary should just depend on one of the targets in //build/win/. There
+# are also individual manifest files in that directory you can reference via
+# the *_manifest variables defined below to pick and choose only some defaults.
+# You might combine these with a custom manifest file to get specific behavior.
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome OS compatibility list.
+default_compatibility_manifest = "//build/win/compatibility.manifest"
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome common controls compatibility.
+common_controls_manifest = "//build/win/common_controls.manifest"
+
+# Reference this manifest to request that Windows not perform any elevation
+# when running your program. Otherwise, it might do some autodetection and
+# request elevated privileges from the user. This is normally what you want.
+as_invoker_manifest = "//build/win/as_invoker.manifest"
+
+# An alternative to as_invoker_manifest when you want the application to always
+# elevate.
+require_administrator_manifest = "//build/win/require_administrator.manifest"
+
+# Construct a target to combine the given manifest files into a .rc file.
+#
+# Variables for the windows_manifest template:
+#
+# sources: (required)
+# List of source .manifest files to add.
+#
+# deps: (optional)
+# visibility: (optional)
+# Normal meaning.
+#
+# Example:
+#
+# windows_manifest("doom_melon_manifest") {
+# sources = [
+# "doom_melon.manifest", # Custom values in here.
+# default_compatibility_manifest, # Want the normal OS compat list.
+# ]
+# }
+#
+# executable("doom_melon") {
+# deps = [ ":doom_melon_manifest" ]
+# ...
+# }
+
+if (is_win) {
+ template("windows_manifest") {
+ config_name = "${target_name}__config"
+ source_set_name = target_name
+
+ config(config_name) {
+ visibility = [ ":$source_set_name" ]
+ assert(defined(invoker.sources),
+ "\"sources\" must be defined for a windows_manifest target")
+ manifests = []
+ foreach(i, rebase_path(invoker.sources, root_build_dir)) {
+ manifests += [ "/manifestinput:" + i ]
+ }
+ ldflags = [
+ "/manifest:embed",
+
+ # We handle UAC by adding explicit .manifest files instead.
+ "/manifestuac:no",
+ ] + manifests
+ }
+
+ # This source set only exists to add a dep on the invoker's deps and to
+ # add a public_config that sets ldflags on dependents.
+ source_set(source_set_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+ public_configs = [ ":$config_name" ]
+
+ # Apply any dependencies from the invoker to this target, since those
+ # dependencies may have created the input manifest files.
+ forward_variables_from(invoker, [ "deps" ])
+ }
+ }
+} else {
+ # Make a no-op group on non-Windows platforms so windows_manifest
+ # instantiations don't need to be inside windows blocks.
+ template("windows_manifest") {
+ group(target_name) {
+ # Prevent unused variable warnings on non-Windows platforms.
+ assert(invoker.sources != "")
+ assert(!defined(invoker.deps) || invoker.deps != "")
+ assert(!defined(invoker.visibility) || invoker.visibility != "")
+ }
+ }
+}
diff --git a/deps/v8/build/config/win/visual_studio_version.gni b/deps/v8/build/config/win/visual_studio_version.gni
new file mode 100644
index 0000000000..982fbe8d3f
--- /dev/null
+++ b/deps/v8/build/config/win/visual_studio_version.gni
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Path to Visual Studio. If empty, the default is used which is to use the
+ # automatic toolchain in depot_tools. If set, you must also set the
+ # visual_studio_version and wdk_path.
+ visual_studio_path = ""
+
+ # Version of Visual Studio pointed to by the visual_studio_path.
+ # Currently always "2015".
+ visual_studio_version = ""
+
+ # Directory of the Windows driver kit. If visual_studio_path is empty, this
+ # will be auto-filled.
+ wdk_path = ""
+
+ # Full path to the Windows SDK, not including a backslash at the end.
+ # This value is the default location, override if you have a different
+ # installation location.
+ windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10"
+}
+
+if (visual_studio_path == "") {
+ toolchain_data =
+ exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
+ visual_studio_path = toolchain_data.vs_path
+ windows_sdk_path = toolchain_data.sdk_path
+ visual_studio_version = toolchain_data.vs_version
+ wdk_path = toolchain_data.wdk_dir
+ visual_studio_runtime_dirs = toolchain_data.runtime_dirs
+} else {
+ assert(visual_studio_version != "",
+ "You must set the visual_studio_version if you set the path")
+ assert(wdk_path != "",
+ "You must set the wdk_path if you set the visual studio path")
+ visual_studio_runtime_dirs = []
+}
diff --git a/deps/v8/build/config/zip.gni b/deps/v8/build/config/zip.gni
new file mode 100644
index 0000000000..58cb692f71
--- /dev/null
+++ b/deps/v8/build/config/zip.gni
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("python.gni")
+
+# Creates a zip archive of the inputs.
+#
+# output (required)
+# Path to output zip.
+# inputs (required)
+# List of input files to zip.
+# base_dir (optional)
+# If provided, the archive paths will be relative to this directory.
+# Applies only to |inputs|.
+#
+# deps, public_deps, data, data_deps, testonly, visibility
+# Normal meaning.
+template("zip") {
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "deps",
+ "public_deps",
+ "testonly",
+ "visibility",
+ ])
+ script = "//build/android/gyp/zip.py"
+ inputs = invoker.inputs
+ outputs = [
+ invoker.output,
+ ]
+
+ args = [
+ "--output",
+ rebase_path(invoker.output, root_build_dir),
+ ]
+
+ _rebased_inputs = rebase_path(invoker.inputs, root_build_dir)
+ args += [ "--input-files=$_rebased_inputs" ]
+ if (defined(invoker.base_dir)) {
+ args += [
+ "--input-files-base-dir",
+ rebase_path(invoker.base_dir, root_build_dir),
+ ]
+ }
+ }
+}
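+
+# Illustrative usage (assumed file names):
+#
+#   zip("example_zip") {
+#     inputs = [ "data/a.txt", "data/b.txt" ]
+#     output = "$root_build_dir/example.zip"
+#     base_dir = "data"
+#   }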
diff --git a/deps/v8/build/copy_test_data_ios.py b/deps/v8/build/copy_test_data_ios.py
new file mode 100755
index 0000000000..6f0302f950
--- /dev/null
+++ b/deps/v8/build/copy_test_data_ios.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
+class WrongNumberOfArgumentsException(Exception):
+ pass
+
+def EscapePath(path):
+ """Returns a path with spaces escaped."""
+ return path.replace(" ", "\\ ")
+
+def ListFilesForPath(path):
+ """Returns a list of all the files under a given path."""
+ output = []
+ # Ignore revision control metadata directories.
+ if (os.path.basename(path).startswith('.git') or
+ os.path.basename(path).startswith('.svn')):
+ return output
+
+ # Files get returned without modification.
+ if not os.path.isdir(path):
+ output.append(path)
+ return output
+
+ # Directories get recursively expanded.
+ contents = os.listdir(path)
+ for item in contents:
+ full_path = os.path.join(path, item)
+ output.extend(ListFilesForPath(full_path))
+ return output
+
+def CalcInputs(inputs):
+ """Computes the full list of input files for a set of command-line arguments.
+ """
+ # |inputs| is a list of paths, which may be directories.
+ output = []
+ for input in inputs:
+ output.extend(ListFilesForPath(input))
+ return output
+
+def CopyFiles(relative_filenames, output_basedir):
+ """Copies files to the given output directory."""
+ for file in relative_filenames:
+ relative_dirname = os.path.dirname(file)
+ output_dir = os.path.join(output_basedir, relative_dirname)
+ output_filename = os.path.join(output_basedir, file)
+
+ # In cases where a directory has turned into a file or vice versa, delete it
+ # before copying it below.
+ if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+ os.remove(output_dir)
+ if os.path.exists(output_filename) and os.path.isdir(output_filename):
+ shutil.rmtree(output_filename)
+
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ shutil.copy(file, output_filename)
+
+def DoMain(argv):
+ parser = optparse.OptionParser()
+ usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+ parser.set_usage(usage)
+ parser.add_option('-o', dest='output_dir')
+ parser.add_option('--inputs', action='store_true', dest='list_inputs')
+ parser.add_option('--outputs', action='store_true', dest='list_outputs')
+ options, arglist = parser.parse_args(argv)
+
+ if len(arglist) == 0:
+ raise WrongNumberOfArgumentsException('<input_files> required.')
+
+ files_to_copy = CalcInputs(arglist)
+ escaped_files = [EscapePath(x) for x in files_to_copy]  # Avoid re-walking.
+ if options.list_inputs:
+ return '\n'.join(escaped_files)
+
+ if not options.output_dir:
+ raise WrongNumberOfArgumentsException('-o required.')
+
+ if options.list_outputs:
+ outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+ return '\n'.join(outputs)
+
+ CopyFiles(files_to_copy, options.output_dir)
+ return
+
+def main(argv):
+ try:
+ result = DoMain(argv[1:])
+ except WrongNumberOfArgumentsException, e:
+ print >>sys.stderr, e
+ return 1
+ if result:
+ print result
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/deps/v8/build/cp.py b/deps/v8/build/cp.py
new file mode 100755
index 0000000000..0f32536b62
--- /dev/null
+++ b/deps/v8/build/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+ # Use copy instead of copyfile to ensure the executable bit is copied.
+ return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/deps/v8/build/detect_host_arch.py b/deps/v8/build/detect_host_arch.py
new file mode 100755
index 0000000000..1c13aa653e
--- /dev/null
+++ b/deps/v8/build/detect_host_arch.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+ """Returns the host architecture with a predictable string."""
+ host_arch = platform.machine()
+
+ # Convert machine type to format recognized by gyp.
+ if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+ host_arch = 'ia32'
+ elif host_arch in ['x86_64', 'amd64']:
+ host_arch = 'x64'
+ elif host_arch.startswith('arm'):
+ host_arch = 'arm'
+ elif host_arch.startswith('aarch64'):
+ host_arch = 'arm64'
+ elif host_arch.startswith('mips64'):
+ host_arch = 'mips64'
+ elif host_arch.startswith('mips'):
+ host_arch = 'mips'
+ elif host_arch.startswith('ppc'):
+ host_arch = 'ppc'
+ elif host_arch.startswith('s390'):
+ host_arch = 's390'
+
+ # platform.machine is based on the running kernel. It's possible to use a
+ # 64-bit kernel with a 32-bit userland, e.g. to give the linker slightly
+ # more memory.
+ # Distinguish between different userland bitness by querying
+ # the python binary.
+ if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+ host_arch = 'ia32'
+ if host_arch == 'arm64' and platform.architecture()[0] == '32bit':
+ host_arch = 'arm'
+
+ return host_arch
+
+def DoMain(_):
+ """Hook to be called from gyp without starting a separate python
+ interpreter."""
+ return HostArch()
+
+if __name__ == '__main__':
+ print DoMain([])
diff --git a/deps/v8/build/dir_exists.py b/deps/v8/build/dir_exists.py
new file mode 100755
index 0000000000..70d367ec26
--- /dev/null
+++ b/deps/v8/build/dir_exists.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+ sys.stdout.write(_is_dir(sys.argv[1]))
+ return 0
+
+def _is_dir(dir_name):
+ return str(os.path.isdir(dir_name))
+
+def DoMain(args):
+ """Hook to be called from gyp without starting a separate python
+ interpreter."""
+ return _is_dir(args[0])
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/docs/debugging_slow_builds.md b/deps/v8/build/docs/debugging_slow_builds.md
new file mode 100644
index 0000000000..315690c32f
--- /dev/null
+++ b/deps/v8/build/docs/debugging_slow_builds.md
@@ -0,0 +1,19 @@
+# Debugging slow builds
+
+Some tips for debugging slow build times:
+* Use [ninjatracing](https://github.com/nico/ninjatracing) and chrome://tracing to
+ view a timeline of the most recent build.
+ * Many bots output a build trace (look for a `"ninja_log"` link).
+* Use `gn gen --tracelog trace.json` to create a similar trace for `gn gen`.
+* Depot Tools' `autoninja` has logic for summarizing slow steps. Enable it via:
+ * `NINJA_SUMMARIZE_BUILD=1 autoninja -C out/Debug my_target`
+* Many Android templates make use of
+ [`md5_check.py`](https://cs.chromium.org/chromium/src/build/android/gyp/util/md5_check.py)
+ to optimize incremental builds.
+ * Set `PRINT_BUILD_EXPLANATIONS=1` to have these commands log which inputs
+ changed.
+* If you suspect files are being rebuilt unnecessarily during incremental
+ builds:
+ * Use `ninja -n -d explain` to figure out why ninja thinks a target is dirty.
+ * Ensure actions are taking advantage of ninja's `restat=1` feature by not
+ updating timestamps on outputs when their content does not change.
diff --git a/deps/v8/build/docs/mac_hermetic_toolchain.md b/deps/v8/build/docs/mac_hermetic_toolchain.md
new file mode 100644
index 0000000000..b1ee13ef67
--- /dev/null
+++ b/deps/v8/build/docs/mac_hermetic_toolchain.md
@@ -0,0 +1,44 @@
+# Mac and iOS hermetic toolchain instructions
+
+The following is a short explanation of why we use the hermetic toolchain
+and instructions on how to roll a new toolchain.
+
+## How to roll a new hermetic toolchain.
+
+1. Download a new version of Xcode, and confirm either mac or ios builds
+ properly with this new version.
+
+2. Run the following command:
+
+ ```
+ src/build/package_mac_toolchain.py /path/to/Xcode.app/ [ios|mac]
+ ```
+
+ The script will create a subset of the toolchain necessary for a build, and
+ upload it for use by hermetic builds.
+
+ If for some reason this toolchain version has already been uploaded, the
+ script will ask if we should create a sub revision. This can be necessary when
+ the package script has been updated to compress additional files.
+
+3. Create a CL with updated [MAC|IOS]_TOOLCHAIN_VERSION and _SUB_REVISION in
+ src/build/mac_toolchain.py with the version created by the previous command.
+
+4. Run the CL through the trybots to confirm the roll works.
+
+## Why we use a hermetic toolchain.
+
+Building Chrome Mac currently requires many binaries that come bundled with
+Xcode, as well as the macOS and iPhoneOS SDKs [also bundled with Xcode]. Note
+that Chrome ships its own version of clang [compiler], but depends on Xcode
+for these other binaries.
+
+Chrome should be built against the latest SDK available, but historically,
+updating the SDK has been nontrivial. Additionally, bot system installs can
+range from Xcode 5 on some bots to the latest and greatest. Using a hermetic
+toolchain has two main benefits:
+
+1. Build Chrome with a well-defined toolchain [rather than whatever happens to
+be installed on the machine].
+
+2. Easily roll/update the toolchain.
diff --git a/deps/v8/build/docs/writing_gn_templates.md b/deps/v8/build/docs/writing_gn_templates.md
new file mode 100644
index 0000000000..1d1e68bf20
--- /dev/null
+++ b/deps/v8/build/docs/writing_gn_templates.md
@@ -0,0 +1,259 @@
+# Writing GN Templates
+GN and Ninja are documented here:
+* GN: https://gn.googlesource.com/gn/+/master/docs/
+* Ninja: https://ninja-build.org/manual.html
+
+[TOC]
+
+## Things to Consider When Writing Templates
+### Inputs and Depfiles
+* List all files read (or executed) by an action as `inputs`.
+ * It is [not enough](https://chromium-review.googlesource.com/c/chromium/src/+/1090231)
+ to have inputs listed by dependent targets. They must be listed directly by targets that use them.
+ * Non-system Python imports are inputs! For scripts that import such modules,
+ use [`action_with_pydeps`](https://cs.chromium.org/chromium/src/build/config/python.gni?rcl=320ee4295eb7fabaa112f08d1aacc88efd1444e5&l=75)
+ to ensure all dependent Python files are captured as inputs.
+* For action inputs that are not computable during "gn gen", actions can write
+ depfiles (.d files) to add additional input files as dependencies for
+ subsequent builds; they are relevant only for incremental builds (see the
+ example below).
+ * Depfiles should not list files that GN already lists as `inputs`.
+ * Besides being redundant, listing them also makes it harder to remove
+ inputs, since removing them from GN does not immediately remove them from
+ depfiles.
+ * Stale paths in depfiles can cause ninja to complain of circular
+ dependencies [in some cases](https://bugs.chromium.org/p/chromium/issues/detail?id=639042).
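+
+As a concrete illustration, a minimal action (hypothetical target and script
+names) that declares both static `inputs` and a depfile might look like:
+
+```python
+action("generate_foo") {
+  # Hypothetical script; it is assumed to honor the flags below.
+  script = "generate_foo.py"
+  # Files known statically at "gn gen" time.
+  inputs = [ "foo.template" ]
+  outputs = [ "$target_gen_dir/foo.h" ]
+  # The script writes any extra inputs it discovers while running here;
+  # they become dependencies for subsequent incremental builds.
+  depfile = "$target_gen_dir/foo.h.d"
+  args = [
+    "--template", rebase_path("foo.template", root_build_dir),
+    "--output", rebase_path("$target_gen_dir/foo.h", root_build_dir),
+    "--depfile", rebase_path(depfile, root_build_dir),
+  ]
+}
+```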
+
+### Ensuring "gn analyze" Knows About your Inputs
+"gn analyze" is used by bots to run only affected tests and build only affected
+targets. Try it out locally via:
+```bash
+echo "compute_inputs_for_analyze = true" >> out/Debug/args.gn
+gn analyze //out/Debug <(echo '{
+ "files": ["//BUILD.gn"],
+ "test_targets": ["//base"],
+ "additional_compile_targets":[]}') result.txt; cat result.txt
+```
+* For analyze to work properly, GN must know about all inputs.
+* Inputs added by depfiles are *not available* to "gn analyze".
+ * When paths listed in a target's depfile are listed as `inputs` to a
+ dependent target, analyze will be correct.
+ * Example: An `AndroidManifest.xml` file is an input to an
+ `android_library()` and is included in an `android_apk()`'s depfile.
+ `gn analyze` will know that a change to the file will require the APK
+ to be rebuilt, because the file is marked as an input to the library, and
+ the library is a dep of the APK.
+ * When paths listed in a target's depfile are *not* listed as `inputs` to a
+ dependent target, a few options exist:
+ * Rather than putting the inputs in a depfile, force users of your template
+ to list them, and then have your action re-compute them and assert that
+ they were correct.
+ * `jinja_template()` does this.
+ * Rather than putting the inputs in a depfile, compute them beforehand and
+ save them to a text file. Have your template use `read_file()` to read
+ them in (see the sketch after this list).
+ * `action_with_pydeps()` does this.
+ * Continue using a depfile, but use an `exec_script()` to compute them when
+ [`compute_inputs_for_analyze`](https://cs.chromium.org/chromium/src/build/config/compute_inputs_for_analyze.gni)
+ is set.
+ * `grit()` does this.
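+
+A sketch of the "compute them beforehand" option (hypothetical target and file
+names):
+
+```python
+# foo_inputs.txt (hypothetical) is generated ahead of time, one path per line.
+_foo_inputs = read_file("//foo/foo_inputs.txt", "list lines")
+
+action("foo") {
+  script = "foo.py"
+  inputs = _foo_inputs
+  outputs = [ "$target_gen_dir/foo.out" ]
+  args = rebase_path(inputs, root_build_dir)
+}
+```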
+
+### Outputs
+#### What to List as Outputs
+Do not list files as `outputs` unless they are important. Outputs are important
+if they are:
+ * used as an input by another target, or
+ * roots in the dependency graph (e.g. binaries, APKs, etc.).
+
+Example:
+* An action runs a binary that creates an output as well as a log file. Do not
+ list the log file as an output.
+
+#### Where to Place Outputs
+**Option 1:** To make outputs visible in codesearch (e.g. generated sources):
+* use `$target_gen_dir/$target_name.$EXTENSION`.
+
+**Option 2:** Otherwise (for binary files):
+* use `$target_out_dir/$target_name.$EXTENSION`.
+
+**Option 3:** For outputs that are required at runtime
+(e.g. [runtime_deps](https://gn.googlesource.com/gn/+/master/docs/reference.md#runtime_deps)),
+options 1 & 2 do not work because they are not archived in builder/tester bot
+configurations. In this case:
+* use `$root_out_dir/gen.runtime` or `$root_out_dir/obj.runtime`.
+
+Example:
+```python
+# This .json file is used at runtime and thus cannot go in target_gen_dir.
+_target_dir_name = rebase_path(get_label_info(":$target_name", "dir"), "//")
+_output_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.json"
+```
+
+**Option 4:** For outputs that map 1:1 with executables, and whose paths cannot
+be derived at runtime:
+* use `$root_build_dir/YOUR_NAME_HERE/$target_name`.
+
+Examples:
+```python
+# Wrapper scripts for apks:
+_output_path = "$root_build_dir/bin/$target_name"
+# Metadata for apks. Used by binary size tools.
+_output_path = "$root_build_dir/size-info/${invoker.name}.apk.jar.info"
+```
+
+## Best Practices for Python Actions
+Outputs should be atomic and take advantage of `restat=1`.
+* Make outputs atomic by writing to temporary files and then moving them to
+ their final location.
+ * Rationale: An interrupted write can leave a file with an updated timestamp
+ and corrupt contents. Ninja looks only at timestamps.
+* Do not overwrite an existing output with identical contents.
+ * Rationale: `restat=1` is a ninja feature, enabled for all actions, that
+ short-circuits a build when output timestamps do not change. This feature
+ is the reason that the total number of build steps sometimes decreases
+ while building.
+* Use [`build_utils.AtomicOutput()`](https://cs.chromium.org/chromium/src/build/android/gyp/util/build_utils.py?rcl=7d6ba28e92bec865a7b7876c35b4621d56fb37d8&l=128)
+ to perform both of these techniques.
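+
+A minimal sketch of both techniques together (an illustrative helper, not the
+Chromium API):
+
+```python
+import os
+import shutil
+import tempfile
+
+
+def write_if_changed(path, new_contents):
+  """Illustrative only: atomically writes |new_contents| (bytes) to |path|."""
+  # Skip the write entirely when contents are identical, so the output's
+  # timestamp is untouched and ninja's restat=1 can short-circuit the build.
+  if os.path.exists(path):
+    with open(path, 'rb') as f:
+      if f.read() == new_contents:
+        return
+  # Write to a temporary file in the same directory, then rename it into
+  # place, so an interrupted build never leaves a half-written output.
+  fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
+  with os.fdopen(fd, 'wb') as f:
+    f.write(new_contents)
+  shutil.move(tmp_path, path)
+```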
+
+Actions should be deterministic in order to avoid hard-to-reproduce bugs.
+Given identical inputs, they should produce byte-for-byte identical outputs.
+* Some common mistakes:
+ * Depending on filesystem iteration order.
+ * Writing timestamps in files (or in zip entries).
+ * Writing absolute paths in outputs.
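+
+For instance, a deterministic way to produce a zip (a sketch, not the actual
+Chromium helper) pins entry order, archive paths, and timestamps:
+
+```python
+import os
+import zipfile
+
+
+def make_deterministic_zip(zip_path, file_paths, base_dir):
+  # Sketch only: demonstrates the determinism rules above.
+  with zipfile.ZipFile(zip_path, 'w') as z:
+    # Sort entries instead of relying on filesystem iteration order.
+    for path in sorted(file_paths):
+      # Store paths relative to base_dir, never absolute paths.
+      arcname = os.path.relpath(path, base_dir)
+      with open(path, 'rb') as f:
+        data = f.read()
+      # Pin the entry timestamp so identical inputs yield identical bytes.
+      info = zipfile.ZipInfo(arcname, date_time=(1980, 1, 1, 0, 0, 0))
+      info.compress_type = zipfile.ZIP_DEFLATED
+      info.external_attr = 0o644 << 16  # Fixed permissions as well.
+      z.writestr(info, data)
+```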
+
+## Style Guide
+Chromium GN files follow
+[GN's Style Guide](https://gn.googlesource.com/gn/+/master/docs/style_guide.md)
+with a few additions.
+
+### Action Granularity
+ * Prefer writing a new Python script that does what you want over
+ composing multiple separate actions within a template.
+ * Fewer targets make for a simpler build graph.
+ * GN logic and build logic wind up much simpler.
+
+Bad:
+```python
+template("generate_zipped_sources") {
+ generate_files("${target_name}__gen") {
+ ...
+ outputs = [ "$target_gen_dir/$target_name.temp" ]
+ }
+ zip(target_name) {
+ deps = [ ":${target_name}__gen" ]
+ inputs = [ "$target_gen_dir/$target_name.temp" ]
+ outputs = [ invoker.output_zip ]
+ }
+}
+```
+
+Good:
+```python
+template("generate_zipped_sources") {
+ action(target_name) {
+ script = "generate_and_zip.py"
+ ...
+ outputs = [ invoker.output_zip ]
+ }
+}
+```
+
+### Naming for Intermediate Targets
+Targets that are not relevant to users of your template should be named as:
+`${target_name}__$something`.
+
+Example:
+```python
+template("my_template") {
+ action("${target_name}__helper") {
+ ...
+ }
+ action(target_name) {
+ deps = [ ":${target_name}__helper" ]
+ ...
+ }
+}
+```
+
+### Variables
+Prefix variables within templates and targets with an underscore. For example:
+
+```python
+template("example") {
+ _outer_sources = invoker.extra_sources
+
+ source_set(target_name) {
+ _inner_sources = invoker.sources
+ sources = _outer_sources + _inner_sources
+ }
+}
+```
+
+This convention conveys that `sources` is relevant to `source_set`, while
+`_outer_sources` and `_inner_sources` are not.
+
+### Passing Arguments to Targets
+Pass arguments to targets by assigning them directly within target definitions.
+
+When a GN template goes to resolve `invoker.FOO`, GN will look in all enclosing
+scopes of the target's definition. It is hard to figure out where `invoker.FOO`
+is coming from when it is not assigned directly within the target definition.
+
+Bad:
+```python
+template("hello") {
+ script = "..."
+ action(target_name) {
+ # This action will see "script" from the enclosing scope.
+ }
+}
+```
+
+Good:
+```python
+template("hello") {
+ action(target_name) {
+ script = "..." # This is equivalent, but much more clear.
+ }
+}
+```
+
+**Exception:** `testonly` and `visibility` can be set in the outer scope so that
+they are implicitly passed to all targets within a template.
+
+This is okay:
+```python
+template("hello") {
+ testonly = true # Applies to all nested targets.
+ action(target_name) {
+ script = "..."
+ }
+}
+```
+
+### Using forward_variables_from()
+Using `forward_variables_from()` is encouraged, but `testonly` and `visibility`
+should always be listed explicitly in case they are assigned in an enclosing
+scope (applies to the `"*"` variant of `forward_variables_from()`).
+See [this bug](https://bugs.chromium.org/p/chromium/issues/detail?id=862232)
+for more context.
+
+```python
+template("action_wrapper") {
+ action(target_name) {
+ forward_variables_from(invoker, "*", [ "testonly", "visibility" ])
+ forward_variables_from(invoker, [ "testonly", "visibility" ])
+ ...
+ }
+}
+```
+
+## Useful Ninja Flags
+Useful ninja flags when developing build rules:
+* `ninja -v` - log the full command-line of every target.
+* `ninja -v -n` - log the full command-line of every target without having
+ to wait for a build.
+* `ninja -w dupbuild=err` - fail if multiple targets have the same output.
+* `ninja -d keeprsp` - prevent ninja from deleting response files.
+* `ninja -n -d explain` - print why ninja thinks a target is dirty.
+* `ninja -j1` - execute only one command at a time.
diff --git a/deps/v8/build/dotfile_settings.gni b/deps/v8/build/dotfile_settings.gni
new file mode 100644
index 0000000000..407a9cd144
--- /dev/null
+++ b/deps/v8/build/dotfile_settings.gni
@@ -0,0 +1,38 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains variables that can be imported into a repo's dotfile (.gn)
+# to make it easier to roll new versions of //build in.
+
+build_dotfile_settings = {
+ exec_script_whitelist = [
+ "//build/config/android/config.gni",
+ "//build/config/android/internal_rules.gni",
+ "//build/config/android/rules.gni",
+ "//build/config/compiler/BUILD.gn",
+ "//build/config/gcc/gcc_version.gni",
+ "//build/config/host_byteorder.gni",
+ "//build/config/ios/ios_sdk.gni",
+ "//build/config/linux/BUILD.gn",
+ "//build/config/linux/pkg_config.gni",
+ "//build/config/linux/atk/BUILD.gn",
+ "//build/config/linux/atspi2/BUILD.gn",
+ "//build/config/linux/dri/BUILD.gn",
+ "//build/config/mac/mac_sdk.gni",
+ "//build/config/mac/rules.gni",
+ "//build/config/posix/BUILD.gn",
+ "//build/config/sysroot.gni",
+ "//build/config/win/BUILD.gn",
+ "//build/config/win/visual_studio_version.gni",
+ "//build/timestamp.gni",
+ "//build/toolchain/BUILD.gn",
+ "//build/toolchain/concurrent_links.gni",
+ "//build/toolchain/mac/BUILD.gn",
+ "//build/toolchain/nacl/BUILD.gn",
+ "//build/toolchain/toolchain.gni",
+ "//build/toolchain/win/BUILD.gn",
+ "//build/util/branding.gni",
+ "//build/util/version.gni",
+ ]
+}
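+
+# Example (hypothetical consumer): a repository's top-level .gn file can import
+# this file and reuse the whitelist, e.g.:
+#
+#   import("//build/dotfile_settings.gni")
+#   buildconfig = "//build/config/BUILDCONFIG.gn"
+#   exec_script_whitelist = build_dotfile_settings.exec_script_whitelist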
diff --git a/deps/v8/build/download_nacl_toolchains.py b/deps/v8/build/download_nacl_toolchains.py
new file mode 100755
index 0000000000..286a92a27e
--- /dev/null
+++ b/deps/v8/build/download_nacl_toolchains.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+from __future__ import print_function
+
+import os
+import shutil
+import sys
+
+
+def Main(args):
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ src_dir = os.path.dirname(script_dir)
+ nacl_dir = os.path.join(src_dir, 'native_client')
+ nacl_build_dir = os.path.join(nacl_dir, 'build')
+ package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+ package_version = os.path.join(package_version_dir, 'package_version.py')
+ if not os.path.exists(package_version):
+ print("Can't find '%s'" % package_version)
+ print('Presumably you are intentionally building without NativeClient.')
+ print('Skipping NativeClient toolchain download.')
+ sys.exit(0)
+ sys.path.insert(0, package_version_dir)
+ import package_version
+
+ # Strip the --optional-pnacl argument and, by default, exclude the PNaCl
+ # toolchain from the download. However, if the builder name contains both
+ # 'pnacl' and 'sdk', we go ahead and download it. This prevents increasing
+ # the gclient sync time for developers or standard Chrome bots.
+ if '--optional-pnacl' in args:
+ args.remove('--optional-pnacl')
+ use_pnacl = False
+ buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+ if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
+ use_pnacl = True
+ if use_pnacl:
+ print('\n*** DOWNLOADING PNACL TOOLCHAIN ***\n')
+ else:
+ args = ['--exclude', 'pnacl_newlib'] + args
+
+ # Only download the ARM gcc toolchain if we are building for ARM
+ # TODO(olonho): we need to invent more reliable way to get build
+ # configuration info, to know if we're building for ARM.
+ if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+ args = ['--exclude', 'nacl_arm_newlib'] + args
+
+ return package_version.main(args)
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/deps/v8/build/download_translation_unit_tool.py b/deps/v8/build/download_translation_unit_tool.py
new file mode 100755
index 0000000000..b60d33a19f
--- /dev/null
+++ b/deps/v8/build/download_translation_unit_tool.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download Clang translation_unit tool from google storage."""
+
+import find_depot_tools
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+
+LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
+ 'Release+Asserts')
+CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
+ 'update.py')
+
+CLANG_BUCKET = 'gs://chromium-browser-clang'
+
+
+def main():
+ clang_revision = subprocess.check_output([sys.executable, CLANG_UPDATE_PY,
+ '--print-revision']).rstrip()
+ targz_name = 'translation_unit-%s.tgz' % clang_revision
+
+ if sys.platform == 'win32' or sys.platform == 'cygwin':
+ cds_full_url = CLANG_BUCKET + '/Win/' + targz_name
+ elif sys.platform == 'darwin':
+ cds_full_url = CLANG_BUCKET + '/Mac/' + targz_name
+ else:
+ assert sys.platform.startswith('linux')
+ cds_full_url = CLANG_BUCKET + '/Linux_x64/' + targz_name
+
+ os.chdir(LLVM_BUILD_PATH)
+
+ subprocess.check_call([sys.executable, GSUTIL_PATH,
+ 'cp', cds_full_url, targz_name])
+ tarfile.open(name=targz_name, mode='r:gz').extractall(path=LLVM_BUILD_PATH)
+
+ os.remove(targz_name)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/env_dump.py b/deps/v8/build/env_dump.py
new file mode 100755
index 0000000000..3f8217398c
--- /dev/null
+++ b/deps/v8/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes it
+# makes, or simply dump the current environment as JSON into a file.
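+#
+# Example usage (hypothetical setup script):
+#   ./env_dump.py -f env_diff.json ./envsetup.sh --board=foo
+# This writes only the variables changed by sourcing envsetup.sh into
+# env_diff.json.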
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-f', '--output-json',
+ help='File to dump the environment as JSON into.')
+ parser.add_option(
+ '-d', '--dump-mode', action='store_true',
+ help='Dump the environment to sys.stdout and exit immediately.')
+
+ parser.disable_interspersed_args()
+ options, args = parser.parse_args()
+ if options.dump_mode:
+ if args or options.output_json:
+ parser.error('Cannot specify args or --output-json with --dump-mode.')
+ json.dump(dict(os.environ), sys.stdout)
+ else:
+ if not options.output_json:
+ parser.error('Requires --output-json option.')
+
+ envsetup_cmd = ' '.join(map(pipes.quote, args))
+ full_cmd = [
+ 'bash', '-c',
+ '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+ ]
+ try:
+ output = subprocess.check_output(full_cmd)
+ except Exception as e:
+ sys.exit('Error running %s and dumping environment: %s' %
+ (envsetup_cmd, e))
+
+ env_diff = {}
+ new_env = json.loads(output)
+ for k, val in new_env.items():
+ if k == '_' or (k in os.environ and os.environ[k] == val):
+ continue
+ env_diff[k] = val
+ with open(options.output_json, 'w') as f:
+ json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/extract_from_cab.py b/deps/v8/build/extract_from_cab.py
new file mode 100755
index 0000000000..080370ca9a
--- /dev/null
+++ b/deps/v8/build/extract_from_cab.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def run_quiet(*args):
+ """Run 'expand' suppressing noisy output. Returns returncode from process."""
+ popen = subprocess.Popen(args, stdout=subprocess.PIPE)
+ out, _ = popen.communicate()
+ if popen.returncode:
+ # expand emits errors to stdout, so if we fail, then print that out.
+ print out
+ return popen.returncode
+
+def main():
+ if len(sys.argv) != 4:
+ print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
+ return 1
+
+ [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+ # Expand.exe does its work in a fixed-named temporary directory created within
+ # the given output directory. This is a problem for concurrent extractions, so
+ # create a unique temp dir within the desired output directory to work around
+ # this limitation.
+ temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+ try:
+ # Invoke the Windows expand utility to extract the file.
+ level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
+ if level == 0:
+ # Move the output file into place, preserving expand.exe's behavior of
+ # paving over any preexisting file.
+ output_file = os.path.join(output_dir, archived_file)
+ try:
+ os.remove(output_file)
+ except OSError:
+ pass
+ os.rename(os.path.join(temp_dir, archived_file), output_file)
+ finally:
+ shutil.rmtree(temp_dir, True)
+
+ if level != 0:
+ return level
+
+ # The expand utility preserves the modification date and time of the archived
+ # file. Touch the extracted file. This helps build systems that compare the
+ # modification times of input and output files to determine whether to do an
+ # action.
+ os.utime(os.path.join(output_dir, archived_file), None)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/find_depot_tools.py b/deps/v8/build/find_depot_tools.py
new file mode 100755
index 0000000000..5c496e7c79
--- /dev/null
+++ b/deps/v8/build/find_depot_tools.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Small utility function to find depot_tools and add it to the python path.
+
+Will throw an ImportError exception if depot_tools can't be found since it
+imports breakpad.
+
+This can also be used as a standalone script to print out the depot_tools
+directory location.
+"""
+
+import os
+import sys
+
+
+# Path to //src
+SRC = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def IsRealDepotTools(path):
+ expanded_path = os.path.expanduser(path)
+ return os.path.isfile(os.path.join(expanded_path, 'gclient.py'))
+
+
+def add_depot_tools_to_path():
+ """Search for depot_tools and add it to sys.path."""
+ # First, check if we have a DEPS'd in "depot_tools".
+ deps_depot_tools = os.path.join(SRC, 'third_party', 'depot_tools')
+ if IsRealDepotTools(deps_depot_tools):
+ # Put the pinned version at the start of the sys.path, in case there
+ # are other non-pinned versions already on the sys.path.
+ sys.path.insert(0, deps_depot_tools)
+ return deps_depot_tools
+
+ # Then look if depot_tools is already in PYTHONPATH.
+ for i in sys.path:
+ if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i):
+ return i
+ # Then look if depot_tools is in PATH, common case.
+ for i in os.environ['PATH'].split(os.pathsep):
+ if IsRealDepotTools(i):
+ sys.path.append(i.rstrip(os.sep))
+ return i
+ # Rare case, it's not even in PATH, look upward up to root.
+ root_dir = os.path.dirname(os.path.abspath(__file__))
+ previous_dir = os.path.abspath(__file__)
+ while root_dir and root_dir != previous_dir:
+ i = os.path.join(root_dir, 'depot_tools')
+ if IsRealDepotTools(i):
+ sys.path.append(i)
+ return i
+ previous_dir = root_dir
+ root_dir = os.path.dirname(root_dir)
+ print >> sys.stderr, 'Failed to find depot_tools'
+ return None
+
+DEPOT_TOOLS_PATH = add_depot_tools_to_path()
+
+# pylint: disable=W0611
+import breakpad
+
+
+def main():
+ if DEPOT_TOOLS_PATH is None:
+ return 1
+ print DEPOT_TOOLS_PATH
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/find_isolated_tests.py b/deps/v8/build/find_isolated_tests.py
new file mode 100755
index 0000000000..c5b3ab77a9
--- /dev/null
+++ b/deps/v8/build/find_isolated_tests.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Scans build output directory for .isolated files, calculates their SHA1
+hashes, stores final list in JSON document and then removes *.isolated files
+found (to ensure no stale *.isolated stay around on the next build).
+
+Used to figure out what tests were build in isolated mode to trigger these
+tests to run on swarming.
+
+For more info see:
+https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
+"""
+
+import glob
+import hashlib
+import json
+import optparse
+import os
+import re
+import sys
+
+
+def hash_file(filepath):
+ """Calculates the hash of a file without reading it all in memory at once."""
+ digest = hashlib.sha1()
+ with open(filepath, 'rb') as f:
+ while True:
+ chunk = f.read(1024*1024)
+ if not chunk:
+ break
+ digest.update(chunk)
+ return digest.hexdigest()
+
+
+def main():
+ parser = optparse.OptionParser(
+ usage='%prog --build-dir <path> --output-json <path>',
+ description=sys.modules[__name__].__doc__)
+ parser.add_option(
+ '--build-dir',
+ help='Path to a directory to search for *.isolated files.')
+ parser.add_option(
+ '--output-json',
+ help='File to dump JSON results into.')
+
+ options, _ = parser.parse_args()
+ if not options.build_dir:
+ parser.error('--build-dir option is required')
+ if not options.output_json:
+ parser.error('--output-json option is required')
+
+ result = {}
+
+ # Get the file hash values and output the pair.
+ pattern = os.path.join(options.build_dir, '*.isolated')
+ for filepath in sorted(glob.glob(pattern)):
+ test_name = os.path.splitext(os.path.basename(filepath))[0]
+ if re.match(r'^.+?\.\d$', test_name):
+ # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
+ continue
+
+ # TODO(csharp): Remove deletion once the isolate tracked dependencies are
+ # inputs for the isolated files.
+ sha1_hash = hash_file(filepath)
+ os.remove(filepath)
+ result[test_name] = sha1_hash
+
+ with open(options.output_json, 'wb') as f:
+ json.dump(result, f)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/fix_gn_headers.py b/deps/v8/build/fix_gn_headers.py
new file mode 100755
index 0000000000..01ff764e06
--- /dev/null
+++ b/deps/v8/build/fix_gn_headers.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fix header files missing in GN.
+
+This script takes the missing header files from check_gn_headers.py, and
+tries to fix them by adding them to the GN files.
+Manual cleanup is likely required afterwards.
+"""
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+
+def GitGrep(pattern):
+ p = subprocess.Popen(
+ ['git', 'grep', '-En', pattern, '--', '*.gn', '*.gni'],
+ stdout=subprocess.PIPE)
+ out, _ = p.communicate()
+ return out, p.returncode
+
+
+def ValidMatches(basename, cc, grep_lines):
+ """Filter out 'git grep' matches with header files already."""
+ matches = []
+ for line in grep_lines:
+ gnfile, linenr, contents = line.split(':')
+ linenr = int(linenr)
+ new = re.sub(cc, basename, contents)
+ lines = open(gnfile).read().splitlines()
+ assert contents in lines[linenr - 1]
+ # Skip if it's already there. It could be before or after the match.
+ if lines[linenr] == new:
+ continue
+ if lines[linenr - 2] == new:
+ continue
+ print ' ', gnfile, linenr, new
+ matches.append((gnfile, linenr, new))
+ return matches
+
+
+def AddHeadersNextToCC(headers, skip_ambiguous=True):
+ """Add header files next to the corresponding .cc files in GN files.
+
+ When skip_ambiguous is True, skip if multiple .cc files are found.
+ Returns unhandled headers.
+
+ Manual cleanup is likely required, especially if skip_ambiguous is False.
+ """
+ edits = {}
+ unhandled = []
+ for filename in headers:
+ filename = filename.strip()
+ if not (filename.endswith('.h') or filename.endswith('.hh')):
+ continue
+ basename = os.path.basename(filename)
+ print filename
+ cc = r'\b' + os.path.splitext(basename)[0] + r'\.(cc|cpp|mm)\b'
+ out, returncode = GitGrep('(/|")' + cc + '"')
+ if returncode != 0 or not out:
+ unhandled.append(filename)
+ continue
+
+ matches = ValidMatches(basename, cc, out.splitlines())
+
+ if len(matches) == 0:
+ continue
+ if len(matches) > 1:
+ print '\n[WARNING] Ambiguous matching for', filename
+ for i in enumerate(matches, 1):
+ print '%d: %s' % (i[0], i[1])
+ print
+ if skip_ambiguous:
+ continue
+
+ picked = raw_input('Pick the matches ("2,3" for multiple): ')
+ try:
+ matches = [matches[int(i) - 1] for i in picked.split(',')]
+ except (ValueError, IndexError):
+ continue
+
+ for match in matches:
+ gnfile, linenr, new = match
+ print ' ', gnfile, linenr, new
+ edits.setdefault(gnfile, {})[linenr] = new
+
+ for gnfile in edits:
+ lines = open(gnfile).read().splitlines()
+ for l in sorted(edits[gnfile].keys(), reverse=True):
+ lines.insert(l, edits[gnfile][l])
+ open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+ return unhandled
+
+
+def AddHeadersToSources(headers, skip_ambiguous=True):
+ """Add header files to the sources list in the first GN file.
+
+ The target GN file is the first one up the parent directories.
+ This usually does the wrong thing for _test files if the test and the main
+ target are in the same .gn file.
+ When skip_ambiguous is True, skip if multiple sources arrays are found.
+
+ "git cl format" afterwards is required. Manually cleaning up duplicated items
+ is likely required.
+ """
+ for filename in headers:
+ filename = filename.strip()
+ print filename
+ dirname = os.path.dirname(filename)
+ while not os.path.exists(os.path.join(dirname, 'BUILD.gn')):
+ dirname = os.path.dirname(dirname)
+ rel = filename[len(dirname) + 1:]
+ gnfile = os.path.join(dirname, 'BUILD.gn')
+
+ lines = open(gnfile).read().splitlines()
+ matched = [i for i, l in enumerate(lines) if ' sources = [' in l]
+ if skip_ambiguous and len(matched) > 1:
+ print '[WARNING] Multiple sources in', gnfile
+ continue
+
+ if len(matched) < 1:
+ continue
+ print ' ', gnfile, rel
+ index = matched[0]
+ lines.insert(index + 1, '"%s",' % rel)
+ open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+
+def RemoveHeader(headers, skip_ambiguous=True):
+ """Remove non-existing headers in GN files.
+
+ When skip_ambiguous is True, skip if multiple matches are found.
+ """
+ edits = {}
+ unhandled = []
+ for filename in headers:
+ filename = filename.strip()
+ if not (filename.endswith('.h') or filename.endswith('.hh')):
+ continue
+ basename = os.path.basename(filename)
+ print filename
+ out, returncode = GitGrep('(/|")' + basename + '"')
+ if returncode != 0 or not out:
+ unhandled.append(filename)
+ print ' Not found'
+ continue
+
+ grep_lines = out.splitlines()
+ matches = []
+ for line in grep_lines:
+ gnfile, linenr, contents = line.split(':')
+ print ' ', gnfile, linenr, contents
+ linenr = int(linenr)
+ lines = open(gnfile).read().splitlines()
+ assert contents in lines[linenr - 1]
+ matches.append((gnfile, linenr, contents))
+
+ if len(matches) == 0:
+ continue
+ if len(matches) > 1:
+ print '\n[WARNING] Ambiguous matching for', filename
+ for i in enumerate(matches, 1):
+ print '%d: %s' % (i[0], i[1])
+ print
+ if skip_ambiguous:
+ continue
+
+ picked = raw_input('Pick the matches ("2,3" for multiple): ')
+ try:
+ matches = [matches[int(i) - 1] for i in picked.split(',')]
+ except (ValueError, IndexError):
+ continue
+
+ for match in matches:
+ gnfile, linenr, contents = match
+ print ' ', gnfile, linenr, contents
+ edits.setdefault(gnfile, set()).add(linenr)
+
+ for gnfile in edits:
+ lines = open(gnfile).read().splitlines()
+ for l in sorted(edits[gnfile], reverse=True):
+ lines.pop(l - 1)
+ open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+ return unhandled
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('input_file', help="missing or non-existing headers, "
+ "output of check_gn_headers.py")
+ parser.add_argument('--prefix',
+ help="only handle path name with this prefix")
+ parser.add_argument('--remove', action='store_true',
+ help="treat input_file as non-existing headers")
+
+ args, _extras = parser.parse_known_args()
+
+ headers = open(args.input_file).readlines()
+
+ if args.prefix:
+ headers = [i for i in headers if i.startswith(args.prefix)]
+
+ if args.remove:
+ RemoveHeader(headers, False)
+ else:
+ unhandled = AddHeadersNextToCC(headers)
+ AddHeadersToSources(unhandled)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/fuchsia/OWNERS b/deps/v8/build/fuchsia/OWNERS
new file mode 100644
index 0000000000..22e1b69b8f
--- /dev/null
+++ b/deps/v8/build/fuchsia/OWNERS
@@ -0,0 +1,9 @@
+jamesr@chromium.org
+kmarshall@chromium.org
+scottmg@chromium.org
+sergeyu@chromium.org
+thakis@chromium.org
+wez@chromium.org
+
+# TEAM: cr-fuchsia@chromium.org
+# COMPONENT: Internals>PlatformIntegration
diff --git a/deps/v8/build/fuchsia/__init__.py b/deps/v8/build/fuchsia/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/fuchsia/__init__.py
diff --git a/deps/v8/build/fuchsia/boot_data.py b/deps/v8/build/fuchsia/boot_data.py
new file mode 100644
index 0000000000..7ff1efcbe6
--- /dev/null
+++ b/deps/v8/build/fuchsia/boot_data.py
@@ -0,0 +1,118 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions used to provision Fuchsia boot images."""
+
+import common
+import logging
+import os
+import subprocess
+import tempfile
+import time
+import uuid
+
+_SSH_CONFIG_TEMPLATE = """
+Host *
+ CheckHostIP no
+ StrictHostKeyChecking no
+ ForwardAgent no
+ ForwardX11 no
+ UserKnownHostsFile {known_hosts}
+ User fuchsia
+ IdentitiesOnly yes
+ IdentityFile {identity}
+ ServerAliveInterval 2
+ ServerAliveCountMax 5
+ ControlMaster auto
+ ControlPersist 1m
+ ControlPath /tmp/ssh-%r@%h:%p
+ ConnectTimeout 5
+ """
+
+FVM_TYPE_QCOW = 'qcow'
+FVM_TYPE_SPARSE = 'sparse'
+
+
+def _TargetCpuToSdkBinPath(target_arch):
+ """Returns the path to the SDK 'target' file directory for |target_cpu|."""
+
+ return os.path.join(common.SDK_ROOT, 'target', target_arch)
+
+
+def _GetPubKeyPath(output_dir):
+ """Returns a path to the generated SSH public key."""
+
+ return os.path.join(output_dir, 'id_ed25519.pub')
+
+
+def ProvisionSSH(output_dir):
+ """Generates a keypair and config file for SSH."""
+
+ host_key_path = os.path.join(output_dir, 'ssh_key')
+ host_pubkey_path = host_key_path + '.pub'
+ id_key_path = os.path.join(output_dir, 'id_ed25519')
+ id_pubkey_path = _GetPubKeyPath(output_dir)
+ known_hosts_path = os.path.join(output_dir, 'known_hosts')
+ ssh_config_path = os.path.join(output_dir, 'ssh_config')
+
+ logging.debug('Generating SSH credentials.')
+ if not os.path.isfile(host_key_path):
+ subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-h', '-f',
+ host_key_path, '-P', '', '-N', ''],
+ stdout=open(os.devnull, 'w'))
+ if not os.path.isfile(id_key_path):
+ subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-f', id_key_path,
+ '-P', '', '-N', ''], stdout=open(os.devnull, 'w'))
+
+ with open(ssh_config_path, "w") as ssh_config:
+ ssh_config.write(
+ _SSH_CONFIG_TEMPLATE.format(identity=id_key_path,
+ known_hosts=known_hosts_path))
+
+ if os.path.exists(known_hosts_path):
+ os.remove(known_hosts_path)
+
+
+def _MakeQcowDisk(output_dir, disk_path):
+ """Creates a QEMU copy-on-write version of |disk_path| in the output
+ directory."""
+
+ qimg_path = os.path.join(common.GetQemuRootForPlatform(), 'bin', 'qemu-img')
+ output_path = os.path.join(output_dir,
+ os.path.basename(disk_path) + '.qcow2')
+ subprocess.check_call([qimg_path, 'create', '-q', '-f', 'qcow2',
+ '-b', disk_path, output_path])
+ return output_path
+
+
+def GetTargetFile(target_arch, filename):
+ """Computes a path to |filename| in the Fuchsia target directory specific to
+ |target_arch|."""
+
+ return os.path.join(_TargetCpuToSdkBinPath(target_arch), filename)
+
+
+def GetSSHConfigPath(output_dir):
+ return output_dir + '/ssh_config'
+
+
+def GetBootImage(output_dir, target_arch):
+ """"Gets a path to the Zircon boot image, with the SSH client public key
+ added."""
+
+ ProvisionSSH(output_dir)
+ pubkey_path = _GetPubKeyPath(output_dir)
+ zbi_tool = os.path.join(common.SDK_ROOT, 'tools', 'zbi')
+ image_source_path = GetTargetFile(target_arch, 'fuchsia.zbi')
+ image_dest_path = os.path.join(output_dir, 'gen', 'fuchsia-with-keys.zbi')
+
+ cmd = [ zbi_tool, '-o', image_dest_path, image_source_path,
+ '-e', 'data/ssh/authorized_keys=' + pubkey_path ]
+ subprocess.check_call(cmd)
+
+ return image_dest_path
+
+
+def GetKernelArgs(output_dir):
+ return ['devmgr.epoch=%d' % time.time()]
diff --git a/deps/v8/build/fuchsia/common.py b/deps/v8/build/fuchsia/common.py
new file mode 100644
index 0000000000..1993374b30
--- /dev/null
+++ b/deps/v8/build/fuchsia/common.py
@@ -0,0 +1,86 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import platform
+import socket
+import subprocess
+import sys
+
+DIR_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'fuchsia-sdk', 'sdk')
+
+def EnsurePathExists(path):
+ """Checks that the file |path| exists on the filesystem and returns the path
+ if it does, raising an exception otherwise."""
+
+ if not os.path.exists(path):
+ raise IOError('Missing file: ' + path)
+
+ return path
+
+def GetHostOsFromPlatform():
+ host_platform = sys.platform
+ if host_platform.startswith('linux'):
+ return 'linux'
+ elif host_platform.startswith('darwin'):
+ return 'mac'
+ raise Exception('Unsupported host platform: %s' % host_platform)
+
+def GetHostArchFromPlatform():
+ host_arch = platform.machine()
+ if host_arch == 'x86_64':
+ return 'x64'
+ elif host_arch == 'aarch64':
+ return 'arm64'
+ raise Exception('Unsupported host architecture: %s' % host_arch)
+
+def GetQemuRootForPlatform():
+ return os.path.join(DIR_SOURCE_ROOT, 'third_party',
+ 'qemu-' + GetHostOsFromPlatform() + '-' +
+ GetHostArchFromPlatform())
+
+def ConnectPortForwardingTask(target, local_port, remote_port = 0):
+ """Establishes a port forwarding SSH task to a localhost TCP endpoint hosted
+ at port |local_port|. Blocks until port forwarding is established.
+
+ Returns the remote port number."""
+
+ forwarding_flags = ['-O', 'forward', # Send SSH mux control signal.
+ '-R', '%d:localhost:%d' % (remote_port, local_port),
+ '-v', # Get forwarded port info from stderr.
+ '-NT'] # Don't execute command; don't allocate terminal.
+
+ if remote_port != 0:
+ # Forward to a known remote port.
+ task = target.RunCommand([], ssh_args=forwarding_flags)
+ if task.returncode != 0:
+ raise Exception('Could not establish a port forwarding connection.')
+ return remote_port
+
+ task = target.RunCommandPiped([],
+ ssh_args=forwarding_flags,
+ stdout=subprocess.PIPE,
+ stderr=open(os.devnull, 'w'))
+ output = task.stdout.readlines()
+ task.wait()
+ if task.returncode != 0:
+ raise Exception('Got an error code when requesting port forwarding: %d' %
+ task.returncode)
+
+ parsed_port = int(output[0].strip())
+ logging.debug('Port forwarding established (local=%d, device=%d)' %
+ (local_port, parsed_port))
+ return parsed_port
+
+
+def GetAvailableTcpPort():
+ """Finds a (probably) open port by opening and closing a listen socket."""
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.bind(("", 0))
+ port = sock.getsockname()[1]
+ sock.close()
+ return port
diff --git a/deps/v8/build/fuchsia/common_args.py b/deps/v8/build/fuchsia/common_args.py
new file mode 100644
index 0000000000..8fda07e3df
--- /dev/null
+++ b/deps/v8/build/fuchsia/common_args.py
@@ -0,0 +1,123 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import sys
+
+from device_target import DeviceTarget
+from qemu_target import QemuTarget
+
+
+def AddCommonArgs(arg_parser):
+ """Adds command line arguments to |arg_parser| for options which are shared
+ across test and executable target types."""
+
+ common_args = arg_parser.add_argument_group('common', 'Common arguments')
+ common_args.add_argument('--package',
+ type=os.path.realpath, required=True,
+ help='Path to the package to execute.')
+ common_args.add_argument('--package-name', required=True,
+ help='Name of the package to execute, defined in ' +
+ 'package metadata.')
+ common_args.add_argument('--package-dep', action='append', default=[],
+ help='Path to an additional package to install.')
+ common_args.add_argument('--install-only', action='store_true', default=False,
+ help='Install the packages but do not run them.')
+ common_args.add_argument('--output-directory',
+ type=os.path.realpath, required=True,
+ help=('Path to the directory in which build files '
+ 'are located (must include build type).'))
+ common_args.add_argument('--target-cpu', required=True,
+ help='GN target_cpu setting for the build.')
+ common_args.add_argument('--target-staging-path',
+ help='target path under which to stage packages '
+ 'during deployment.', default='/data')
+ common_args.add_argument('--device', '-d', action='store_true', default=False,
+ help='Run on hardware device instead of QEMU.')
+ common_args.add_argument('--host', help='The IP of the target device. ' +
+ 'Optional.')
+ common_args.add_argument('--node-name',
+ help='The node-name of the device to boot or deploy '
+ 'to. Optional, will use the first discovered '
+ 'device if omitted.')
+ common_args.add_argument('--port', '-p', type=int, default=22,
+ help='The port of the SSH service running on the ' +
+ 'device. Optional.')
+ common_args.add_argument('--ssh-config', '-F',
+ help='The path to the SSH configuration used for '
+ 'connecting to the target device.')
+ common_args.add_argument('--fuchsia-out-dir',
+ help='Path to a Fuchsia build output directory. '
+ 'Equivalent to setting --ssh-config and '
+ '--os_check=ignore.')
+ common_args.add_argument('--system-log-file',
+ help='File to write system logs to. Specify - to '
+ 'log to stdout.')
+ common_args.add_argument('--exclude-system-logs',
+ action='store_false',
+ dest='include_system_logs',
+ help='Do not show system log data.')
+ common_args.add_argument('--verbose', '-v', default=False,
+ action='store_true',
+ help='Enable debug-level logging.')
+ common_args.add_argument('--qemu-cpu-cores', type=int, default=4,
+ help='Sets the number of CPU cores to provide if '
+ 'launching in a VM with QEMU.')
+ common_args.add_argument(
+ '--os_check', choices=['check', 'update', 'ignore'],
+ default='update',
+ help='Sets the OS version enforcement policy. If \'check\', then the '
+ 'deployment process will halt if the target\'s version doesn\'t '
+ 'match. If \'update\', then the target device will automatically '
+ 'be repaved. If \'ignore\', then the OS version won\'t be checked.')
+
+
+def ConfigureLogging(args):
+ """Configures the logging level based on command line |args|."""
+
+ logging.basicConfig(level=(logging.DEBUG if args.verbose else logging.INFO),
+ format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
+
+ # The test server spawner is too noisy with INFO level logging, so tweak
+ # its verbosity a bit by adjusting its logging level.
+ logging.getLogger('chrome_test_server_spawner').setLevel(
+ logging.DEBUG if args.verbose else logging.WARN)
+
+ # Verbose SCP output can be useful at times but oftentimes is just too noisy.
+ # Only enable it if -vv is passed.
+ logging.getLogger('ssh').setLevel(
+ logging.DEBUG if args.verbose else logging.WARN)
+
+
+def GetDeploymentTargetForArgs(args):
+ """Constructs a deployment target object using parameters taken from
+ command line arguments."""
+
+ if args.system_log_file == '-':
+ system_log_file = sys.stdout
+ elif args.system_log_file:
+ system_log_file = open(args.system_log_file, 'w')
+ else:
+ system_log_file = None
+
+ if not args.device:
+ # KVM is required on x64 test bots.
+ require_kvm = args.test_launcher_bot_mode and args.target_cpu == "x64"
+
+ return QemuTarget(output_dir=args.output_directory,
+ target_cpu=args.target_cpu,
+ cpu_cores=args.qemu_cpu_cores,
+ system_log_file=system_log_file,
+ require_kvm=require_kvm)
+ else:
+ return DeviceTarget(output_dir=args.output_directory,
+ target_cpu=args.target_cpu,
+ host=args.host,
+ node_name=args.node_name,
+ port=args.port,
+ ssh_config=args.ssh_config,
+ fuchsia_out_dir=args.fuchsia_out_dir,
+ system_log_file=system_log_file,
+ os_check=args.os_check)
diff --git a/deps/v8/build/fuchsia/device_target.py b/deps/v8/build/fuchsia/device_target.py
new file mode 100644
index 0000000000..c35fc79b3e
--- /dev/null
+++ b/deps/v8/build/fuchsia/device_target.py
@@ -0,0 +1,282 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements commands for running and interacting with Fuchsia on devices."""
+
+import boot_data
+import filecmp
+import logging
+import os
+import re
+import subprocess
+import sys
+import target
+import tempfile
+import time
+import uuid
+
+from common import SDK_ROOT, EnsurePathExists
+
+# The maximum number of times to attempt mDNS resolution when connecting to a
+# freshly booted Fuchsia instance before aborting.
+_BOOT_DISCOVERY_ATTEMPTS = 30
+
+# Number of seconds to wait when querying a list of all devices over mDNS.
+_LIST_DEVICES_TIMEOUT_SECS = 3
+
+# Number of failed connection attempts before redirecting system logs to stdout.
+CONNECT_RETRY_COUNT_BEFORE_LOGGING = 10
+
+TARGET_HASH_FILE_PATH = '/data/.hash'
+
+class DeviceTarget(target.Target):
+ """Prepares a device to be used as a deployment target. Depending on the
+ command line parameters, it automatically handles a number of preparatory
+ steps relating to address resolution, device provisioning, and SDK
+ versioning.
+
+ If |_node_name| is unset:
+ If there is one running device, use it for deployment and execution. The
+ device's SDK version is checked unless --os_check=ignore is set.
+ If --os_check=update is set, then the target device is repaved if the SDK
+ version doesn't match.
+
+ If there is more than one running device, then abort and instruct the
+ user to re-run the command with |_node_name| set.
+
+ Otherwise, if there are no running devices, then search for a device
+ running Zedboot, and pave it.
+
+
+ If |_node_name| is set:
+ If there is a running device with a matching nodename, then it is used
+ for deployment and execution.
+
+ Otherwise, attempt to pave a device with a matching nodename, and use it
+ for deployment and execution.
+
+ If |_host| is set:
+ Deploy to a device at the host IP address as-is."""
+
+ def __init__(self, output_dir, target_cpu, host=None, node_name=None,
+ port=None, ssh_config=None, fuchsia_out_dir=None,
+ os_check='update', system_log_file=None):
+ """output_dir: The directory which will contain the files that are
+ generated to support the deployment.
+ target_cpu: The CPU architecture of the deployment target. Can be
+ "x64" or "arm64".
+ host: The address of the deployment target device.
+ node_name: The node name of the deployment target device.
+ port: The port of the SSH service on the deployment target device.
+ ssh_config: The path to SSH configuration data.
+ fuchsia_out_dir: The path to a Fuchsia build output directory, for
+ deployments to devices paved with local Fuchsia builds.
+ os_check: If 'check', the target's SDK version must match.
+ If 'update', the target will be repaved if the SDK versions
+ mismatch.
+ If 'ignore', the target's SDK version is ignored."""
+
+ super(DeviceTarget, self).__init__(output_dir, target_cpu)
+
+ self._port = port if port else 22
+ self._system_log_file = system_log_file
+ self._loglistener = None
+ self._host = host
+ self._fuchsia_out_dir = fuchsia_out_dir
+ self._node_name = node_name
+ self._os_check = os_check
+
+ if self._host and self._node_name:
+ raise Exception('Only one of "--host" or "--node-name" can be specified.')
+
+ if fuchsia_out_dir:
+ if ssh_config:
+ raise Exception('Only one of "--fuchsia-out-dir" or "--ssh_config" can '
+ 'be specified.')
+
+ # Use SSH keys from the Fuchsia output directory.
+ self._ssh_config_path = os.path.join(os.path.expanduser(fuchsia_out_dir),
+ 'ssh-keys', 'ssh_config')
+ self._os_check = 'ignore'
+
+ elif ssh_config:
+ # Use the SSH config provided via the commandline.
+ self._ssh_config_path = os.path.expanduser(ssh_config)
+
+ else:
+ # Default to using an automatically generated SSH config and keys.
+ boot_data.ProvisionSSH(output_dir)
+ self._ssh_config_path = boot_data.GetSSHConfigPath(output_dir)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if self._loglistener:
+ self._loglistener.kill()
+
+ def _SDKHashMatches(self):
+ """Checks if /data/.hash on the device matches SDK_ROOT/.hash.
+
+ Returns True if the files are identical, or False otherwise.
+ """
+ with tempfile.NamedTemporaryFile() as tmp:
+ try:
+ self.GetFile(TARGET_HASH_FILE_PATH, tmp.name)
+ except subprocess.CalledProcessError:
+ # If the file is unretrievable for whatever reason, assume mismatch.
+ return False
+
+ return filecmp.cmp(tmp.name, os.path.join(SDK_ROOT, '.hash'), False)
+
+ def __Discover(self):
+ """Queries mDNS for the IP address of a booted Fuchsia instance whose name
+ matches |_node_name| on the local area network. If |_node_name| isn't
+ specified and there is only one device on the network, then returns the
+ IP address of that device.
+
+ Sets |_host| and returns True if the device was found, or returns False
+ if the device couldn't be found within the discovery timeout."""
+
+ dev_finder_path = os.path.join(SDK_ROOT, 'tools', 'dev_finder')
+
+ if self._node_name:
+ command = [dev_finder_path, 'resolve',
+ '-device-limit', '1', # Exit early as soon as a host is found.
+ self._node_name]
+ else:
+ command = [dev_finder_path, 'list', '-full',
+ '-timeout', str(_LIST_DEVICES_TIMEOUT_SECS * 1000)]
+
+ proc = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=open(os.devnull, 'w'))
+
+ output = proc.communicate()[0].strip().split('\n')
+
+ if proc.returncode != 0:
+ return False
+
+ if self._node_name:
+ # Handle the result of "dev_finder resolve".
+ self._host = output[0].strip()
+
+ else:
+ name_host_pairs = [x.strip().split(' ') for x in output]
+
+ # Handle the output of "dev_finder list".
+ if len(name_host_pairs) > 1:
+ print 'More than one device was discovered on the network.'
+ print 'Use --node-name <name> to specify the device to use.'
+ print '\nList of devices:'
+ for pair in name_host_pairs:
+ print ' ' + pair[1]
+ print
+ raise Exception('Ambiguous target device specification.')
+
+ assert len(name_host_pairs) == 1
+ self._host, self._node_name = name_host_pairs[0]
+
+ logging.info('Found device "%s" at address %s.' % (self._node_name,
+ self._host))
+
+ return True
+
+ def Start(self):
+ if self._host:
+ self._WaitUntilReady()
+
+ else:
+ should_provision = False
+
+ if self.__Discover():
+ self._WaitUntilReady()
+
+ if self._os_check != 'ignore':
+ if not self._SDKHashMatches():
+ if self._os_check == 'update':
+ logging.info('SDK hash does not match; rebooting and repaving.')
+ self.RunCommand(['dm', 'reboot'])
+ should_provision = True
+ elif self._os_check == 'check':
+ raise Exception('Target device SDK version does not match.')
+
+ else:
+ should_provision = True
+
+ if should_provision:
+ self.__ProvisionDevice()
+
+ assert self._node_name
+ assert self._host
+
+
+ def __ProvisionDevice(self):
+ """Netboots a device with Fuchsia. If |_node_name| is set, then only a
+ device with a matching node name is used.
+
+ The device is up and reachable via SSH when the function successfully
+ completes."""
+
+ bootserver_path = os.path.join(SDK_ROOT, 'tools', 'bootserver')
+ bootserver_command = [
+ bootserver_path,
+ '-1',
+ '--fvm',
+ EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
+ 'fvm.sparse.blk')),
+ EnsurePathExists(boot_data.GetBootImage(self._output_dir,
+ self._GetTargetSdkArch()))]
+
+ if self._GetTargetSdkArch() == 'x64':
+ bootserver_command += [
+ '--efi',
+ EnsurePathExists(boot_data.GetTargetFile(self._GetTargetSdkArch(),
+ 'local.esp.blk'))]
+
+ if self._node_name:
+ bootserver_command += ['-n', self._node_name]
+
+ bootserver_command += ['--']
+ bootserver_command += boot_data.GetKernelArgs(self._output_dir)
+
+ logging.debug(' '.join(bootserver_command))
+ stdout = subprocess.check_output(bootserver_command,
+ stderr=subprocess.STDOUT)
+
+ # Parse the nodename from bootserver stdout.
+ m = re.search(r'.*Proceeding with nodename (?P<nodename>.*)$', stdout,
+ re.MULTILINE)
+ if not m:
+ raise Exception('Couldn\'t parse nodename from bootserver output.')
+ self._node_name = m.groupdict()['nodename']
+ logging.info('Booted device "%s".' % self._node_name)
+
+ # Start loglistener to save system logs.
+ if self._system_log_file:
+ loglistener_path = os.path.join(SDK_ROOT, 'tools', 'loglistener')
+ self._loglistener = subprocess.Popen(
+ [loglistener_path, self._node_name],
+ stdout=self._system_log_file,
+ stderr=subprocess.STDOUT, stdin=open(os.devnull))
+
+ # Repeatedly query mDNS until we find the device, or we exhaust
+ # _BOOT_DISCOVERY_ATTEMPTS attempts.
+ logging.info('Waiting for device to join network.')
+ for _ in xrange(_BOOT_DISCOVERY_ATTEMPTS):
+ if self.__Discover():
+ break
+
+ if not self._host:
+ raise Exception('Device %s couldn\'t be discovered via mDNS.' %
+ self._node_name)
+
+ self._WaitUntilReady()
+
+ # Update the target's hash to match the current tree's.
+ self.PutFile(os.path.join(SDK_ROOT, '.hash'), TARGET_HASH_FILE_PATH)
+
+ def _GetEndpoint(self):
+ return (self._host, self._port)
+
+ def _GetSshConfigPath(self):
+ return self._ssh_config_path
diff --git a/deps/v8/build/fuchsia/exe_runner.py b/deps/v8/build/fuchsia/exe_runner.py
new file mode 100755
index 0000000000..feb96d0779
--- /dev/null
+++ b/deps/v8/build/fuchsia/exe_runner.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Deploys and executes a packaged Fuchsia executable on a target."""
+
+import argparse
+import logging
+import sys
+
+from common_args import AddCommonArgs, ConfigureLogging, \
+ GetDeploymentTargetForArgs
+from run_package import RunPackage, RunPackageArgs
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ AddCommonArgs(parser)
+ parser.add_argument('child_args', nargs='*',
+ help='Arguments for the test process.')
+ args = parser.parse_args()
+ ConfigureLogging(args)
+
+ with GetDeploymentTargetForArgs(args) as target:
+ target.Start()
+
+ run_package_args = RunPackageArgs.FromCommonArgs(args)
+ return RunPackage(
+ args.output_directory, target, args.package, args.package_name,
+ args.package_dep, args.child_args, run_package_args)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/fuchsia/fidlgen_js/BUILD.gn b/deps/v8/build/fuchsia/fidlgen_js/BUILD.gn
new file mode 100644
index 0000000000..4b2bb6400c
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/BUILD.gn
@@ -0,0 +1,63 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/fidl_library.gni")
+import("//testing/test.gni")
+
+test("fidlgen_js_unittests") {
+ testonly = true
+
+ sources = [
+ "test/fidlgen_js_unittest.cc",
+ ]
+
+ deps = [
+ ":fidljstest",
+ ":runtime",
+ "//base/test:test_support",
+ "//gin:gin_test",
+ "//testing/gtest",
+ "//v8",
+ ]
+
+ configs += [
+ "//tools/v8_context_snapshot:use_v8_context_snapshot",
+ "//v8:external_startup_data",
+ ]
+
+ data_deps = [
+ "//tools/v8_context_snapshot:v8_context_snapshot",
+ ]
+
+ data = [
+ "runtime/fidl.mjs",
+ ]
+}
+
+static_library("runtime") {
+ sources = [
+ "runtime/zircon.cc",
+ "runtime/zircon.h",
+ ]
+
+ deps = [
+ "//base",
+ "//gin",
+ "//third_party/fuchsia-sdk/sdk:async",
+ "//third_party/fuchsia-sdk/sdk:async_default",
+ "//v8",
+ ]
+}
+
+fidl_library("fidljstest") {
+ testonly = true
+ sources = [
+ "test/simple.fidl",
+ ]
+
+ languages = [
+ "cpp",
+ "js",
+ ]
+}
diff --git a/deps/v8/build/fuchsia/fidlgen_js/DEPS b/deps/v8/build/fuchsia/fidlgen_js/DEPS
new file mode 100644
index 0000000000..681254d0f3
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+gin",
+ "+v8/include",
+]
diff --git a/deps/v8/build/fuchsia/fidlgen_js/fidl.py b/deps/v8/build/fuchsia/fidlgen_js/fidl.py
new file mode 100644
index 0000000000..6f8b99f441
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/fidl.py
@@ -0,0 +1,549 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This was generated (and can be regenerated) by pasting
+# zircon/system/host/fidl/schema.json from Fuchsia into
+# https://app.quicktype.io and choosing Python 2.7 output. The only manual
+# change is to modify the import path for Enum.
+
+from third_party.enum34 import Enum
+
+
+def from_str(x):
+ assert isinstance(x, (str, unicode))
+ return x
+
+
+def from_int(x):
+ assert isinstance(x, int) and not isinstance(x, bool)
+ return x
+
+
+def from_none(x):
+ assert x is None
+ return x
+
+
+def from_union(fs, x):
+ for f in fs:
+ try:
+ return f(x)
+ except:
+ pass
+ assert False
+
+
+def from_bool(x):
+ assert isinstance(x, bool)
+ return x
+
+
+def to_class(c, x):
+ assert isinstance(x, c)
+ return x.to_dict()
+
+
+def to_enum(c, x):
+ assert isinstance(x, c)
+ return x.value
+
+
+def from_list(f, x):
+ assert isinstance(x, list)
+ return [f(y) for y in x]
+
+
+def from_dict(f, x):
+ assert isinstance(x, dict)
+ return { k: f(v) for (k, v) in x.items() }
+
+
+class Attribute:
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ name = from_str(obj.get(u"name"))
+ value = from_str(obj.get(u"value"))
+ return Attribute(name, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"name"] = from_str(self.name)
+ result[u"value"] = from_str(self.value)
+ return result
+
+
+class TypeKind(Enum):
+ ARRAY = u"array"
+ HANDLE = u"handle"
+ IDENTIFIER = u"identifier"
+ PRIMITIVE = u"primitive"
+ REQUEST = u"request"
+ STRING = u"string"
+ VECTOR = u"vector"
+
+
+class TypeClass:
+ def __init__(self, element_count, element_type, kind, maybe_element_count, nullable, subtype, identifier):
+ self.element_count = element_count
+ self.element_type = element_type
+ self.kind = kind
+ self.maybe_element_count = maybe_element_count
+ self.nullable = nullable
+ self.subtype = subtype
+ self.identifier = identifier
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ element_count = from_union([from_int, from_none], obj.get(u"element_count"))
+ element_type = from_union([TypeClass.from_dict, from_none], obj.get(u"element_type"))
+ kind = TypeKind(obj.get(u"kind"))
+ maybe_element_count = from_union([from_int, from_none], obj.get(u"maybe_element_count"))
+ nullable = from_union([from_bool, from_none], obj.get(u"nullable"))
+ subtype = from_union([from_str, from_none], obj.get(u"subtype"))
+ identifier = from_union([from_str, from_none], obj.get(u"identifier"))
+ return TypeClass(element_count, element_type, kind, maybe_element_count, nullable, subtype, identifier)
+
+ def to_dict(self):
+ result = {}
+ result[u"element_count"] = from_union([from_int, from_none], self.element_count)
+ result[u"element_type"] = from_union([lambda x: to_class(TypeClass, x), from_none], self.element_type)
+ result[u"kind"] = to_enum(TypeKind, self.kind)
+ result[u"maybe_element_count"] = from_union([from_int, from_none], self.maybe_element_count)
+ result[u"nullable"] = from_union([from_bool, from_none], self.nullable)
+ result[u"subtype"] = from_union([from_str, from_none], self.subtype)
+ result[u"identifier"] = from_union([from_str, from_none], self.identifier)
+ return result
+
+
+class ConstantKind(Enum):
+ IDENTIFIER = u"identifier"
+ LITERAL = u"literal"
+
+
+class LiteralKind(Enum):
+ DEFAULT = u"default"
+ FALSE = u"false"
+ NUMERIC = u"numeric"
+ STRING = u"string"
+ TRUE = u"true"
+
+
+class Literal:
+ def __init__(self, kind, value):
+ self.kind = kind
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ kind = LiteralKind(obj.get(u"kind"))
+ value = from_union([from_str, from_none], obj.get(u"value"))
+ return Literal(kind, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"kind"] = to_enum(LiteralKind, self.kind)
+ result[u"value"] = from_union([from_str, from_none], self.value)
+ return result
+
+
+class Constant:
+ def __init__(self, identifier, kind, literal):
+ self.identifier = identifier
+ self.kind = kind
+ self.literal = literal
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ identifier = from_union([from_str, from_none], obj.get(u"identifier"))
+ kind = ConstantKind(obj.get(u"kind"))
+ literal = from_union([Literal.from_dict, from_none], obj.get(u"literal"))
+ return Constant(identifier, kind, literal)
+
+ def to_dict(self):
+ result = {}
+ result[u"identifier"] = from_union([from_str, from_none], self.identifier)
+ result[u"kind"] = to_enum(ConstantKind, self.kind)
+ result[u"literal"] = from_union([lambda x: to_class(Literal, x), from_none], self.literal)
+ return result
+
+
+class Const:
+ def __init__(self, maybe_attributes, name, type, value):
+ self.maybe_attributes = maybe_attributes
+ self.name = name
+ self.type = type
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ name = from_str(obj.get(u"name"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ value = Constant.from_dict(obj.get(u"value"))
+ return Const(maybe_attributes, name, type, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"name"] = from_str(self.name)
+ result[u"type"] = to_class(TypeClass, self.type)
+ result[u"value"] = to_class(Constant, self.value)
+ return result
+
+
+class DeclarationsMap(Enum):
+ CONST = u"const"
+ ENUM = u"enum"
+ INTERFACE = u"interface"
+ STRUCT = u"struct"
+ UNION = u"union"
+
+
+class EnumMember:
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ name = from_str(obj.get(u"name"))
+ value = Constant.from_dict(obj.get(u"value"))
+ return EnumMember(name, value)
+
+ def to_dict(self):
+ result = {}
+ result[u"name"] = from_str(self.name)
+ result[u"value"] = to_class(Constant, self.value)
+ return result
+
+
+class IntegerType(Enum):
+ INT16 = u"int16"
+ INT32 = u"int32"
+ INT64 = u"int64"
+ INT8 = u"int8"
+ UINT16 = u"uint16"
+ UINT32 = u"uint32"
+ UINT64 = u"uint64"
+ UINT8 = u"uint8"
+
+
+class EnumDeclarationElement:
+ def __init__(self, maybe_attributes, members, name, type):
+ self.maybe_attributes = maybe_attributes
+ self.members = members
+ self.name = name
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ members = from_list(EnumMember.from_dict, obj.get(u"members"))
+ name = from_str(obj.get(u"name"))
+ type = IntegerType(obj.get(u"type"))
+ return EnumDeclarationElement(maybe_attributes, members, name, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"members"] = from_list(lambda x: to_class(EnumMember, x), self.members)
+ result[u"name"] = from_str(self.name)
+ result[u"type"] = to_enum(IntegerType, self.type)
+ return result
+
+
+class InterfaceMethodParameter:
+ def __init__(self, alignment, name, offset, size, type):
+ self.alignment = alignment
+ self.name = name
+ self.offset = offset
+ self.size = size
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ name = from_str(obj.get(u"name"))
+ offset = from_int(obj.get(u"offset"))
+ size = from_int(obj.get(u"size"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ return InterfaceMethodParameter(alignment, name, offset, size, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"name"] = from_str(self.name)
+ result[u"offset"] = from_int(self.offset)
+ result[u"size"] = from_int(self.size)
+ result[u"type"] = to_class(TypeClass, self.type)
+ return result
+
+
+class InterfaceMethod:
+ def __init__(self, has_request, has_response, maybe_attributes, maybe_request, maybe_request_alignment, maybe_request_size, maybe_response, maybe_response_alignment, maybe_response_size, name, ordinal):
+ self.has_request = has_request
+ self.has_response = has_response
+ self.maybe_attributes = maybe_attributes
+ self.maybe_request = maybe_request
+ self.maybe_request_alignment = maybe_request_alignment
+ self.maybe_request_size = maybe_request_size
+ self.maybe_response = maybe_response
+ self.maybe_response_alignment = maybe_response_alignment
+ self.maybe_response_size = maybe_response_size
+ self.name = name
+ self.ordinal = ordinal
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ has_request = from_bool(obj.get(u"has_request"))
+ has_response = from_bool(obj.get(u"has_response"))
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ maybe_request = from_union([lambda x: from_list(InterfaceMethodParameter.from_dict, x), from_none], obj.get(u"maybe_request"))
+ maybe_request_alignment = from_union([from_int, from_none], obj.get(u"maybe_request_alignment"))
+ maybe_request_size = from_union([from_int, from_none], obj.get(u"maybe_request_size"))
+ maybe_response = from_union([lambda x: from_list(InterfaceMethodParameter.from_dict, x), from_none], obj.get(u"maybe_response"))
+ maybe_response_alignment = from_union([from_int, from_none], obj.get(u"maybe_response_alignment"))
+ maybe_response_size = from_union([from_int, from_none], obj.get(u"maybe_response_size"))
+ name = from_str(obj.get(u"name"))
+ ordinal = from_int(obj.get(u"ordinal"))
+ return InterfaceMethod(has_request, has_response, maybe_attributes, maybe_request, maybe_request_alignment, maybe_request_size, maybe_response, maybe_response_alignment, maybe_response_size, name, ordinal)
+
+ def to_dict(self):
+ result = {}
+ result[u"has_request"] = from_bool(self.has_request)
+ result[u"has_response"] = from_bool(self.has_response)
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"maybe_request"] = from_union([lambda x: from_list(lambda x: to_class(InterfaceMethodParameter, x), x), from_none], self.maybe_request)
+ result[u"maybe_request_alignment"] = from_union([from_int, from_none], self.maybe_request_alignment)
+ result[u"maybe_request_size"] = from_union([from_int, from_none], self.maybe_request_size)
+ result[u"maybe_response"] = from_union([lambda x: from_list(lambda x: to_class(InterfaceMethodParameter, x), x), from_none], self.maybe_response)
+ result[u"maybe_response_alignment"] = from_union([from_int, from_none], self.maybe_response_alignment)
+ result[u"maybe_response_size"] = from_union([from_int, from_none], self.maybe_response_size)
+ result[u"name"] = from_str(self.name)
+ result[u"ordinal"] = from_int(self.ordinal)
+ return result
+
+
+class Interface:
+ def __init__(self, maybe_attributes, methods, name):
+ self.maybe_attributes = maybe_attributes
+ self.methods = methods
+ self.name = name
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ methods = from_list(InterfaceMethod.from_dict, obj.get(u"methods"))
+ name = from_str(obj.get(u"name"))
+ return Interface(maybe_attributes, methods, name)
+
+ def to_dict(self):
+ result = {}
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"methods"] = from_list(lambda x: to_class(InterfaceMethod, x), self.methods)
+ result[u"name"] = from_str(self.name)
+ return result
+
+
+class Library:
+ def __init__(self, declarations, name):
+ self.declarations = declarations
+ self.name = name
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ declarations = from_dict(DeclarationsMap, obj.get(u"declarations"))
+ name = from_str(obj.get(u"name"))
+ return Library(declarations, name)
+
+ def to_dict(self):
+ result = {}
+ result[u"declarations"] = from_dict(lambda x: to_enum(DeclarationsMap, x), self.declarations)
+ result[u"name"] = from_str(self.name)
+ return result
+
+
+class StructMember:
+ def __init__(self, alignment, maybe_default_value, name, offset, size, type):
+ self.alignment = alignment
+ self.maybe_default_value = maybe_default_value
+ self.name = name
+ self.offset = offset
+ self.size = size
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ maybe_default_value = from_union([Constant.from_dict, from_none], obj.get(u"maybe_default_value"))
+ name = from_str(obj.get(u"name"))
+ offset = from_int(obj.get(u"offset"))
+ size = from_int(obj.get(u"size"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ return StructMember(alignment, maybe_default_value, name, offset, size, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"maybe_default_value"] = from_union([lambda x: to_class(Constant, x), from_none], self.maybe_default_value)
+ result[u"name"] = from_str(self.name)
+ result[u"offset"] = from_int(self.offset)
+ result[u"size"] = from_int(self.size)
+ result[u"type"] = to_class(TypeClass, self.type)
+ return result
+
+
+class Struct:
+ def __init__(self, max_handles, maybe_attributes, members, name, size):
+ self.max_handles = max_handles
+ self.maybe_attributes = maybe_attributes
+ self.members = members
+ self.name = name
+ self.size = size
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ max_handles = from_union([from_int, from_none], obj.get(u"max_handles"))
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ members = from_list(StructMember.from_dict, obj.get(u"members"))
+ name = from_str(obj.get(u"name"))
+ size = from_int(obj.get(u"size"))
+ return Struct(max_handles, maybe_attributes, members, name, size)
+
+ def to_dict(self):
+ result = {}
+ result[u"max_handles"] = from_union([from_int, from_none], self.max_handles)
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"members"] = from_list(lambda x: to_class(StructMember, x), self.members)
+ result[u"name"] = from_str(self.name)
+ result[u"size"] = from_int(self.size)
+ return result
+
+
+class UnionMember:
+ def __init__(self, alignment, name, offset, size, type):
+ self.alignment = alignment
+ self.name = name
+ self.offset = offset
+ self.size = size
+ self.type = type
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ name = from_str(obj.get(u"name"))
+ offset = from_int(obj.get(u"offset"))
+ size = from_int(obj.get(u"size"))
+ type = TypeClass.from_dict(obj.get(u"type"))
+ return UnionMember(alignment, name, offset, size, type)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"name"] = from_str(self.name)
+ result[u"offset"] = from_int(self.offset)
+ result[u"size"] = from_int(self.size)
+ result[u"type"] = to_class(TypeClass, self.type)
+ return result
+
+
+class UnionDeclarationElement:
+ def __init__(self, alignment, max_handles, maybe_attributes, members, name, size):
+ self.alignment = alignment
+ self.max_handles = max_handles
+ self.maybe_attributes = maybe_attributes
+ self.members = members
+ self.name = name
+ self.size = size
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ alignment = from_int(obj.get(u"alignment"))
+ max_handles = from_union([from_int, from_none], obj.get(u"max_handles"))
+ maybe_attributes = from_union([lambda x: from_list(Attribute.from_dict, x), from_none], obj.get(u"maybe_attributes"))
+ members = from_list(UnionMember.from_dict, obj.get(u"members"))
+ name = from_str(obj.get(u"name"))
+ size = from_int(obj.get(u"size"))
+ return UnionDeclarationElement(alignment, max_handles, maybe_attributes, members, name, size)
+
+ def to_dict(self):
+ result = {}
+ result[u"alignment"] = from_int(self.alignment)
+ result[u"max_handles"] = from_union([from_int, from_none], self.max_handles)
+ result[u"maybe_attributes"] = from_union([lambda x: from_list(lambda x: to_class(Attribute, x), x), from_none], self.maybe_attributes)
+ result[u"members"] = from_list(lambda x: to_class(UnionMember, x), self.members)
+ result[u"name"] = from_str(self.name)
+ result[u"size"] = from_int(self.size)
+ return result
+
+
+class Fidl:
+ def __init__(self, const_declarations, declaration_order, declarations, enum_declarations, interface_declarations, library_dependencies, name, struct_declarations, union_declarations, version):
+ self.const_declarations = const_declarations
+ self.declaration_order = declaration_order
+ self.declarations = declarations
+ self.enum_declarations = enum_declarations
+ self.interface_declarations = interface_declarations
+ self.library_dependencies = library_dependencies
+ self.name = name
+ self.struct_declarations = struct_declarations
+ self.union_declarations = union_declarations
+ self.version = version
+
+ @staticmethod
+ def from_dict(obj):
+ assert isinstance(obj, dict)
+ const_declarations = from_list(Const.from_dict, obj.get(u"const_declarations"))
+ declaration_order = from_list(from_str, obj.get(u"declaration_order"))
+ declarations = from_dict(DeclarationsMap, obj.get(u"declarations"))
+ enum_declarations = from_list(EnumDeclarationElement.from_dict, obj.get(u"enum_declarations"))
+ interface_declarations = from_list(Interface.from_dict, obj.get(u"interface_declarations"))
+ library_dependencies = from_list(Library.from_dict, obj.get(u"library_dependencies"))
+ name = from_str(obj.get(u"name"))
+ struct_declarations = from_list(Struct.from_dict, obj.get(u"struct_declarations"))
+ union_declarations = from_list(UnionDeclarationElement.from_dict, obj.get(u"union_declarations"))
+ version = from_str(obj.get(u"version"))
+ return Fidl(const_declarations, declaration_order, declarations, enum_declarations, interface_declarations, library_dependencies, name, struct_declarations, union_declarations, version)
+
+ def to_dict(self):
+ result = {}
+ result[u"const_declarations"] = from_list(lambda x: to_class(Const, x), self.const_declarations)
+ result[u"declaration_order"] = from_list(from_str, self.declaration_order)
+ result[u"declarations"] = from_dict(lambda x: to_enum(DeclarationsMap, x), self.declarations)
+ result[u"enum_declarations"] = from_list(lambda x: to_class(EnumDeclarationElement, x), self.enum_declarations)
+ result[u"interface_declarations"] = from_list(lambda x: to_class(Interface, x), self.interface_declarations)
+ result[u"library_dependencies"] = from_list(lambda x: to_class(Library, x), self.library_dependencies)
+ result[u"name"] = from_str(self.name)
+ result[u"struct_declarations"] = from_list(lambda x: to_class(Struct, x), self.struct_declarations)
+ result[u"union_declarations"] = from_list(lambda x: to_class(UnionDeclarationElement, x), self.union_declarations)
+ result[u"version"] = from_str(self.version)
+ return result
+
+
+def fidl_from_dict(s):
+ return Fidl.from_dict(s)
+
+
+def fidl_to_dict(x):
+ return to_class(Fidl, x)
+
diff --git a/deps/v8/build/fuchsia/fidlgen_js/gen.py b/deps/v8/build/fuchsia/fidlgen_js/gen.py
new file mode 100755
index 0000000000..484440e2d1
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/gen.py
@@ -0,0 +1,673 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import fidl
+import json
+
+
+class _CompoundIdentifier(object):
+
+ def __init__(self, library, name):
+ self.library = library
+ self.name = name
+
+
+def _ParseLibraryName(lib):
+ return lib.split('.')
+
+
+def _ParseCompoundIdentifier(ident):
+ parts = ident.split('/', 2)
+ raw_library = ''
+ raw_name = parts[0]
+ if len(parts) == 2:
+ raw_library, raw_name = parts
+ library = _ParseLibraryName(raw_library)
+ return _CompoundIdentifier(library, raw_name)
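+
+
+# Illustrative example: _ParseCompoundIdentifier('fuchsia.examples/Echo')
+# yields library == ['fuchsia', 'examples'] and name == 'Echo'.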
+
+
+def _ChangeIfReserved(name):
+ # TODO(crbug.com/883496): Remap any JS keywords.
+ return name
+
+
+def _CompileCompoundIdentifier(compound, ext=''):
+ result = _ChangeIfReserved(compound.name) + ext
+ return result
+
+
+def _CompileIdentifier(ident):
+ return _ChangeIfReserved(ident)
+
+
+def _GetUnderlyingPrimitiveType(t):
+ """Returns the underlying FIDL primitive type for a higher level type."""
+ if t.kind == fidl.TypeKind.PRIMITIVE:
+ return t.subtype
+ elif t.kind == fidl.TypeKind.STRING:
+ return 'string'
+ elif t.kind == fidl.TypeKind.IDENTIFIER:
+ # No underlying type is required because it will be implied by the type of
+ # the value that the identifier represents.
+ return None
+ else:
+ raise Exception(
+ 'expected primitive or identifier representing primitive underlying '
+ 'type, but got ' + str(t.kind))
+
+
+def _InlineSizeOfPrimitiveType(primitive_type):
+ return {
+ 'bool': 1,
+ 'float32': 4,
+ 'float64': 8,
+ 'int16': 2,
+ 'int32': 4,
+ 'int64': 8,
+ 'int8': 1,
+ 'uint16': 2,
+ 'uint32': 4,
+ 'uint64': 8,
+ 'uint8': 1,
+ }[primitive_type]
+
+
+def _JsTypeForPrimitiveType(t):
+ mapping = {
+ fidl.IntegerType.INT16: 'number',
+ fidl.IntegerType.INT32: 'number',
+ fidl.IntegerType.INT64: 'BigInt',
+ fidl.IntegerType.INT8: 'number',
+ fidl.IntegerType.UINT16: 'number',
+ fidl.IntegerType.UINT32: 'number',
+ fidl.IntegerType.UINT64: 'BigInt',
+ fidl.IntegerType.UINT8: 'number',
+ }
+ return mapping[t]
+
+
+def _BuildInlineSizeTable(fidl):
+ """Builds a mapping from type name to inline type size. These need to be
+ extracted beforehand because a vector<X> can be required during compilation
+ before seeing the compilation of X."""
+ result = {}
+ for enum in fidl.enum_declarations:
+ result[enum.name] = _InlineSizeOfPrimitiveType(enum.type.value)
+ for union in fidl.union_declarations:
+ result[union.name] = union.size
+ for struct in fidl.struct_declarations:
+ result[struct.name] = struct.size
+ return result
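+
+
+# For example (hypothetical input): a struct declared as
+# `struct Point { float32 x; float32 y; };` would appear here as
+# result['mylib/Point'] == 8, so a later vector<Point> knows its stride.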
+
+
+class Compiler(object):
+
+ def __init__(self, fidl, output_file):
+ self.fidl = fidl
+ self.f = output_file
+ self.output_deferred_to_eof = ''
+ self.type_table_defined = set()
+ self.type_inline_size_by_name = _BuildInlineSizeTable(self.fidl)
+ # Used to hold the JS name for constants and enumerants. In particular,
+ # enums aren't scoped by name to their enum in the fidl json, but the JS
+ # bindings emit them as Enum.Something. So this maps from Something ->
+ # Enum.Something.
+ self.resolved_constant_name = {}
+
+ def Compile(self):
+ self._EmitHeader()
+ for c in self.fidl.const_declarations:
+ self._CompileConst(c)
+ for e in self.fidl.enum_declarations:
+ self._CompileEnum(e)
+ for u in self.fidl.union_declarations:
+ self._CompileUnion(u)
+ for s in self.fidl.struct_declarations:
+ self._CompileStruct(s)
+ for i in self.fidl.interface_declarations:
+ self._CompileInterface(i)
+
+ self.f.write(self.output_deferred_to_eof)
+
+ def _InlineSizeOfType(self, t):
+ if t.kind == fidl.TypeKind.PRIMITIVE:
+ return _InlineSizeOfPrimitiveType(t.subtype)
+ elif t.kind == fidl.TypeKind.STRING:
+ return 16
+ elif t.kind == fidl.TypeKind.IDENTIFIER:
+ size = self.type_inline_size_by_name.get(t.identifier)
+ if size is None:
+ raise Exception('expected ' + t.identifier +
+ ' to be in self.type_inline_size_by_name')
+ return size
+ elif t.kind == fidl.TypeKind.HANDLE:
+ return 4
+ else:
+ raise NotImplementedError(t.kind)
+
+ def _CompileConstant(self, val, primitive_type):
+ """primitive_type is the string representation of the underlying FIDL type
+ of the constant's value. Note that this is not a type object, but rather
+ the string name of a basic primitive type, e.g. 'int8' or 'uint64'."""
+ if val.kind == fidl.ConstantKind.IDENTIFIER:
+ js_name = self.resolved_constant_name.get(val.identifier)
+ if not js_name:
+ raise Exception('expected ' + val.identifier +
+ ' to be in self.resolved_constant_name')
+ return js_name
+ elif val.kind == fidl.ConstantKind.LITERAL:
+ lit_kind = val.literal.kind
+ if lit_kind == fidl.LiteralKind.STRING:
+ return json.dumps(val.literal.value)
+ elif lit_kind == fidl.LiteralKind.NUMERIC:
+ suffix = 'n' if primitive_type in ('int64', 'uint64') else ''
+ return val.literal.value + suffix
+ elif lit_kind == fidl.LiteralKind.TRUE:
+ return 'true'
+ elif lit_kind == fidl.LiteralKind.FALSE:
+ return 'false'
+ elif lit_kind == fidl.LiteralKind.DEFAULT:
+ return 'default'
+ else:
+ raise Exception('unexpected kind')
+
+ def _EmitHeader(self):
+ self.f.write('''// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// WARNING: This file is machine generated by fidlgen_js.
+
+''')
+
+ def _CompileConst(self, const):
+ compound = _ParseCompoundIdentifier(const.name)
+ name = _CompileCompoundIdentifier(compound)
+ value = self._CompileConstant(const.value,
+ _GetUnderlyingPrimitiveType(const.type))
+ self.f.write('''/**
+ * @const
+ */
+const %(name)s = %(value)s;
+
+''' % {
+ 'name': name,
+ 'value': value
+ })
+ self.resolved_constant_name[const.name] = name
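+ # Illustrative output for a hypothetical `const uint64 BIG = 5;`:
+ #   /**
+ #    * @const
+ #    */
+ #   const BIG = 5n;
+ # (the 'n' suffix comes from _CompileConstant for 64-bit types).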
+
+ def _CompileEnum(self, enum):
+ compound = _ParseCompoundIdentifier(enum.name)
+ name = _CompileCompoundIdentifier(compound)
+ js_type = _JsTypeForPrimitiveType(enum.type)
+ data = {'js_type': js_type, 'type': enum.type.value, 'name': name}
+ self.f.write('''/**
+ * @enum {%(js_type)s}
+ */
+const %(name)s = {
+''' % data)
+ for member in enum.members:
+ # The 'type' of an enum isn't a real Type like most other places, but
+ # instead just a simple 'int8' or similar.
+ underlying_type = enum.type.value
+ self.f.write(
+ ''' %s: %s,\n''' %
+ (member.name, self._CompileConstant(member.value, underlying_type)))
+ fidl_constant_name = '.'.join(compound.library) + '/' + member.name
+ javascript_name = name + '.' + member.name
+ self.resolved_constant_name[fidl_constant_name] = javascript_name
+ self.f.write('};\n')
+ self.f.write('const _kTT_%(name)s = _kTT_%(type)s;\n\n' % data)
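+ # Illustrative output for a hypothetical `enum Color : uint32 { RED = 1; }`:
+ #   /**
+ #    * @enum {number}
+ #    */
+ #   const Color = {
+ #     RED: 1,
+ #   };
+ #   const _kTT_Color = _kTT_uint32;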
+
+ def _CompileUnion(self, union):
+ compound = _ParseCompoundIdentifier(union.name)
+ name = _CompileCompoundIdentifier(compound)
+ member_names = []
+ enc_cases = []
+ dec_cases = []
+ for i, m in enumerate(union.members):
+ member_name = _ChangeIfReserved(m.name)
+ member_names.append(member_name)
+ member_type = self._CompileType(m.type)
+ enc_cases.append('''\
+ case %(index)s:
+ _kTT_%(member_type)s.enc(e, o + 4, v.%(member_name)s);
+ break;''' % {
+ 'index': i,
+ 'member_type': member_type,
+ 'member_name': member_name,
+ })
+ dec_cases.append('''\
+ case %(index)s:
+ result.set_%(member_name)s(_kTT_%(member_type)s.dec(d, o + 4));
+ break;''' % {
+ 'index': i,
+ 'member_type': member_type,
+ 'member_name': member_name,
+ })
+
+ self.f.write(
+ '''\
+const _kTT_%(name)s = {
+ enc: function(e, o, v) {
+ if (v.$tag === $fidl__kInvalidUnionTag) throw "invalid tag";
+ e.data.setUint32(o, v.$tag, $fidl__kLE);
+ switch (v.$tag) {
+%(enc_cases)s
+ }
+ },
+ dec: function(d, o) {
+ var tag = d.data.getUint32(o, $fidl__kLE);
+ var result = new %(name)s();
+ switch (tag) {
+%(dec_cases)s
+ default:
+ throw "invalid tag";
+ }
+ return result;
+ },
+};
+
+const _kTT_%(name)s_Nullable = {
+ enc: function(e, o, v) {
+ e.data.setUint32(o, v ? 0xffffffff : 0, $fidl__kLE);
+ e.data.setUint32(o + 4, v ? 0xffffffff : 0, $fidl__kLE);
+ if (v) {
+   var start = e.alloc(%(size)s);
+   _kTT_%(name)s.enc(e, start, v);
+ }
+ },
+ dec: function(d, o) {
+ if (d.data.getUint32(o, $fidl__kLE) === 0) {
+ return new %(name)s();
+ }
+ var pointer = d.data.getUint32(o + 4, $fidl__kLE);
+ var dataOffset = d.claimMemory(%(size)s);
+ return _kTT_%(name)s.dec(d, dataOffset);
+ },
+};
+
+/**
+ * @constructor
+ */
+function %(name)s() { this.reset(); }
+
+%(name)s.prototype.reset = function(i) {
+ this.$tag = (i === undefined) ? $fidl__kInvalidUnionTag : i;
+''' % {
+ 'name': name,
+ 'size': union.size,
+ 'enc_cases': '\n'.join(enc_cases),
+ 'dec_cases': '\n'.join(dec_cases),
+ })
+ for m in member_names:
+ self.f.write(' this.%s = null;\n' % m)
+ self.f.write('};\n\n')
+
+ for i, m in enumerate(member_names):
+ self.f.write('''\
+%(name)s.prototype.set_%(member_name)s = function(v) {
+ this.reset(%(index)s);
+ this.%(member_name)s = v;
+};
+
+%(name)s.prototype.is_%(member_name)s = function() {
+ return this.$tag === %(index)s;
+};
+
+''' % {
+ 'name': name,
+ 'member_name': m,
+ 'index': i,
+ })
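+ # Generated unions are used like (illustrative, for a hypothetical MyUnion
+ # whose first member is `foo`):
+ #   var u = new MyUnion();   // $tag starts as $fidl__kInvalidUnionTag
+ #   u.set_foo(42);           // selects member index 0
+ #   u.is_foo();              // now true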
+
+ def _CompileStruct(self, struct):
+ compound = _ParseCompoundIdentifier(struct.name)
+ name = _CompileCompoundIdentifier(compound)
+ param_names = [_ChangeIfReserved(x.name) for x in struct.members]
+ # TODO(crbug.com/883496): @param and types.
+ self.f.write('''/**
+ * @constructor
+ * @struct
+ */
+function %(name)s(%(param_names)s) {
+''' % {
+ 'name': name,
+ 'param_names': ', '.join(param_names)
+ })
+ for member in struct.members:
+ member_name = _ChangeIfReserved(member.name)
+ value = '%(member_name)s'
+ if member.maybe_default_value:
+ underlying_type = _GetUnderlyingPrimitiveType(member.type)
+ value = (
+ '(%(member_name)s !== undefined) ? %(member_name)s : ' +
+ self._CompileConstant(member.maybe_default_value, underlying_type))
+ elif self.fidl.declarations.get(member.type.identifier) == \
+ fidl.DeclarationsMap.UNION:
+ union_compound = _ParseCompoundIdentifier(member.type.identifier)
+ union_name = _CompileCompoundIdentifier(union_compound)
+ value = ('(%(member_name)s !== undefined) ? %(member_name)s : ' + 'new '
+ + union_name + '()')
+ self.f.write((' this.%(member_name)s = ' + value + ';\n') %
+ {'member_name': member_name})
+ self.f.write('}\n\n')
+
+ self.f.write('''const _kTT_%(name)s = {
+ enc: function(e, o, v) {
+''' % {'name': name})
+
+ for member in struct.members:
+ element_ttname = self._CompileType(member.type)
+ self.f.write(
+ ' _kTT_%(element_ttname)s.enc('
+ 'e, o + %(offset)s, v.%(member_name)s);\n' % {
+ 'element_ttname': element_ttname,
+ 'offset': member.offset,
+ 'member_name': _ChangeIfReserved(member.name)
+ })
+
+ self.f.write(''' },
+ dec: function(d, o) {
+''')
+
+ for member in struct.members:
+ element_ttname = self._CompileType(member.type)
+ self.f.write(
+ ' var $temp_%(member_name)s = _kTT_%(element_ttname)s.dec('
+ 'd, o + %(offset)s);\n' % {
+ 'element_ttname': element_ttname,
+ 'offset': member.offset,
+ 'member_name': _ChangeIfReserved(member.name)
+ })
+ self.f.write(''' return new %(name)s(%(temp_names)s);
+ }
+};
+
+''' % {
+ 'name': name,
+ 'temp_names': ', '.join(['$temp_' + x for x in param_names])
+ })
+
+ def _CompileType(self, t):
+ """Ensures there's a type table for the given type, and returns the stem of
+ its name."""
+ if t.kind == fidl.TypeKind.PRIMITIVE:
+ return t.subtype
+ elif t.kind == fidl.TypeKind.STRING:
+ return 'String' + ('_Nullable' if t.nullable else '')
+ elif t.kind == fidl.TypeKind.IDENTIFIER:
+ compound = _ParseCompoundIdentifier(t.identifier)
+ name = _CompileCompoundIdentifier(compound)
+ return name + ('_Nullable' if t.nullable else '')
+ elif t.kind == fidl.TypeKind.HANDLE or t.kind == fidl.TypeKind.REQUEST:
+ return 'Handle'
+ elif t.kind == fidl.TypeKind.ARRAY:
+ element_ttname = self._CompileType(t.element_type)
+ ttname = 'ARR_%d_%s' % (t.element_count, element_ttname)
+ if ttname not in self.type_table_defined:
+ self.type_table_defined.add(ttname)
+ self.output_deferred_to_eof += ('''\
+const _kTT_%(ttname)s = {
+ enc: function(e, o, v) {
+ for (var i = 0; i < %(element_count)s; i++) {
+ _kTT_%(element_ttname)s.enc(e, o + (i * %(element_size)s), v[i]);
+ }
+ },
+ dec: function(d, o) {
+ var result = [];
+ for (var i = 0; i < %(element_count)s; i++) {
+ result.push(_kTT_%(element_ttname)s.dec(d, o + (i * %(element_size)s)));
+ }
+ return result;
+ },
+};
+
+''' % {
+ 'ttname': ttname,
+ 'element_ttname': element_ttname,
+ 'element_count': t.element_count,
+ 'element_size': self._InlineSizeOfType(t.element_type),
+ })
+ return ttname
+ elif t.kind == fidl.TypeKind.VECTOR:
+ element_ttname = self._CompileType(t.element_type)
+ ttname = ('VEC_' + ('Nullable_' if t.nullable else '') + element_ttname)
+ if t.nullable:
+ handle_null_enc = '''e.data.setUint32(o, 0, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0, $fidl__kLE);
+ e.data.setUint32(o + 12, 0, $fidl__kLE);
+ return;
+'''
+ handle_null_dec = 'return null;'
+ else:
+ handle_null_enc = 'throw "non-null vector required";'
+ handle_null_dec = 'throw "non-null vector required";'
+
+ if ttname not in self.type_table_defined:
+ self.type_table_defined.add(ttname)
+ self.output_deferred_to_eof += ('''\
+const _kTT_%(ttname)s = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) {
+ %(handle_null_enc)s
+ }
+ e.data.setUint32(o, v.length, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0xffffffff, $fidl__kLE);
+ e.data.setUint32(o + 12, 0xffffffff, $fidl__kLE);
+ var start = e.alloc(v.length * %(element_size)s);
+ for (var i = 0; i < v.length; i++) {
+ _kTT_%(element_ttname)s.enc(e, start + (i * %(element_size)s), v[i]);
+ }
+ },
+ dec: function(d, o) {
+ var len = d.data.getUint32(o, $fidl__kLE);
+ var pointer = d.data.getUint32(o + 8, $fidl__kLE);
+ if (pointer === 0) {
+ %(handle_null_dec)s
+ }
+ var dataOffset = d.claimMemory(len * %(element_size)s);
+ var result = [];
+ for (var i = 0; i < len; i++) {
+ result.push(_kTT_%(element_ttname)s.dec(
+ d, dataOffset + (i * %(element_size)s)));
+ }
+ return result;
+ }
+};
+
+''' % {
+ 'ttname': ttname,
+ 'element_ttname': element_ttname,
+ 'element_size': self._InlineSizeOfType(t.element_type),
+ 'handle_null_enc': handle_null_enc,
+ 'handle_null_dec': handle_null_dec,
+ })
+ return ttname
+ else:
+ raise NotImplementedError(t.kind)
+
+ def _GenerateJsInterfaceForInterface(self, name, interface):
+ """Generates a JS @interface for the given FIDL interface."""
+ self.f.write('''/**
+ * @interface
+ */
+function %(name)s() {}
+
+''' % {'name': name})
+
+ # Define a JS interface part for the interface for typechecking.
+ for method in interface.methods:
+ method_name = _CompileIdentifier(method.name)
+ if method.has_request:
+ param_names = [_CompileIdentifier(x.name) for x in method.maybe_request]
+ if len(param_names):
+ self.f.write('/**\n')
+ # TODO(crbug.com/883496): Emit @param and @return type comments.
+ self.f.write(' */\n')
+ self.f.write(
+ '%(name)s.prototype.%(method_name)s = '
+ 'function(%(param_names)s) {};\n\n' % {
+ 'name': name,
+ 'method_name': method_name,
+ 'param_names': ', '.join(param_names)
+ })
+
+ # Emit message ordinals for later use.
+ for method in interface.methods:
+ method_name = _CompileIdentifier(method.name)
+ self.f.write(
+ 'const _k%(name)s_%(method_name)s_Ordinal = %(ordinal)s;\n' % {
+ 'name': name,
+ 'method_name': method_name,
+ 'ordinal': method.ordinal
+ })
+
+ self.f.write('\n')
+
+ def _GenerateJsProxyForInterface(self, name, interface):
+ """Generates the JS side implementation of a proxy class implementing the
+ given interface."""
+ proxy_name = name + 'Proxy'
+ self.f.write('''/**
+ * @constructor
+ * @implements %(name)s
+ */
+function %(proxy_name)s() {
+ this.channel = $ZX_HANDLE_INVALID;
+}
+
+%(proxy_name)s.prototype.$bind = function(channel) {
+ this.channel = channel;
+};
+
+%(proxy_name)s.prototype.$is_bound = function() {
+ return this.channel != $ZX_HANDLE_INVALID;
+};
+
+%(proxy_name)s.prototype.$request = function() {
+ if (this.$is_bound())
+ throw "Proxy already bound";
+ var pair = $ZxChannelCreate();
+ if (pair.status != $ZX_OK)
+ throw "ChannelPair creation failed";
+ this.channel = pair.first;
+ return pair.second;
+};
+
+%(proxy_name)s.prototype.$close = function() {
+ if (!this.$is_bound())
+ return;
+ var status = $zx_handle_close(this.channel);
+ if (status !== $ZX_OK) {
+ throw "close handle failed";
+ }
+ this.channel = $ZX_HANDLE_INVALID;
+};
+
+''' % {
+ 'name': name,
+ 'proxy_name': proxy_name
+ })
+ for method in interface.methods:
+ method_name = _CompileIdentifier(method.name)
+ if method.has_request:
+ type_tables = []
+ for param in method.maybe_request:
+ type_tables.append(self._CompileType(param.type))
+ param_names = [_CompileIdentifier(x.name) for x in method.maybe_request]
+ self.f.write(
+ '''\
+%(proxy_name)s.prototype.%(method_name)s = function(%(param_names)s) {
+ if (this.channel === $ZX_HANDLE_INVALID) {
+ throw "channel closed";
+ }
+ var $encoder = new $fidl_Encoder(_k%(name)s_%(method_name)s_Ordinal);
+ $encoder.alloc(%(size)s - $fidl_kMessageHeaderSize);
+''' % {
+ 'name': name,
+ 'proxy_name': proxy_name,
+ 'method_name': method_name,
+ 'param_names': ', '.join(param_names),
+ 'size': method.maybe_request_size
+ })
+
+ for param, ttname in zip(method.maybe_request, type_tables):
+ self.f.write(
+ '''\
+ _kTT_%(type_table)s.enc($encoder, %(offset)s, %(param_name)s);
+''' % {
+ 'type_table': ttname,
+ 'param_name': _CompileIdentifier(param.name),
+ 'offset': param.offset
+ })
+
+ self.f.write(''' var $writeResult = $ZxChannelWrite(this.channel,
+ $encoder.messageData(),
+ $encoder.messageHandles());
+ if ($writeResult !== $ZX_OK) {
+ throw "$ZxChannelWrite failed: " + $writeResult;
+ }
+''')
+
+ if method.has_response:
+ type_tables = []
+ for param in method.maybe_response:
+ type_tables.append(self._CompileType(param.type))
+ self.f.write('''
+ return $ZxObjectWaitOne(this.channel, $ZX_CHANNEL_READABLE, $ZX_TIME_INFINITE)
+ .then(() => new Promise(res => {
+ var $readResult = $ZxChannelRead(this.channel);
+ if ($readResult.status !== $ZX_OK) {
+ throw "channel read failed";
+ }
+
+ var $view = new DataView($readResult.data);
+
+ var $decoder = new $fidl_Decoder($view, $readResult.handles);
+ $decoder.claimMemory(%(size)s - $fidl_kMessageHeaderSize);
+''' % {'size': method.maybe_response_size})
+ for param, ttname in zip(method.maybe_response, type_tables):
+ self.f.write(
+ '''\
+ var %(param_name)s = _kTT_%(type_table)s.dec($decoder, %(offset)s);
+''' % {
+ 'type_table': ttname,
+ 'param_name': _CompileIdentifier(param.name),
+ 'offset': param.offset
+ })
+
+ self.f.write('''
+ res(%(args)s);
+ }));
+''' % {'args': ', '.join(x.name for x in method.maybe_response)})
+
+ self.f.write('''};
+
+''')
+
+ def _CompileInterface(self, interface):
+ compound = _ParseCompoundIdentifier(interface.name)
+ name = _CompileCompoundIdentifier(compound)
+ self._GenerateJsInterfaceForInterface(name, interface)
+ self._GenerateJsProxyForInterface(name, interface)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('json')
+ parser.add_argument('--output', required=True)
+ args = parser.parse_args()
+
+ fidl_obj = fidl.fidl_from_dict(json.load(open(args.json, 'r')))
+ with open(args.output, 'w') as f:
+ c = Compiler(fidl_obj, f)
+ c.Compile()
+
+
+if __name__ == '__main__':
+ main()
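+
+# Illustrative invocation (paths are hypothetical):
+#   python gen.py out/fidl/mylib.fidl.json --output gen/mylib.js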
diff --git a/deps/v8/build/fuchsia/fidlgen_js/runtime/fidl.mjs b/deps/v8/build/fuchsia/fidlgen_js/runtime/fidl.mjs
new file mode 100644
index 0000000000..722098b143
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/runtime/fidl.mjs
@@ -0,0 +1,270 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This is the JS runtime support library for code generated by fidlgen_js. It
+// mostly consists of helpers to facilitate encoding and decoding of FIDL
+// messages.
+
+const $fidl_kInitialBufferSize = 1024;
+
+const $fidl_kMessageHeaderSize = 16;
+const $fidl_kMessageTxidOffset = 0;
+const $fidl_kMessageOrdinalOffset = 12;
+
+const $fidl__kAlignment = 8;
+const $fidl__kAlignmentMask = 0x7;
+
+const $fidl__kLE = true;
+
+const $fidl__kUserspaceTxidMask = 0x7fffffff;
+const $fidl__kHandlePresent = 0xffffffff;
+const $fidl__kInvalidUnionTag = 0xffffffff;
+var $fidl__nextTxid = 1;
+
+function $fidl__align(size) {
+ return size + (($fidl__kAlignment - (size & $fidl__kAlignmentMask)) &
+ $fidl__kAlignmentMask);
+}
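+
+// For example, $fidl__align(1) === 8 and $fidl__align(8) === 8; FIDL
+// out-of-line objects are 8-byte aligned.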
+
+/**
+ * @constructor
+ * @param {number} ordinal
+ */
+function $fidl_Encoder(ordinal) {
+ var buf = new ArrayBuffer($fidl_kInitialBufferSize);
+ this.data = new DataView(buf);
+ this.extent = 0;
+ this.handles = [];
+ this._encodeMessageHeader(ordinal);
+}
+
+/**
+ * @param {number} ordinal
+ */
+$fidl_Encoder.prototype._encodeMessageHeader = function(ordinal) {
+ this.alloc($fidl_kMessageHeaderSize);
+ var txid = $fidl__nextTxid++ & $fidl__kUserspaceTxidMask;
+ this.data.setUint32($fidl_kMessageTxidOffset, txid, $fidl__kLE);
+ this.data.setUint32($fidl_kMessageOrdinalOffset, ordinal, $fidl__kLE);
+};
+
+/**
+ * @param {number} size
+ */
+$fidl_Encoder.prototype.alloc = function(size) {
+ var offset = this.extent;
+ this._claimMemory($fidl__align(size));
+ return offset;
+};
+
+/**
+ * @param {number} claimSize
+ */
+$fidl_Encoder.prototype._claimMemory = function(claimSize) {
+ this.extent += claimSize;
+ if (this.extent > this.data.byteLength) {
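+   // Grow past the immediate requirement so that a run of small allocations
+   // doesn't copy the buffer every time.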
+ var newSize = this.data.byteLength + claimSize;
+ newSize += newSize * 2;
+ this._grow(newSize);
+ }
+};
+
+/**
+ * @param {number} newSize
+ */
+$fidl_Encoder.prototype._grow = function(newSize) {
+ var newBuffer = new ArrayBuffer(newSize);
+ new Uint8Array(newBuffer).set(new Uint8Array(this.data.buffer));
+ this.data = new DataView(newBuffer);
+};
+
+/**
+ * @param {number} handle
+ */
+$fidl_Encoder.prototype.addHandle = function(handle) {
+ this.handles.push(handle);
+};
+
+$fidl_Encoder.prototype.messageData = function() {
+ return new DataView(this.data.buffer, 0, this.extent);
+};
+
+$fidl_Encoder.prototype.messageHandles = function() {
+ return this.handles;
+};
+
+
+/**
+ * @constructor
+ * @param {Array} data
+ * @param {Array} handles
+ */
+function $fidl_Decoder(data, handles) {
+ this.data = data;
+ this.handles = handles;
+ this.nextOffset = 0;
+ this.nextHandle = 0;
+ this.claimMemory($fidl_kMessageHeaderSize);
+}
+
+/**
+ * @param {number} size
+ */
+$fidl_Decoder.prototype.claimMemory = function(size) {
+ var result = this.nextOffset;
+ this.nextOffset = $fidl__align(this.nextOffset + size);
+ return result;
+};
+
+$fidl_Decoder.prototype.claimHandle = function() {
+ if (this.nextHandle >= this.handles.length)
+ throw "Attempt to claim more handles than are available";
+ return this.handles[this.nextHandle++];
+};
+
+
+// Type tables and encoding helpers for generated Proxy code.
+const _kTT_bool = {
+ enc: function(e, o, v) { e.data.setInt8(o, v ? 1 : 0); },
+ dec: function(d, o) { return d.data.getInt8(o) != 0; },
+};
+
+const _kTT_float32 = {
+ enc: function(e, o, v) { e.data.setFloat32(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getFloat32(o, $fidl__kLE); },
+};
+
+const _kTT_float64 = {
+ enc: function(e, o, v) { e.data.setFloat64(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getFloat64(o, $fidl__kLE); },
+};
+
+const _kTT_int8 = {
+ enc: function(e, o, v) { e.data.setInt8(o, v); },
+ dec: function(d, o) { return d.data.getInt8(o); },
+};
+
+const _kTT_int16 = {
+ enc: function(e, o, v) { e.data.setInt16(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getInt16(o, $fidl__kLE); },
+};
+
+const _kTT_int32 = {
+ enc: function(e, o, v) { e.data.setInt32(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getInt32(o, $fidl__kLE); },
+};
+
+const _kTT_int64 = {
+ enc: function(e, o, v) {
+ var bi = BigInt.asIntN(64, BigInt(v));
+ var x = Number(bi & 0xffffffffn);
+ var y = Number((bi >> 32n) & 0xffffffffn);
+ e.data.setInt32(o, x, $fidl__kLE);
+ e.data.setInt32(o + 4, y, $fidl__kLE);
+ },
+ dec: function(d, o) {
+   var x = BigInt(d.data.getUint32(o, $fidl__kLE));
+   var y = BigInt.asIntN(64, BigInt(d.data.getInt32(o + 4, $fidl__kLE)));
+   return BigInt.asIntN(64, x | (y << 32n));
+ },
+};
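+
+// Round-trip sketch: _kTT_int64.enc splits a BigInt into low/high 32-bit
+// halves and _kTT_int64.dec reassembles them, so a value v decodes back to
+// BigInt.asIntN(64, BigInt(v)).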
+
+const _kTT_uint8 = {
+ enc: function(e, o, v) { e.data.setUint8(o, v); },
+ dec: function(d, o) { return d.data.getUint8(o); },
+};
+
+const _kTT_uint16 = {
+ enc: function(e, o, v) { e.data.setUint16(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getUint16(o, $fidl__kLE); },
+};
+
+const _kTT_uint32 = {
+ enc: function(e, o, v) { e.data.setUint32(o, v, $fidl__kLE); },
+ dec: function(d, o) { return d.data.getUint32(o, $fidl__kLE); },
+};
+
+const _kTT_uint64 = {
+ enc: function(e, o, v) {
+ var bi = BigInt.asUintN(64, BigInt(v));
+ var x = Number(bi & 0xffffffffn);
+ var y = Number((bi >> 32n) & 0xffffffffn);
+ e.data.setUint32(o, x, $fidl__kLE);
+ e.data.setUint32(o + 4, y, $fidl__kLE);
+ },
+ dec: function(d, o) {
+ var x = BigInt.asUintN(64, BigInt(d.data.getUint32(o, $fidl__kLE)));
+ var y = BigInt.asUintN(64, BigInt(d.data.getUint32(o + 4, $fidl__kLE)));
+ return x | (y << 32n);
+ },
+};
+
+const _kTT_Handle = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) {
+ e.data.setUint32(o, 0, $fidl__kLE);
+ } else {
+ e.data.setUint32(o, $fidl__kHandlePresent, $fidl__kLE);
+ e.addHandle(v);
+ }
+ },
+ dec: function(d, o) {
+ var $present = d.data.getUint32(o, $fidl__kLE);
+ if ($present === 0) {
+ return 0;
+ } else {
+ if ($present !== $fidl__kHandlePresent)
+ throw "Expected UINT32_MAX to indicate handle presence";
+ return d.claimHandle();
+ }
+ },
+};
+
+const _kTT_String = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) throw "non-null string required";
+ // Both size and data are uint64, but that's awkward in JS, so for now only
+ // support lengths that fit in 32 bits. The maximum length of a FIDL message
+ // is far below that in any case.
+ var asUtf8 = $FidlJsStrToUtf8Array(v);
+ e.data.setUint32(o, asUtf8.length, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0xffffffff, $fidl__kLE);
+ e.data.setUint32(o + 12, 0xffffffff, $fidl__kLE);
+ var body = e.alloc(asUtf8.length);
+ for (var i = 0; i < asUtf8.length; i++) {
+ e.data.setUint8(body + i, asUtf8[i], $fidl__kLE);
+ }
+ },
+ dec: function(d, o) {
+ var len = d.data.getUint32(o, $fidl__kLE);
+ var pointer = d.data.getUint32(o + 8, $fidl__kLE);
+ if (pointer === 0) throw "non-null string required";
+ var dataOffset = d.claimMemory(len);
+ return $FidlJsUtf8ArrayToStr(new DataView(d.data.buffer, dataOffset, len));
+ }
+};
+
+const _kTT_String_Nullable = {
+ enc: function(e, o, v) {
+ if (v === null || v === undefined) {
+ e.data.setUint32(o, 0, $fidl__kLE);
+ e.data.setUint32(o + 4, 0, $fidl__kLE);
+ e.data.setUint32(o + 8, 0, $fidl__kLE);
+ e.data.setUint32(o + 12, 0, $fidl__kLE);
+ } else {
+ _kTT_String.enc(e, o, v);
+ }
+ },
+ dec: function(d, o) {
+ var pointer = d.data.getUint32(o + 8, $fidl__kLE);
+ if (pointer === 0) {
+   return null;
+ }
+ return _kTT_String.dec(d, o);
+ }
+};
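+
+// Illustrative use by generated proxy code (the ordinal, sizes, offset, and
+// someChannel are hypothetical):
+//   var e = new $fidl_Encoder(1 /* ordinal */);
+//   e.alloc(24 - $fidl_kMessageHeaderSize);
+//   _kTT_uint32.enc(e, 16, 42);
+//   $ZxChannelWrite(someChannel, e.messageData(), e.messageHandles());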
diff --git a/deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.cc b/deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.cc
new file mode 100644
index 0000000000..6dd1b1964b
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.cc
@@ -0,0 +1,438 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "build/fuchsia/fidlgen_js/runtime/zircon.h"
+
+#include <lib/async/default.h>
+#include <lib/async/wait.h>
+#include <lib/zx/channel.h>
+#include <zircon/errors.h>
+#include <zircon/syscalls.h>
+#include <zircon/types.h>
+
+#include "base/bind.h"
+#include "base/threading/thread_checker.h"
+#include "gin/arguments.h"
+#include "gin/array_buffer.h"
+#include "gin/converter.h"
+#include "gin/data_object_builder.h"
+#include "gin/function_template.h"
+#include "gin/public/gin_embedders.h"
+
+namespace {
+
+fidljs::WaitSet& GetWaitsForIsolate(v8::Isolate* isolate) {
+ return *static_cast<fidljs::WaitSet*>(
+ isolate->GetData(gin::kEmbedderFuchsia));
+}
+
+} // namespace
+
+namespace fidljs {
+
+class WaitPromiseImpl : public async_wait_t {
+ public:
+ WaitPromiseImpl(v8::Isolate* isolate,
+ v8::Local<v8::Context> context,
+ v8::Local<v8::Promise::Resolver> resolver,
+ zx_handle_t handle,
+ zx_signals_t signals)
+ : async_wait_t({ASYNC_STATE_INIT, &WaitPromiseImpl::StaticOnSignaled,
+ handle, signals}),
+ isolate_(isolate),
+ wait_state_(WaitState::kCreated),
+ failed_start_status_(ZX_OK) {
+ context_.Reset(isolate_, context);
+ resolver_.Reset(isolate_, resolver);
+ }
+
+ ~WaitPromiseImpl() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ switch (wait_state_) {
+ case WaitState::kCreated:
+ // The wait never started, so reject the promise (but don't attempt to
+ // cancel the wait).
+ DCHECK_NE(failed_start_status_, ZX_OK);
+ RejectPromise(failed_start_status_, 0);
+ break;
+
+ case WaitState::kStarted:
+ // The wait was started, but has not yet completed. Cancel the wait and
+ // reject the promise. The object is being destructed here because it's
+ // been removed from the set of waits attached to the isolate, so
+ // we need not remove it.
+ CHECK_EQ(async_cancel_wait(async_get_default_dispatcher(), this),
+ ZX_OK);
+ RejectPromise(ZX_ERR_CANCELED, 0);
+ break;
+
+ case WaitState::kCompleted:
+ // The callback has already been called and so the promise has been
+ // resolved or rejected, and the wait has been removed from the
+ // dispatcher, so there's nothing to do.
+ break;
+ }
+ }
+
+ bool BeginWait() {
+ DCHECK_EQ(wait_state_, WaitState::kCreated);
+ zx_status_t status = async_begin_wait(async_get_default_dispatcher(), this);
+ if (status == ZX_OK) {
+ wait_state_ = WaitState::kStarted;
+ } else {
+ failed_start_status_ = status;
+ }
+ return status == ZX_OK;
+ }
+
+ private:
+ static void StaticOnSignaled(async_dispatcher_t* dispatcher,
+ async_wait_t* wait,
+ zx_status_t status,
+ const zx_packet_signal_t* signal) {
+ auto* self = static_cast<WaitPromiseImpl*>(wait);
+ self->OnSignaled(status, signal);
+ }
+
+ void OnSignaled(zx_status_t status, const zx_packet_signal_t* signal) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_EQ(wait_state_, WaitState::kStarted);
+ DCHECK_NE(status, ZX_ERR_CANCELED)
+ << "wait should have been canceled before shutdown";
+
+ wait_state_ = WaitState::kCompleted;
+
+ if (status == ZX_OK &&
+ (signal->observed & signal->trigger) == signal->trigger) {
+ ResolvePromise(signal->observed);
+ } else {
+ RejectPromise(status, signal->observed);
+ }
+
+ GetWaitsForIsolate(isolate_).erase(this);
+ // |this| has been deleted.
+ }
+
+ void ResolvePromise(zx_signals_t observed) {
+ v8::Local<v8::Promise::Resolver> resolver(resolver_.Get(isolate_));
+ v8::Local<v8::Context> context(context_.Get(isolate_));
+ v8::Local<v8::Object> value = gin::DataObjectBuilder(isolate_)
+ .Set("status", ZX_OK)
+ .Set("observed", observed)
+ .Build();
+ resolver->Resolve(context, value).ToChecked();
+ }
+
+ void RejectPromise(zx_status_t status, zx_signals_t observed) {
+ v8::Local<v8::Promise::Resolver> resolver(resolver_.Get(isolate_));
+ v8::Local<v8::Context> context(context_.Get(isolate_));
+ v8::Local<v8::Object> value = gin::DataObjectBuilder(isolate_)
+ .Set("status", status)
+ .Set("observed", observed)
+ .Build();
+ resolver->Reject(context, value).ToChecked();
+ }
+
+ v8::Isolate* isolate_;
+ v8::Global<v8::Context> context_;
+ v8::Global<v8::Promise::Resolver> resolver_;
+ enum class WaitState {
+ kCreated,
+ kStarted,
+ kCompleted,
+ } wait_state_;
+ zx_status_t failed_start_status_;
+
+ THREAD_CHECKER(thread_checker_);
+
+ DISALLOW_COPY_AND_ASSIGN(WaitPromiseImpl);
+};
+
+} // namespace fidljs
+
+namespace {
+
+v8::Local<v8::Promise> ZxObjectWaitOne(gin::Arguments* args) {
+ zx_handle_t handle;
+ if (!args->GetNext(&handle)) {
+ args->ThrowError();
+ return v8::Local<v8::Promise>();
+ }
+
+ zx_signals_t signals;
+ if (!args->GetNext(&signals)) {
+ args->ThrowError();
+ return v8::Local<v8::Promise>();
+ }
+
+ v8::MaybeLocal<v8::Promise::Resolver> maybe_resolver =
+ v8::Promise::Resolver::New(args->GetHolderCreationContext());
+ v8::Local<v8::Promise::Resolver> resolver;
+ if (maybe_resolver.ToLocal(&resolver)) {
+ auto wait = std::make_unique<fidljs::WaitPromiseImpl>(
+ args->isolate(), args->GetHolderCreationContext(), resolver, handle,
+ signals);
+ if (wait->BeginWait()) {
+ // The wait will always be notified asynchronously, so it's OK to delay
+ // adding it to the set until BeginWait() has returned successfully. Move
+ // |wait| into the set of active waits.
+ GetWaitsForIsolate(args->isolate()).insert(std::move(wait));
+ }
+
+ // If BeginWait() fails, then |wait| will be deleted here, causing the
+ // returned promise to be rejected.
+ return resolver->GetPromise();
+ }
+
+ return v8::Local<v8::Promise>();
+}
+
+v8::Local<v8::Value> ZxChannelCreate(gin::Arguments* args) {
+ zx_handle_t channel0, channel1;
+ zx_status_t status = zx_channel_create(0, &channel0, &channel1);
+ if (status != ZX_OK) {
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Build();
+ }
+
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Set("first", channel0)
+ .Set("second", channel1)
+ .Build();
+}
+
+zx_status_t ZxChannelWrite(gin::Arguments* args) {
+ zx_handle_t handle;
+ if (!args->GetNext(&handle)) {
+ args->ThrowError();
+ return ZX_ERR_INVALID_ARGS;
+ }
+
+ gin::ArrayBufferView data;
+ if (!args->GetNext(&data)) {
+ args->ThrowError();
+ return ZX_ERR_INVALID_ARGS;
+ }
+
+ std::vector<zx_handle_t> handles;
+ if (!args->GetNext(&handles)) {
+ args->ThrowError();
+ return ZX_ERR_INVALID_ARGS;
+ }
+
+ zx_status_t status =
+ zx_channel_write(handle, 0, data.bytes(), data.num_bytes(),
+ handles.data(), handles.size());
+ return status;
+}
+
+v8::Local<v8::Object> ZxChannelRead(gin::Arguments* args) {
+ zx_handle_t handle;
+ if (!args->GetNext(&handle)) {
+ args->ThrowError();
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", ZX_ERR_INVALID_ARGS)
+ .Build();
+ }
+ zx::unowned_channel ch(handle);
+
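+ // Query the pending message's size by reading with zero-length buffers; this
+ // first call is expected to fail with ZX_ERR_BUFFER_TOO_SMALL and report the
+ // required sizes in |data_size| and |num_handles|.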
+ uint32_t data_size;
+ uint32_t num_handles;
+ zx_status_t status =
+ ch->read(0, nullptr, nullptr, 0, 0, &data_size, &num_handles);
+ DCHECK_EQ(status, ZX_ERR_BUFFER_TOO_SMALL);
+
+ std::vector<zx_handle_t> handles;
+ handles.resize(num_handles);
+
+ v8::Local<v8::ArrayBuffer> buf =
+ v8::ArrayBuffer::New(args->isolate(), data_size);
+ uint32_t actual_bytes, actual_handles;
+ status = ch->read(0, buf->GetContents().Data(), handles.data(), data_size,
+ handles.size(), &actual_bytes, &actual_handles);
+ DCHECK_EQ(actual_bytes, data_size);
+ DCHECK_EQ(actual_handles, num_handles);
+
+ if (status != ZX_OK) {
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Build();
+ }
+
+ return gin::DataObjectBuilder(args->isolate())
+ .Set("status", status)
+ .Set("data", buf)
+ .Set("handles", handles)
+ .Build();
+}
+
+v8::Local<v8::Value> StrToUtf8Array(gin::Arguments* args) {
+ std::string str;
+ // GetNext() converts the string from UCS-2 to UTF-8, so all that remains is
+ // to repackage the bytes as an array and return them.
+ if (!args->GetNext(&str)) {
+ args->ThrowError();
+ return v8::Local<v8::Object>();
+ }
+
+ // TODO(crbug.com/883496): Not sure how to make a Uint8Array to return here
+ // which would be a bit more efficient.
+ std::vector<int> data;
+ std::copy(str.begin(), str.end(), std::back_inserter(data));
+ return gin::ConvertToV8(args->isolate(), data);
+}
+
+v8::Local<v8::Value> Utf8ArrayToStr(gin::Arguments* args) {
+ gin::ArrayBufferView data;
+ if (!args->GetNext(&data)) {
+ args->ThrowError();
+ return v8::Local<v8::Value>();
+ }
+
+ // Get the UTF-8 out into a string, and then rely on ConvertToV8 to convert
+ // that to a UCS-2 string.
+ return gin::StringToV8(
+ args->isolate(), base::StringPiece(static_cast<const char*>(data.bytes()),
+ data.num_bytes()));
+}
+
+} // namespace
+
+namespace fidljs {
+
+ZxBindings::ZxBindings(v8::Isolate* isolate, v8::Local<v8::Object> global)
+ : isolate_(isolate), wait_set_(std::make_unique<WaitSet>()) {
+ DCHECK_EQ(isolate->GetData(gin::kEmbedderFuchsia), nullptr);
+ isolate->SetData(gin::kEmbedderFuchsia, wait_set_.get());
+
+#define SET_CONSTANT(k) \
+ global->Set(gin::StringToSymbol(isolate, "$" #k), \
+ gin::ConvertToV8(isolate, k))
+
+ // zx_status_t.
+ SET_CONSTANT(ZX_OK);
+ SET_CONSTANT(ZX_ERR_INTERNAL);
+ SET_CONSTANT(ZX_ERR_NOT_SUPPORTED);
+ SET_CONSTANT(ZX_ERR_NO_RESOURCES);
+ SET_CONSTANT(ZX_ERR_NO_MEMORY);
+ SET_CONSTANT(ZX_ERR_INTERNAL_INTR_RETRY);
+ SET_CONSTANT(ZX_ERR_INVALID_ARGS);
+ SET_CONSTANT(ZX_ERR_BAD_HANDLE);
+ SET_CONSTANT(ZX_ERR_WRONG_TYPE);
+ SET_CONSTANT(ZX_ERR_BAD_SYSCALL);
+ SET_CONSTANT(ZX_ERR_OUT_OF_RANGE);
+ SET_CONSTANT(ZX_ERR_BUFFER_TOO_SMALL);
+ SET_CONSTANT(ZX_ERR_BAD_STATE);
+ SET_CONSTANT(ZX_ERR_TIMED_OUT);
+ SET_CONSTANT(ZX_ERR_SHOULD_WAIT);
+ SET_CONSTANT(ZX_ERR_CANCELED);
+ SET_CONSTANT(ZX_ERR_PEER_CLOSED);
+ SET_CONSTANT(ZX_ERR_NOT_FOUND);
+ SET_CONSTANT(ZX_ERR_ALREADY_EXISTS);
+ SET_CONSTANT(ZX_ERR_ALREADY_BOUND);
+ SET_CONSTANT(ZX_ERR_UNAVAILABLE);
+ SET_CONSTANT(ZX_ERR_ACCESS_DENIED);
+ SET_CONSTANT(ZX_ERR_IO);
+ SET_CONSTANT(ZX_ERR_IO_REFUSED);
+ SET_CONSTANT(ZX_ERR_IO_DATA_INTEGRITY);
+ SET_CONSTANT(ZX_ERR_IO_DATA_LOSS);
+ SET_CONSTANT(ZX_ERR_IO_NOT_PRESENT);
+ SET_CONSTANT(ZX_ERR_IO_OVERRUN);
+ SET_CONSTANT(ZX_ERR_IO_MISSED_DEADLINE);
+ SET_CONSTANT(ZX_ERR_IO_INVALID);
+ SET_CONSTANT(ZX_ERR_BAD_PATH);
+ SET_CONSTANT(ZX_ERR_NOT_DIR);
+ SET_CONSTANT(ZX_ERR_NOT_FILE);
+ SET_CONSTANT(ZX_ERR_FILE_BIG);
+ SET_CONSTANT(ZX_ERR_NO_SPACE);
+ SET_CONSTANT(ZX_ERR_NOT_EMPTY);
+ SET_CONSTANT(ZX_ERR_STOP);
+ SET_CONSTANT(ZX_ERR_NEXT);
+ SET_CONSTANT(ZX_ERR_ASYNC);
+ SET_CONSTANT(ZX_ERR_PROTOCOL_NOT_SUPPORTED);
+ SET_CONSTANT(ZX_ERR_ADDRESS_UNREACHABLE);
+ SET_CONSTANT(ZX_ERR_ADDRESS_IN_USE);
+ SET_CONSTANT(ZX_ERR_NOT_CONNECTED);
+ SET_CONSTANT(ZX_ERR_CONNECTION_REFUSED);
+ SET_CONSTANT(ZX_ERR_CONNECTION_RESET);
+ SET_CONSTANT(ZX_ERR_CONNECTION_ABORTED);
+
+ v8::Local<v8::Context> context = isolate->GetCurrentContext();
+
+ // Handle APIs.
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxObjectWaitOne"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(ZxObjectWaitOne))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$zx_handle_close"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(zx_handle_close))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ SET_CONSTANT(ZX_HANDLE_INVALID);
+ SET_CONSTANT(ZX_TIME_INFINITE);
+
+ // Channel APIs.
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxChannelCreate"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&ZxChannelCreate))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxChannelWrite"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&ZxChannelWrite))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$ZxChannelRead"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&ZxChannelRead))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ SET_CONSTANT(ZX_CHANNEL_READABLE);
+ SET_CONSTANT(ZX_CHANNEL_WRITABLE);
+ SET_CONSTANT(ZX_CHANNEL_PEER_CLOSED);
+ SET_CONSTANT(ZX_CHANNEL_READ_MAY_DISCARD);
+ SET_CONSTANT(ZX_CHANNEL_MAX_MSG_BYTES);
+ SET_CONSTANT(ZX_CHANNEL_MAX_MSG_HANDLES);
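+
+  // A JS-side sketch of the channel APIs registered above. The
+  // {status, data, handles} result shape matches the object built earlier in
+  // this file; the single-handle argument to $ZxChannelRead is an assumption:
+  //   var r = $ZxChannelRead(channelHandle);
+  //   if (r.status === $ZX_OK) { /* consume r.data and r.handles */ }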
+
+  // Utilities to make it easier to convert strings between UCS-2 (JS) and
+  // UTF-8 (FIDL).
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$FidlJsStrToUtf8Array"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&StrToUtf8Array))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+ global
+ ->Set(context, gin::StringToSymbol(isolate, "$FidlJsUtf8ArrayToStr"),
+ gin::CreateFunctionTemplate(isolate,
+ base::BindRepeating(&Utf8ArrayToStr))
+ ->GetFunction(context)
+ .ToLocalChecked())
+ .ToChecked();
+
+#undef SET_CONSTANT
+}
+
+ZxBindings::~ZxBindings() {
+ wait_set_->clear();
+ isolate_->SetData(gin::kEmbedderFuchsia, nullptr);
+}
+
+} // namespace fidljs
diff --git a/deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.h b/deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.h
new file mode 100644
index 0000000000..b54d35495c
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/runtime/zircon.h
@@ -0,0 +1,58 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_FUCHSIA_FIDLGEN_JS_RUNTIME_ZIRCON_H_
+#define BUILD_FUCHSIA_FIDLGEN_JS_RUNTIME_ZIRCON_H_
+
+#include <memory>
+
+#include "base/containers/flat_set.h"
+#include "base/containers/unique_ptr_adapters.h"
+#include "base/macros.h"
+#include "v8/include/v8.h"
+
+namespace fidljs {
+
+class WaitPromiseImpl;
+
+// A WaitSet is associated with each Isolate and represents all outstanding
+// waits that are queued on the dispatcher.
+//
+// If the wait completes normally, the contained promise is resolved, the
+// WaitPromiseImpl is marked as completed, and then deleted (by removing it from
+// the pending set).
+//
+// If the caller shuts down with outstanding waits pending, the asynchronous
+// waits are canceled by clearing the set (which deletes all the
+// WaitPromiseImpls). If a WaitPromiseImpl has not completed when it is
+// destroyed, it cancels the outstanding wait in its destructor.
+//
+// WaitPromiseImpl is responsible for resolving or rejecting promises. If the
+// object was created but a wait never started, it will not have been added to
+// the wait set, and so it rejects the promise immediately. Otherwise, the
+// promise is resolved or rejected when the asynchronous wait is signaled or
+// canceled.
+using WaitSet =
+ base::flat_set<std::unique_ptr<WaitPromiseImpl>, base::UniquePtrComparator>;
+
+class ZxBindings {
+ public:
+  // Adds Zircon API bindings to |global|, for use by JavaScript callers.
+ ZxBindings(v8::Isolate* isolate, v8::Local<v8::Object> global);
+
+  // Cleans up the storage attached to the isolate by the bindings, and
+  // cancels any pending asynchronous requests. It is important that this be
+  // done before the v8 context is torn down.
+ ~ZxBindings();
+
+ private:
+ v8::Isolate* const isolate_;
+ std::unique_ptr<WaitSet> wait_set_;
+
+ DISALLOW_COPY_AND_ASSIGN(ZxBindings);
+};
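+
+// A minimal usage sketch, mirroring the harness in
+// test/fidlgen_js_unittest.cc (the gin::ShellRunner setup is illustrative;
+// any object exposing a v8 global works):
+//
+//   gin::ShellRunner runner(&delegate, isolate);
+//   gin::Runner::Scope scope(&runner);
+//   auto bindings = std::make_unique<ZxBindings>(isolate, runner.global());
+//   // ... run scripts that use the $zx_*/$Zx* bindings ...
+//   bindings.reset();  // Must precede tearing down the v8 context.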
+
+} // namespace fidljs
+
+#endif // BUILD_FUCHSIA_FIDLGEN_JS_RUNTIME_ZIRCON_H_
diff --git a/deps/v8/build/fuchsia/fidlgen_js/test/fidlgen_js_unittest.cc b/deps/v8/build/fuchsia/fidlgen_js/test/fidlgen_js_unittest.cc
new file mode 100644
index 0000000000..ed025c878b
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/test/fidlgen_js_unittest.cc
@@ -0,0 +1,1334 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <lib/fidl/cpp/binding.h>
+#include <lib/fidl/cpp/internal/pending_response.h>
+#include <lib/fidl/cpp/internal/weak_stub_controller.h>
+#include <lib/zx/debuglog.h>
+#include <zircon/syscalls/log.h>
+
+#include "base/bind.h"
+#include "base/files/file_util.h"
+#include "base/fuchsia/fuchsia_logging.h"
+#include "base/run_loop.h"
+#include "base/stl_util.h"
+#include "base/strings/stringprintf.h"
+#include "base/test/launcher/unit_test_launcher.h"
+#include "base/test/test_suite.h"
+#include "base/test/test_timeouts.h"
+#include "build/fuchsia/fidlgen_js/fidl/fidljstest/cpp/fidl.h"
+#include "build/fuchsia/fidlgen_js/runtime/zircon.h"
+#include "gin/converter.h"
+#include "gin/modules/console.h"
+#include "gin/object_template_builder.h"
+#include "gin/public/isolate_holder.h"
+#include "gin/shell_runner.h"
+#include "gin/test/v8_test.h"
+#include "gin/try_catch.h"
+#include "v8/include/v8.h"
+
+static const char kRuntimeFile[] =
+ "/pkg/build/fuchsia/fidlgen_js/runtime/fidl.mjs";
+static const char kTestBindingFile[] =
+ "/pkg/build/fuchsia/fidlgen_js/fidl/fidljstest/js/fidl.js";
+
+namespace {
+
+zx_koid_t GetKoidForHandle(zx_handle_t handle) {
+ zx_info_handle_basic_t info;
+ zx_status_t status = zx_object_get_info(handle, ZX_INFO_HANDLE_BASIC, &info,
+ sizeof(info), nullptr, nullptr);
+ if (status != ZX_OK) {
+ ZX_LOG(ERROR, status) << "zx_object_get_info";
+ return ZX_KOID_INVALID;
+ }
+ return info.koid;
+}
+
+zx_koid_t GetKoidForHandle(const zx::object_base& object) {
+ return GetKoidForHandle(object.get());
+}
+
+} // namespace
+
+class FidlGenJsTestShellRunnerDelegate : public gin::ShellRunnerDelegate {
+ public:
+ FidlGenJsTestShellRunnerDelegate() {}
+
+ v8::Local<v8::ObjectTemplate> GetGlobalTemplate(
+ gin::ShellRunner* runner,
+ v8::Isolate* isolate) override {
+ v8::Local<v8::ObjectTemplate> templ =
+ gin::ObjectTemplateBuilder(isolate).Build();
+ gin::Console::Register(isolate, templ);
+ return templ;
+ }
+
+ void UnhandledException(gin::ShellRunner* runner,
+ gin::TryCatch& try_catch) override {
+ LOG(ERROR) << try_catch.GetStackTrace();
+ ADD_FAILURE();
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FidlGenJsTestShellRunnerDelegate);
+};
+
+using FidlGenJsTest = gin::V8Test;
+
+TEST_F(FidlGenJsTest, BasicJSSetup) {
+ v8::Isolate* isolate = instance_->isolate();
+
+ std::string source = "log('this is a log'); this.stuff = 'HAI';";
+ FidlGenJsTestShellRunnerDelegate delegate;
+ gin::ShellRunner runner(&delegate, isolate);
+ gin::Runner::Scope scope(&runner);
+ runner.Run(source, "test.js");
+
+ std::string result;
+ EXPECT_TRUE(gin::Converter<std::string>::FromV8(
+ isolate, runner.global()->Get(gin::StringToV8(isolate, "stuff")),
+ &result));
+ EXPECT_EQ("HAI", result);
+}
+
+void LoadAndSource(gin::ShellRunner* runner, const base::FilePath& filename) {
+ std::string contents;
+ ASSERT_TRUE(base::ReadFileToString(filename, &contents));
+
+ runner->Run(contents, filename.MaybeAsASCII());
+}
+
+class BindingsSetupHelper {
+ public:
+ explicit BindingsSetupHelper(v8::Isolate* isolate)
+ : isolate_(isolate),
+ handle_scope_(isolate),
+ delegate_(),
+ runner_(&delegate_, isolate),
+ scope_(&runner_),
+ zx_bindings_(
+ std::make_unique<fidljs::ZxBindings>(isolate, runner_.global())) {
+ // TODO(scottmg): Figure out how to set up v8 import hooking and make
+ // fidl_Xyz into $fidl.Xyz. Manually inject the runtime support js files
+ // for now. https://crbug.com/883496.
+ LoadAndSource(&runner_, base::FilePath(kRuntimeFile));
+ LoadAndSource(&runner_, base::FilePath(kTestBindingFile));
+
+ zx_status_t status = zx::channel::create(0, &server_, &client_);
+ EXPECT_EQ(status, ZX_OK);
+
+ runner_.global()->Set(gin::StringToSymbol(isolate, "testHandle"),
+ gin::ConvertToV8(isolate, client_.get()));
+ }
+
+ template <class T>
+ T Get(const std::string& name) {
+ T t;
+ EXPECT_TRUE(gin::Converter<T>::FromV8(
+ isolate_, runner_.global()->Get(gin::StringToV8(isolate_, name)), &t));
+ return t;
+ }
+
+ template <class T>
+ T FromV8BigInt(v8::Local<v8::Value> val);
+
+ template <>
+ uint64_t FromV8BigInt(v8::Local<v8::Value> val) {
+ EXPECT_TRUE(val->IsBigInt());
+ return val.As<v8::BigInt>()->Uint64Value(nullptr);
+ }
+
+ template <>
+ int64_t FromV8BigInt(v8::Local<v8::Value> val) {
+ EXPECT_TRUE(val->IsBigInt());
+ return val.As<v8::BigInt>()->Int64Value(nullptr);
+ }
+
+ // Custom version of gin::Converter that handles int64/uint64 from BigInt as
+ // gin::Converter is quite tied to Number.
+ template <class T>
+ std::vector<T> GetBigIntVector(const std::string& name) {
+ v8::Local<v8::Value> val =
+ runner_.global()->Get(gin::StringToV8(isolate_, name));
+ EXPECT_TRUE(val->IsArray());
+
+ std::vector<T> result;
+ v8::Local<v8::Array> array(v8::Local<v8::Array>::Cast(val));
+ uint32_t length = array->Length();
+ for (uint32_t i = 0; i < length; ++i) {
+ v8::Local<v8::Value> v8_item;
+ EXPECT_TRUE(
+ array->Get(isolate_->GetCurrentContext(), i).ToLocal(&v8_item));
+ T item;
+ if (v8_item->IsNumber()) {
+ EXPECT_TRUE(gin::Converter<T>::FromV8(isolate_, v8_item, &item));
+ } else if (v8_item->IsBigInt()) {
+ item = FromV8BigInt<T>(v8_item);
+ } else {
+ ADD_FAILURE();
+ }
+ result.push_back(item);
+ }
+
+ return result;
+ }
+
+ bool IsNull(const std::string& name) {
+ return runner_.global()->Get(gin::StringToV8(isolate_, name))->IsNull();
+ }
+
+ void DestroyBindingsForTesting() { zx_bindings_.reset(); }
+
+ zx::channel& server() { return server_; }
+ zx::channel& client() { return client_; }
+ gin::ShellRunner& runner() { return runner_; }
+
+ private:
+ v8::Isolate* isolate_;
+ v8::HandleScope handle_scope_;
+ FidlGenJsTestShellRunnerDelegate delegate_;
+ gin::ShellRunner runner_;
+ gin::Runner::Scope scope_;
+ std::unique_ptr<fidljs::ZxBindings> zx_bindings_;
+ zx::channel server_;
+ zx::channel client_;
+
+ DISALLOW_COPY_AND_ASSIGN(BindingsSetupHelper);
+};
+
+class AnotherInterfaceImpl : public fidljstest::AnotherInterface {
+ public:
+ AnotherInterfaceImpl(
+ fidl::InterfaceRequest<fidljstest::AnotherInterface> request)
+ : binding_(this, std::move(request)) {}
+ ~AnotherInterfaceImpl() override = default;
+
+ void TimesTwo(int32_t a, TimesTwoCallback callback) override {
+ callback(a * 2);
+ }
+
+ private:
+ fidl::Binding<fidljstest::AnotherInterface> binding_;
+
+ DISALLOW_COPY_AND_ASSIGN(AnotherInterfaceImpl);
+};
+
+class TestolaImpl : public fidljstest::Testola {
+ public:
+ TestolaImpl() {
+ // Don't want the default values from the C++ side.
+ memset(&basic_struct_, -1, sizeof(basic_struct_));
+ }
+ ~TestolaImpl() override {}
+
+ void DoSomething() override { was_do_something_called_ = true; }
+
+ void PrintInt(int32_t number) override { received_int_ = number; }
+
+ void PrintMsg(std::string message) override { received_msg_ = message; }
+
+ void VariousArgs(fidljstest::Blorp blorp,
+ std::string msg,
+ std::vector<uint32_t> stuff) override {
+ various_blorp_ = blorp;
+ various_msg_ = msg;
+ various_stuff_ = stuff;
+ }
+
+ void WithResponse(int32_t a,
+ int32_t b,
+ WithResponseCallback callback) override {
+ response_callbacks_.push_back(base::BindOnce(
+ [](WithResponseCallback callback, int32_t result) { callback(result); },
+ std::move(callback), a + b));
+ }
+
+ void SendAStruct(fidljstest::BasicStruct basic_struct) override {
+ basic_struct_ = basic_struct;
+ }
+
+ void NestedStructsWithResponse(
+ fidljstest::BasicStruct basic_struct,
+ NestedStructsWithResponseCallback resp) override {
+    // Construct a response, echoing the passed-in structure with some
+    // modifications, as well as additional data.
+ fidljstest::StuffAndThings sat;
+ sat.count = 123;
+ sat.id = "here is my id";
+ sat.a_vector.push_back(1);
+ sat.a_vector.push_back(-2);
+ sat.a_vector.push_back(4);
+ sat.a_vector.push_back(-8);
+ sat.basic.b = !basic_struct.b;
+ sat.basic.i8 = basic_struct.i8 * 2;
+ sat.basic.i16 = basic_struct.i16 * 2;
+ sat.basic.i32 = basic_struct.i32 * 2;
+ sat.basic.u8 = basic_struct.u8 * 2;
+ sat.basic.u16 = basic_struct.u16 * 2;
+ sat.basic.u32 = basic_struct.u32 * 2;
+ sat.later_string = "ⓣⓔⓡⓜⓘⓝⓐⓣⓞⓡ";
+ for (uint64_t i = 0; i < fidljstest::ARRRR_SIZE; ++i) {
+ sat.arrrr[i] = static_cast<int32_t>(i * 5) - 10;
+ }
+ sat.nullable_vector_of_string0 = nullptr;
+ std::vector<std::string> vector_of_str;
+ vector_of_str.push_back("passed_str0");
+ vector_of_str.push_back("passed_str1");
+ sat.nullable_vector_of_string1.reset(std::move(vector_of_str));
+ std::vector<fidljstest::Blorp> vector_of_blorp;
+ vector_of_blorp.push_back(fidljstest::Blorp::GAMMA);
+ vector_of_blorp.push_back(fidljstest::Blorp::BETA);
+ vector_of_blorp.push_back(fidljstest::Blorp::BETA);
+ vector_of_blorp.push_back(fidljstest::Blorp::ALPHA);
+ sat.vector_of_blorp = std::move(vector_of_blorp);
+
+ resp(std::move(sat));
+ }
+
+ void PassHandles(zx::job job, PassHandlesCallback callback) override {
+ EXPECT_EQ(GetKoidForHandle(job), GetKoidForHandle(*zx::job::default_job()));
+ zx::process process;
+ ASSERT_EQ(zx::process::self()->duplicate(ZX_RIGHT_SAME_RIGHTS, &process),
+ ZX_OK);
+ callback(std::move(process));
+ }
+
+ void ReceiveUnions(fidljstest::StructOfMultipleUnions somu) override {
+ EXPECT_TRUE(somu.initial.is_swb());
+ EXPECT_TRUE(somu.initial.swb().some_bool);
+
+ EXPECT_TRUE(somu.optional.get());
+ EXPECT_TRUE(somu.optional->is_lswa());
+ for (int i = 0; i < 32; ++i) {
+ EXPECT_EQ(somu.optional->lswa().components[i], i * 99);
+ }
+
+ EXPECT_TRUE(somu.trailing.is_swu());
+ EXPECT_EQ(somu.trailing.swu().num, 123456u);
+
+ did_receive_union_ = true;
+ }
+
+ void SendUnions(SendUnionsCallback callback) override {
+ fidljstest::StructOfMultipleUnions resp;
+
+ resp.initial.set_swb(fidljstest::StructWithBool());
+ resp.initial.swb().some_bool = true;
+
+ resp.optional = std::make_unique<fidljstest::UnionOfStructs>();
+ resp.optional->set_swu(fidljstest::StructWithUint());
+ resp.optional->swu().num = 987654;
+
+ resp.trailing.set_lswa(fidljstest::LargerStructWithArray());
+
+ callback(std::move(resp));
+ }
+
+ void SendVectorsOfString(std::vector<std::string> unsized,
+ std::vector<fidl::StringPtr> nullable,
+ std::vector<std::string> max_strlen) override {
+ ASSERT_EQ(unsized.size(), 3u);
+ EXPECT_EQ(unsized[0], "str0");
+ EXPECT_EQ(unsized[1], "str1");
+ EXPECT_EQ(unsized[2], "str2");
+
+ ASSERT_EQ(nullable.size(), 5u);
+ EXPECT_EQ(nullable[0], "str3");
+ EXPECT_TRUE(nullable[1].is_null());
+ EXPECT_TRUE(nullable[2].is_null());
+ EXPECT_TRUE(nullable[3].is_null());
+ EXPECT_EQ(nullable[4], "str4");
+
+ ASSERT_EQ(max_strlen.size(), 1u);
+ EXPECT_EQ(max_strlen[0], "0123456789");
+
+ did_get_vectors_of_string_ = true;
+ }
+
+ void VectorOfStruct(std::vector<fidljstest::StructWithUint> stuff,
+ VectorOfStructCallback callback) override {
+ ASSERT_EQ(stuff.size(), 4u);
+ EXPECT_EQ(stuff[0].num, 456u);
+ EXPECT_EQ(stuff[1].num, 789u);
+ EXPECT_EQ(stuff[2].num, 123u);
+ EXPECT_EQ(stuff[3].num, 0xfffffu);
+
+ std::vector<fidljstest::StructWithUint> response;
+ fidljstest::StructWithUint a;
+ a.num = 369;
+ response.push_back(a);
+ fidljstest::StructWithUint b;
+ b.num = 258;
+ response.push_back(b);
+ callback(std::move(response));
+ }
+
+ void PassVectorOfPrimitives(
+ fidljstest::VectorsOfPrimitives input,
+ PassVectorOfPrimitivesCallback callback) override {
+ ASSERT_EQ(input.v_bool.size(), 1u);
+ ASSERT_EQ(input.v_uint8.size(), 2u);
+ ASSERT_EQ(input.v_uint16.size(), 3u);
+ ASSERT_EQ(input.v_uint32.size(), 4u);
+ ASSERT_EQ(input.v_uint64.size(), 5u);
+ ASSERT_EQ(input.v_int8.size(), 6u);
+ ASSERT_EQ(input.v_int16.size(), 7u);
+ ASSERT_EQ(input.v_int32.size(), 8u);
+ ASSERT_EQ(input.v_int64.size(), 9u);
+ ASSERT_EQ(input.v_float32.size(), 10u);
+ ASSERT_EQ(input.v_float64.size(), 11u);
+
+ EXPECT_EQ(input.v_bool[0], true);
+
+ EXPECT_EQ(input.v_uint8[0], 2u);
+ EXPECT_EQ(input.v_uint8[1], 3u);
+
+ EXPECT_EQ(input.v_uint16[0], 4u);
+ EXPECT_EQ(input.v_uint16[1], 5u);
+ EXPECT_EQ(input.v_uint16[2], 6u);
+
+ EXPECT_EQ(input.v_uint32[0], 7u);
+ EXPECT_EQ(input.v_uint32[1], 8u);
+ EXPECT_EQ(input.v_uint32[2], 9u);
+ EXPECT_EQ(input.v_uint32[3], 10u);
+
+ EXPECT_EQ(input.v_uint64[0], 11u);
+ EXPECT_EQ(input.v_uint64[1], 12u);
+ EXPECT_EQ(input.v_uint64[2], 13u);
+ EXPECT_EQ(input.v_uint64[3], 14u);
+ EXPECT_EQ(input.v_uint64[4], 0xffffffffffffff00ULL);
+
+ EXPECT_EQ(input.v_int8[0], -16);
+ EXPECT_EQ(input.v_int8[1], -17);
+ EXPECT_EQ(input.v_int8[2], -18);
+ EXPECT_EQ(input.v_int8[3], -19);
+ EXPECT_EQ(input.v_int8[4], -20);
+ EXPECT_EQ(input.v_int8[5], -21);
+
+ EXPECT_EQ(input.v_int16[0], -22);
+ EXPECT_EQ(input.v_int16[1], -23);
+ EXPECT_EQ(input.v_int16[2], -24);
+ EXPECT_EQ(input.v_int16[3], -25);
+ EXPECT_EQ(input.v_int16[4], -26);
+ EXPECT_EQ(input.v_int16[5], -27);
+ EXPECT_EQ(input.v_int16[6], -28);
+
+ EXPECT_EQ(input.v_int32[0], -29);
+ EXPECT_EQ(input.v_int32[1], -30);
+ EXPECT_EQ(input.v_int32[2], -31);
+ EXPECT_EQ(input.v_int32[3], -32);
+ EXPECT_EQ(input.v_int32[4], -33);
+ EXPECT_EQ(input.v_int32[5], -34);
+ EXPECT_EQ(input.v_int32[6], -35);
+ EXPECT_EQ(input.v_int32[7], -36);
+
+ EXPECT_EQ(input.v_int64[0], -37);
+ EXPECT_EQ(input.v_int64[1], -38);
+ EXPECT_EQ(input.v_int64[2], -39);
+ EXPECT_EQ(input.v_int64[3], -40);
+ EXPECT_EQ(input.v_int64[4], -41);
+ EXPECT_EQ(input.v_int64[5], -42);
+ EXPECT_EQ(input.v_int64[6], -43);
+ EXPECT_EQ(input.v_int64[7], -44);
+ EXPECT_EQ(input.v_int64[8], -0x7fffffffffffffffLL);
+
+ EXPECT_EQ(input.v_float32[0], 46.f);
+ EXPECT_EQ(input.v_float32[1], 47.f);
+ EXPECT_EQ(input.v_float32[2], 48.f);
+ EXPECT_EQ(input.v_float32[3], 49.f);
+ EXPECT_EQ(input.v_float32[4], 50.f);
+ EXPECT_EQ(input.v_float32[5], 51.f);
+ EXPECT_EQ(input.v_float32[6], 52.f);
+ EXPECT_EQ(input.v_float32[7], 53.f);
+ EXPECT_EQ(input.v_float32[8], 54.f);
+ EXPECT_EQ(input.v_float32[9], 55.f);
+
+ EXPECT_EQ(input.v_float64[0], 56.0);
+ EXPECT_EQ(input.v_float64[1], 57.0);
+ EXPECT_EQ(input.v_float64[2], 58.0);
+ EXPECT_EQ(input.v_float64[3], 59.0);
+ EXPECT_EQ(input.v_float64[4], 60.0);
+ EXPECT_EQ(input.v_float64[5], 61.0);
+ EXPECT_EQ(input.v_float64[6], 62.0);
+ EXPECT_EQ(input.v_float64[7], 63.0);
+ EXPECT_EQ(input.v_float64[8], 64.0);
+ EXPECT_EQ(input.v_float64[9], 65.0);
+ EXPECT_EQ(input.v_float64[10], 66.0);
+
+ fidljstest::VectorsOfPrimitives output = std::move(input);
+#define INC_OUTPUT_ARRAY(v) \
+ for (size_t i = 0; i < output.v.size(); ++i) { \
+ output.v[i] += 10; \
+ }
+ INC_OUTPUT_ARRAY(v_uint8);
+ INC_OUTPUT_ARRAY(v_uint16);
+ INC_OUTPUT_ARRAY(v_uint32);
+ INC_OUTPUT_ARRAY(v_uint64);
+ INC_OUTPUT_ARRAY(v_int8);
+ INC_OUTPUT_ARRAY(v_int16);
+ INC_OUTPUT_ARRAY(v_int32);
+ INC_OUTPUT_ARRAY(v_int64);
+ INC_OUTPUT_ARRAY(v_float32);
+ INC_OUTPUT_ARRAY(v_float64);
+#undef INC_OUTPUT_ARRAY
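+
+  // For illustration, INC_OUTPUT_ARRAY(v_uint8) expands to:
+  //   for (size_t i = 0; i < output.v_uint8.size(); ++i) {
+  //     output.v_uint8[i] += 10;
+  //   }
+  // i.e. every element of each numeric vector is echoed back incremented by
+  // 10, while v_bool is returned unchanged.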
+
+ callback(std::move(output));
+ }
+
+ void PassVectorOfVMO(fidljstest::VectorOfHandleToVMO input,
+ PassVectorOfVMOCallback callback) override {
+ callback(std::move(input));
+ }
+
+ bool was_do_something_called() const { return was_do_something_called_; }
+ int32_t received_int() const { return received_int_; }
+ const std::string& received_msg() const { return received_msg_; }
+
+ fidljstest::Blorp various_blorp() const { return various_blorp_; }
+ const std::string& various_msg() const { return various_msg_; }
+ const std::vector<uint32_t>& various_stuff() const { return various_stuff_; }
+
+ fidljstest::BasicStruct GetReceivedStruct() const { return basic_struct_; }
+
+ bool did_receive_union() const { return did_receive_union_; }
+
+ bool did_get_vectors_of_string() const { return did_get_vectors_of_string_; }
+
+ void CallResponseCallbacks() {
+ for (auto& cb : response_callbacks_) {
+ std::move(cb).Run();
+ }
+ response_callbacks_.clear();
+ }
+
+ void GetAnother(
+ fidl::InterfaceRequest<fidljstest::AnotherInterface> request) override {
+ another_interface_impl_ =
+ std::make_unique<AnotherInterfaceImpl>(std::move(request));
+ }
+
+ private:
+ bool was_do_something_called_ = false;
+ int32_t received_int_ = -1;
+ std::string received_msg_;
+ fidljstest::Blorp various_blorp_;
+ std::string various_msg_;
+ std::vector<uint32_t> various_stuff_;
+ fidljstest::BasicStruct basic_struct_;
+ std::vector<base::OnceClosure> response_callbacks_;
+ bool did_receive_union_ = false;
+ bool did_get_vectors_of_string_ = false;
+ std::unique_ptr<AnotherInterfaceImpl> another_interface_impl_;
+
+ DISALLOW_COPY_AND_ASSIGN(TestolaImpl);
+};
+
+TEST_F(FidlGenJsTest, RawReceiveFidlMessage) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ proxy.DoSomething();
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Read it out, decode, and confirm it was dispatched.
+ TestolaImpl testola_impl;
+ fidljstest::Testola_Stub stub(&testola_impl);
+ uint8_t data[1024];
+ zx_handle_t handles[1];
+ uint32_t actual_bytes, actual_handles;
+ ASSERT_EQ(
+      helper.server().read(0, data, handles, base::size(data),
+                           base::size(handles), &actual_bytes, &actual_handles),
+ ZX_OK);
+ EXPECT_EQ(actual_bytes, 16u);
+ EXPECT_EQ(actual_handles, 0u);
+
+ fidl::Message message(
+ fidl::BytePart(data, actual_bytes, actual_bytes),
+ fidl::HandlePart(handles, actual_handles, actual_handles));
+ stub.Dispatch_(std::move(message), fidl::internal::PendingResponse());
+
+ EXPECT_TRUE(testola_impl.was_do_something_called());
+}
+
+TEST_F(FidlGenJsTest, RawReceiveFidlMessageWithSimpleArg) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ proxy.PrintInt(12345);
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Read it out, decode, and confirm it was dispatched.
+ TestolaImpl testola_impl;
+ fidljstest::Testola_Stub stub(&testola_impl);
+ uint8_t data[1024];
+ zx_handle_t handles[1];
+ uint32_t actual_bytes, actual_handles;
+ ASSERT_EQ(
+      helper.server().read(0, data, handles, base::size(data),
+                           base::size(handles), &actual_bytes, &actual_handles),
+ ZX_OK);
+  // 24 rather than 20 because everything is 8-byte aligned: a 16-byte message
+  // header plus a 4-byte int32 argument, padded up to 24.
+ EXPECT_EQ(actual_bytes, 24u);
+ EXPECT_EQ(actual_handles, 0u);
+
+ fidl::Message message(
+ fidl::BytePart(data, actual_bytes, actual_bytes),
+ fidl::HandlePart(handles, actual_handles, actual_handles));
+ stub.Dispatch_(std::move(message), fidl::internal::PendingResponse());
+
+ EXPECT_EQ(testola_impl.received_int(), 12345);
+}
+
+TEST_F(FidlGenJsTest, RawReceiveFidlMessageWithStringArg) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ proxy.PrintMsg('Ça c\'est a 你好 from deep in JS');
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Read it out, decode, and confirm it was dispatched.
+ TestolaImpl testola_impl;
+ fidljstest::Testola_Stub stub(&testola_impl);
+ uint8_t data[1024];
+ zx_handle_t handles[1];
+ uint32_t actual_bytes, actual_handles;
+ ASSERT_EQ(
+      helper.server().read(0, data, handles, base::size(data),
+                           base::size(handles), &actual_bytes, &actual_handles),
+ ZX_OK);
+ EXPECT_EQ(actual_handles, 0u);
+
+ fidl::Message message(
+ fidl::BytePart(data, actual_bytes, actual_bytes),
+ fidl::HandlePart(handles, actual_handles, actual_handles));
+ stub.Dispatch_(std::move(message), fidl::internal::PendingResponse());
+
+ EXPECT_EQ(testola_impl.received_msg(), "Ça c'est a 你好 from deep in JS");
+}
+
+TEST_F(FidlGenJsTest, RawReceiveFidlMessageWithMultipleArgs) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ proxy.VariousArgs(Blorp.GAMMA, 'zippy zap', [ 999, 987, 123456 ]);
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Read it out, decode, and confirm it was dispatched.
+ TestolaImpl testola_impl;
+ fidljstest::Testola_Stub stub(&testola_impl);
+ uint8_t data[1024];
+ zx_handle_t handles[1];
+ uint32_t actual_bytes, actual_handles;
+ ASSERT_EQ(
+      helper.server().read(0, data, handles, base::size(data),
+                           base::size(handles), &actual_bytes, &actual_handles),
+ ZX_OK);
+ EXPECT_EQ(actual_handles, 0u);
+
+ fidl::Message message(
+ fidl::BytePart(data, actual_bytes, actual_bytes),
+ fidl::HandlePart(handles, actual_handles, actual_handles));
+ stub.Dispatch_(std::move(message), fidl::internal::PendingResponse());
+
+ EXPECT_EQ(testola_impl.various_blorp(), fidljstest::Blorp::GAMMA);
+ EXPECT_EQ(testola_impl.various_msg(), "zippy zap");
+ ASSERT_EQ(testola_impl.various_stuff().size(), 3u);
+ EXPECT_EQ(testola_impl.various_stuff()[0], 999u);
+ EXPECT_EQ(testola_impl.various_stuff()[1], 987u);
+ EXPECT_EQ(testola_impl.various_stuff()[2], 123456u);
+}
+
+TEST_F(FidlGenJsTest, RawWithResponse) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ this.sum_result = -1;
+ proxy.WithResponse(72, 99)
+ .then(sum => {
+ this.sum_result = sum;
+ })
+ .catch((e) => log('FAILED: ' + e));
+ )";
+ helper.runner().Run(source, "test.js");
+
+ base::RunLoop().RunUntilIdle();
+
+ testola_impl.CallResponseCallbacks();
+
+ base::RunLoop().RunUntilIdle();
+
+ // Confirm that the response was received with the correct value.
+ auto sum_result = helper.Get<int>("sum_result");
+ EXPECT_EQ(sum_result, 72 + 99);
+}
+
+TEST_F(FidlGenJsTest, NoResponseBeforeTearDown) {
+ v8::Isolate* isolate = instance_->isolate();
+
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ this.resolved = false;
+ this.rejected = false;
+ this.excepted = false;
+ proxy.WithResponse(1, 2)
+ .then(sum => {
+ this.resolved = true;
+ }, () => {
+ this.rejected = true;
+ })
+ .catch((e) => {
+ log('FAILED: ' + e);
+ this.excepted = true;
+ })
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Run the message loop to read and queue the request, but don't send the
+ // response.
+ base::RunLoop().RunUntilIdle();
+
+ // This causes outstanding waits to be canceled.
+ helper.DestroyBindingsForTesting();
+
+ EXPECT_FALSE(helper.Get<bool>("resolved"));
+ EXPECT_TRUE(helper.Get<bool>("rejected"));
+ EXPECT_FALSE(helper.Get<bool>("excepted"));
+}
+
+TEST_F(FidlGenJsTest, RawReceiveFidlStructMessage) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ var basicStruct = new BasicStruct(
+ true, -30, undefined, -789, 200, 65000, 0);
+ proxy.SendAStruct(basicStruct);
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Run the dispatcher to read and dispatch the response.
+ base::RunLoop().RunUntilIdle();
+
+ fidljstest::BasicStruct received_struct = testola_impl.GetReceivedStruct();
+ EXPECT_EQ(received_struct.b, true);
+ EXPECT_EQ(received_struct.i8, -30);
+ EXPECT_EQ(received_struct.i16, 18); // From defaults.
+ EXPECT_EQ(received_struct.i32, -789);
+ EXPECT_EQ(received_struct.u8, 200);
+ EXPECT_EQ(received_struct.u16, 65000);
+ // Make sure this didn't get defaulted, even though it has a false-ish value.
+ EXPECT_EQ(received_struct.u32, 0u);
+}
+
+TEST_F(FidlGenJsTest, RawReceiveFidlNestedStructsAndRespond) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ // Send the data from the JS side into the channel.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ var toSend = new BasicStruct(false, -5, -6, -7, 8, 32000, 2000000000);
+ proxy.NestedStructsWithResponse(toSend)
+ .then(sat => {
+ this.result_count = sat.count;
+ this.result_id = sat.id;
+ this.result_vector = sat.a_vector;
+ this.result_basic_b = sat.basic.b;
+ this.result_basic_i8 = sat.basic.i8;
+ this.result_basic_i16 = sat.basic.i16;
+ this.result_basic_i32 = sat.basic.i32;
+ this.result_basic_u8 = sat.basic.u8;
+ this.result_basic_u16 = sat.basic.u16;
+ this.result_basic_u32 = sat.basic.u32;
+ this.result_later_string = sat.later_string;
+ this.result_arrrr = sat.arrrr;
+ this.result_vs0 = sat.nullable_vector_of_string0;
+ this.result_vs1 = sat.nullable_vector_of_string1;
+ this.result_vblorp = sat.vector_of_blorp;
+ })
+ .catch((e) => log('FAILED: ' + e));
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Run the message loop to read the request and write the response.
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_EQ(helper.Get<int>("result_count"), 123);
+ EXPECT_EQ(helper.Get<std::string>("result_id"), "here is my id");
+ auto result_vector = helper.Get<std::vector<int>>("result_vector");
+ ASSERT_EQ(result_vector.size(), 4u);
+ EXPECT_EQ(result_vector[0], 1);
+ EXPECT_EQ(result_vector[1], -2);
+ EXPECT_EQ(result_vector[2], 4);
+ EXPECT_EQ(result_vector[3], -8);
+ EXPECT_EQ(helper.Get<bool>("result_basic_b"), true);
+ EXPECT_EQ(helper.Get<int>("result_basic_i8"), -10);
+ EXPECT_EQ(helper.Get<int>("result_basic_i16"), -12);
+ EXPECT_EQ(helper.Get<int>("result_basic_i32"), -14);
+ EXPECT_EQ(helper.Get<unsigned int>("result_basic_u8"), 16u);
+ EXPECT_EQ(helper.Get<unsigned int>("result_basic_u16"), 64000u);
+ EXPECT_EQ(helper.Get<unsigned int>("result_basic_u32"), 4000000000u);
+ EXPECT_EQ(helper.Get<std::string>("result_later_string"), "ⓣⓔⓡⓜⓘⓝⓐⓣⓞⓡ");
+  // Retrieve as a vector, since a fidl array has the same representation as a
+  // vector in JS (and gin already supports vector), and verify the length
+  // matches the expected length of the fidl array.
+ auto result_arrrr = helper.Get<std::vector<int32_t>>("result_arrrr");
+ ASSERT_EQ(result_arrrr.size(), fidljstest::ARRRR_SIZE);
+ for (uint64_t i = 0; i < fidljstest::ARRRR_SIZE; ++i) {
+ EXPECT_EQ(result_arrrr[i], static_cast<int32_t>(i * 5) - 10);
+ }
+ EXPECT_TRUE(helper.IsNull("result_vs0"));
+ EXPECT_FALSE(helper.IsNull("result_vs1"));
+ auto result_vs1 = helper.Get<std::vector<std::string>>("result_vs1");
+ ASSERT_EQ(result_vs1.size(), 2u);
+ EXPECT_EQ(result_vs1[0], "passed_str0");
+ EXPECT_EQ(result_vs1[1], "passed_str1");
+
+  // This is a vector of enum class fidljstest::Blorp, but gin can't retrieve
+  // those directly, so get it as ints and cast when checking the values.
+ auto result_vblorp = helper.Get<std::vector<int>>("result_vblorp");
+ ASSERT_EQ(result_vblorp.size(), 4u);
+ EXPECT_EQ(result_vblorp[0], static_cast<int>(fidljstest::Blorp::GAMMA));
+ EXPECT_EQ(result_vblorp[1], static_cast<int>(fidljstest::Blorp::BETA));
+ EXPECT_EQ(result_vblorp[2], static_cast<int>(fidljstest::Blorp::BETA));
+ EXPECT_EQ(result_vblorp[3], static_cast<int>(fidljstest::Blorp::ALPHA));
+}
+
+TEST_F(FidlGenJsTest, HandlePassing) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ zx::job default_job_copy;
+ ASSERT_EQ(zx::job::default_job()->duplicate(ZX_RIGHT_SAME_RIGHTS,
+ &default_job_copy),
+ ZX_OK);
+ helper.runner().global()->Set(
+ gin::StringToSymbol(isolate, "testJobHandle"),
+ gin::ConvertToV8(isolate, default_job_copy.get()));
+
+  // TODO(crbug.com/883496): Wrap handles in Transferables once MessagePort
+  // is sorted out, and then stop treating handles as unmanaged |uint32_t|s.
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ proxy.PassHandles(testJobHandle).then(h => {
+ this.processHandle = h;
+ }).catch((e) => log('FAILED: ' + e));
+ )";
+ helper.runner().Run(source, "test.js");
+
+ // Run the message loop to send the request and receive a response.
+ base::RunLoop().RunUntilIdle();
+
+ zx_handle_t process_handle_back_from_js =
+ helper.Get<uint32_t>("processHandle");
+ EXPECT_EQ(GetKoidForHandle(process_handle_back_from_js),
+ GetKoidForHandle(*zx::process::self()));
+
+  // Make sure we received the handle back correctly, and close it. It is not
+  // stored into a zx::process in case it isn't valid, and so that the return
+  // value from closing it can be checked.
+ EXPECT_EQ(zx_handle_close(process_handle_back_from_js), ZX_OK);
+
+ // Ensure we didn't pass away our default job, or process self.
+ EXPECT_NE(GetKoidForHandle(*zx::job::default_job()), ZX_KOID_INVALID);
+ EXPECT_NE(GetKoidForHandle(*zx::process::self()), ZX_KOID_INVALID);
+}
+
+TEST_F(FidlGenJsTest, UnionSend) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ var somu = new StructOfMultipleUnions();
+
+ var swb = new StructWithBool(/*some_bool*/ true);
+ somu.initial.set_swb(swb);
+
+ var lswa = new LargerStructWithArray([]);
+ for (var i = 0; i < 32; ++i) {
+ lswa.components[i] = i * 99;
+ }
+ somu.optional.set_lswa(lswa);
+
+ somu.trailing.set_swu(new StructWithUint(123456));
+
+ proxy.ReceiveUnions(somu);
+ )";
+ helper.runner().Run(source, "test.js");
+
+ base::RunLoop().RunUntilIdle();
+
+  // Expectations on the contents of the unions are checked in the body of
+  // TestolaImpl::ReceiveUnions().
+ EXPECT_TRUE(testola_impl.did_receive_union());
+}
+
+TEST_F(FidlGenJsTest, UnionReceive) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+ proxy.SendUnions().then(resp => {
+ this.result_initial_is_swb = resp.initial.is_swb();
+ this.result_initial_is_swu = resp.initial.is_swu();
+ this.result_initial_is_lswa = resp.initial.is_lswa();
+ this.result_optional_is_swb = resp.optional.is_swb();
+ this.result_optional_is_swu = resp.optional.is_swu();
+ this.result_optional_is_lswa = resp.optional.is_lswa();
+ this.result_trailing_is_swb = resp.trailing.is_swb();
+ this.result_trailing_is_swu = resp.trailing.is_swu();
+ this.result_trailing_is_lswa = resp.trailing.is_lswa();
+
+ this.result_initial_some_bool = resp.initial.swb.some_bool;
+ this.result_optional_num = resp.optional.swu.num;
+ }).catch((e) => log('FAILED: ' + e));
+ )";
+ helper.runner().Run(source, "test.js");
+
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_TRUE(helper.Get<bool>("result_initial_is_swb"));
+ EXPECT_FALSE(helper.Get<bool>("result_initial_is_swu"));
+ EXPECT_FALSE(helper.Get<bool>("result_initial_is_lswa"));
+
+ EXPECT_FALSE(helper.Get<bool>("result_optional_is_swb"));
+ EXPECT_TRUE(helper.Get<bool>("result_optional_is_swu"));
+ EXPECT_FALSE(helper.Get<bool>("result_optional_is_lswa"));
+
+ EXPECT_FALSE(helper.Get<bool>("result_trailing_is_swb"));
+ EXPECT_FALSE(helper.Get<bool>("result_trailing_is_swu"));
+ EXPECT_TRUE(helper.Get<bool>("result_trailing_is_lswa"));
+
+ EXPECT_TRUE(helper.Get<bool>("result_initial_some_bool"));
+ EXPECT_EQ(helper.Get<uint32_t>("result_optional_num"), 987654u);
+}
+
+TEST_F(FidlGenJsTest, VariousDefaults) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ std::string source = R"(
+ var temp = new VariousDefaults();
+ this.result_blorp = temp.blorp_defaulting_to_beta;
+ this.result_timestamp = temp.int64_defaulting_to_no_timestamp;
+ this.result_another_copy = ANOTHER_COPY;
+ this.result_int64_const = temp.int64_defaulting_to_const;
+ this.result_string_in_struct = temp.string_with_default;
+ this.result_string_const = SOME_STRING;
+ )";
+ helper.runner().Run(source, "test.js");
+
+ EXPECT_EQ(helper.Get<int>("result_blorp"),
+ static_cast<int>(fidljstest::Blorp::BETA));
+ EXPECT_EQ(helper.FromV8BigInt<int64_t>(helper.runner().global()->Get(
+ gin::StringToV8(isolate, "result_timestamp"))),
+ fidljstest::NO_TIMESTAMP);
+ EXPECT_EQ(helper.FromV8BigInt<int64_t>(helper.runner().global()->Get(
+ gin::StringToV8(isolate, "result_another_copy"))),
+ fidljstest::ANOTHER_COPY);
+ EXPECT_EQ(helper.FromV8BigInt<int64_t>(helper.runner().global()->Get(
+ gin::StringToV8(isolate, "result_int64_const"))),
+ 0x7fffffffffffff11LL);
+ EXPECT_EQ(helper.Get<std::string>("result_string_const"),
+ "a 你好 thing\" containing ' quotes");
+ EXPECT_EQ(helper.Get<std::string>("result_string_in_struct"), "stuff");
+}
+
+TEST_F(FidlGenJsTest, VectorOfStrings) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+
+ var v1 = ['str0', 'str1', 'str2'];
+ var v2 = ['str3', null, null, null, 'str4'];
+ var v3 = ['0123456789']; // This is the maximum allowed length.
+ proxy.SendVectorsOfString(v1, v2, v3);
+ )";
+ helper.runner().Run(source, "test.js");
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_TRUE(testola_impl.did_get_vectors_of_string());
+}
+
+TEST_F(FidlGenJsTest, VectorOfStringsTooLongString) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+
+ var too_long = ['this string is longer than allowed'];
+ proxy.SendVectorsOfString([], [], too_long);
+ this.tried_to_send = true;
+ )";
+ helper.runner().Run(source, "test.js");
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_TRUE(helper.Get<bool>("tried_to_send"));
+ EXPECT_FALSE(testola_impl.did_get_vectors_of_string());
+}
+
+TEST_F(FidlGenJsTest, VectorOfStruct) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+
+ var data = [
+ new StructWithUint(456),
+ new StructWithUint(789),
+ new StructWithUint(123),
+ new StructWithUint(0xfffff),
+ ];
+ proxy.VectorOfStruct(data).then(resp => {
+ this.result_length = resp.length;
+ this.result_0 = resp[0].num;
+ this.result_1 = resp[1].num;
+ }).catch((e) => log('FAILED: ' + e));
+ )";
+ helper.runner().Run(source, "test.js");
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_EQ(helper.Get<uint32_t>("result_length"), 2u);
+ EXPECT_EQ(helper.Get<int>("result_0"), 369);
+ EXPECT_EQ(helper.Get<int>("result_1"), 258);
+}
+
+TEST_F(FidlGenJsTest, VectorsOfPrimitives) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+
+ var v_bool = [true];
+ var v_uint8 = [2, 3];
+ var v_uint16 = [4, 5, 6];
+ var v_uint32 = [7, 8, 9, 10];
+ var v_uint64 = [11, 12, 13, 14, 0xffffffffffffff00n];
+ var v_int8 = [-16, -17, -18, -19, -20, -21];
+ var v_int16 = [-22, -23, -24, -25, -26, -27, -28];
+ var v_int32 = [-29, -30, -31, -32, -33, -34, -35, -36];
+ var v_int64 = [-37, -38, -39, -40, -41, -42, -43, -44,
+ -0x7fffffffffffffffn];
+ var v_float32 = [46, 47, 48, 49, 50, 51, 52, 53, 54, 55];
+ var v_float64 = [56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66];
+
+ var data = new VectorsOfPrimitives(
+ v_bool,
+ v_uint8,
+ v_uint16,
+ v_uint32,
+ v_uint64,
+ v_int8,
+ v_int16,
+ v_int32,
+ v_int64,
+ v_float32,
+ v_float64);
+
+ proxy.PassVectorOfPrimitives(data).then(resp => {
+ this.result_v_bool = resp.v_bool;
+ this.result_v_uint8 = resp.v_uint8;
+ this.result_v_uint16 = resp.v_uint16;
+ this.result_v_uint32 = resp.v_uint32;
+ this.result_v_uint64 = resp.v_uint64;
+ this.result_v_int8 = resp.v_int8;
+ this.result_v_int16 = resp.v_int16;
+ this.result_v_int32 = resp.v_int32;
+ this.result_v_int64 = resp.v_int64;
+ this.result_v_float32 = resp.v_float32;
+ this.result_v_float64 = resp.v_float64;
+ }).catch((e) => log('FAILED: ' + e));
+ )";
+
+ helper.runner().Run(source, "test.js");
+ base::RunLoop().RunUntilIdle();
+
+ auto result_v_bool = helper.Get<std::vector<bool>>("result_v_bool");
+ auto result_v_uint8 = helper.Get<std::vector<unsigned int>>("result_v_uint8");
+ auto result_v_uint16 =
+ helper.Get<std::vector<unsigned int>>("result_v_uint16");
+ auto result_v_uint32 = helper.Get<std::vector<uint32_t>>("result_v_uint32");
+ auto result_v_uint64 = helper.GetBigIntVector<uint64_t>("result_v_uint64");
+ auto result_v_int8 = helper.Get<std::vector<int>>("result_v_int8");
+ auto result_v_int16 = helper.Get<std::vector<int>>("result_v_int16");
+ auto result_v_int32 = helper.Get<std::vector<int32_t>>("result_v_int32");
+ auto result_v_int64 = helper.GetBigIntVector<int64_t>("result_v_int64");
+ auto result_v_float32 = helper.Get<std::vector<float>>("result_v_float32");
+ auto result_v_float64 = helper.Get<std::vector<double>>("result_v_float64");
+
+ ASSERT_EQ(result_v_bool.size(), 1u);
+ ASSERT_EQ(result_v_uint8.size(), 2u);
+ ASSERT_EQ(result_v_uint16.size(), 3u);
+ ASSERT_EQ(result_v_uint32.size(), 4u);
+ ASSERT_EQ(result_v_uint64.size(), 5u);
+ ASSERT_EQ(result_v_int8.size(), 6u);
+ ASSERT_EQ(result_v_int16.size(), 7u);
+ ASSERT_EQ(result_v_int32.size(), 8u);
+ ASSERT_EQ(result_v_int64.size(), 9u);
+ ASSERT_EQ(result_v_float32.size(), 10u);
+ ASSERT_EQ(result_v_float64.size(), 11u);
+
+ // Check that all the responses have had 10 added to them (except bool).
+
+ EXPECT_EQ(result_v_bool[0], true);
+
+ EXPECT_EQ(result_v_uint8[0], 12u);
+ EXPECT_EQ(result_v_uint8[1], 13u);
+
+ EXPECT_EQ(result_v_uint16[0], 14u);
+ EXPECT_EQ(result_v_uint16[1], 15u);
+ EXPECT_EQ(result_v_uint16[2], 16u);
+
+ EXPECT_EQ(result_v_uint32[0], 17u);
+ EXPECT_EQ(result_v_uint32[1], 18u);
+ EXPECT_EQ(result_v_uint32[2], 19u);
+ EXPECT_EQ(result_v_uint32[3], 20u);
+
+ EXPECT_EQ(result_v_uint64[0], 21u);
+ EXPECT_EQ(result_v_uint64[1], 22u);
+ EXPECT_EQ(result_v_uint64[2], 23u);
+ EXPECT_EQ(result_v_uint64[3], 24u);
+ EXPECT_EQ(result_v_uint64[4], 0xffffffffffffff0aULL);
+
+ EXPECT_EQ(result_v_int8[0], -6);
+ EXPECT_EQ(result_v_int8[1], -7);
+ EXPECT_EQ(result_v_int8[2], -8);
+ EXPECT_EQ(result_v_int8[3], -9);
+ EXPECT_EQ(result_v_int8[4], -10);
+ EXPECT_EQ(result_v_int8[5], -11);
+
+ EXPECT_EQ(result_v_int16[0], -12);
+ EXPECT_EQ(result_v_int16[1], -13);
+ EXPECT_EQ(result_v_int16[2], -14);
+ EXPECT_EQ(result_v_int16[3], -15);
+ EXPECT_EQ(result_v_int16[4], -16);
+ EXPECT_EQ(result_v_int16[5], -17);
+ EXPECT_EQ(result_v_int16[6], -18);
+
+ EXPECT_EQ(result_v_int32[0], -19);
+ EXPECT_EQ(result_v_int32[1], -20);
+ EXPECT_EQ(result_v_int32[2], -21);
+ EXPECT_EQ(result_v_int32[3], -22);
+ EXPECT_EQ(result_v_int32[4], -23);
+ EXPECT_EQ(result_v_int32[5], -24);
+ EXPECT_EQ(result_v_int32[6], -25);
+ EXPECT_EQ(result_v_int32[7], -26);
+
+ EXPECT_EQ(result_v_int64[0], -27);
+ EXPECT_EQ(result_v_int64[1], -28);
+ EXPECT_EQ(result_v_int64[2], -29);
+ EXPECT_EQ(result_v_int64[3], -30);
+ EXPECT_EQ(result_v_int64[4], -31);
+ EXPECT_EQ(result_v_int64[5], -32);
+ EXPECT_EQ(result_v_int64[6], -33);
+ EXPECT_EQ(result_v_int64[7], -34);
+ EXPECT_EQ(result_v_int64[8], -0x7ffffffffffffff5LL);
+
+ EXPECT_EQ(result_v_float32[0], 56.f);
+ EXPECT_EQ(result_v_float32[1], 57.f);
+ EXPECT_EQ(result_v_float32[2], 58.f);
+ EXPECT_EQ(result_v_float32[3], 59.f);
+ EXPECT_EQ(result_v_float32[4], 60.f);
+ EXPECT_EQ(result_v_float32[5], 61.f);
+ EXPECT_EQ(result_v_float32[6], 62.f);
+ EXPECT_EQ(result_v_float32[7], 63.f);
+ EXPECT_EQ(result_v_float32[8], 64.f);
+ EXPECT_EQ(result_v_float32[9], 65.f);
+
+  EXPECT_EQ(result_v_float64[0], 66.0);
+  EXPECT_EQ(result_v_float64[1], 67.0);
+  EXPECT_EQ(result_v_float64[2], 68.0);
+  EXPECT_EQ(result_v_float64[3], 69.0);
+  EXPECT_EQ(result_v_float64[4], 70.0);
+  EXPECT_EQ(result_v_float64[5], 71.0);
+  EXPECT_EQ(result_v_float64[6], 72.0);
+  EXPECT_EQ(result_v_float64[7], 73.0);
+  EXPECT_EQ(result_v_float64[8], 74.0);
+  EXPECT_EQ(result_v_float64[9], 75.0);
+  EXPECT_EQ(result_v_float64[10], 76.0);
+}
+
+TEST_F(FidlGenJsTest, VectorOfHandle) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ zx::vmo test_vmo0, test_vmo1;
+ ASSERT_EQ(zx::vmo::create(4096, 0, &test_vmo0), ZX_OK);
+ ASSERT_EQ(zx::vmo::create(16384, 0, &test_vmo1), ZX_OK);
+
+ // Save to compare on return.
+ zx_koid_t koid_of_vmo0 = GetKoidForHandle(test_vmo0);
+ zx_koid_t koid_of_vmo1 = GetKoidForHandle(test_vmo1);
+
+ helper.runner().global()->Set(gin::StringToSymbol(isolate, "vmo0"),
+ gin::ConvertToV8(isolate, test_vmo0.release()));
+ helper.runner().global()->Set(gin::StringToSymbol(isolate, "vmo1"),
+ gin::ConvertToV8(isolate, test_vmo1.release()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+
+ proxy.PassVectorOfVMO(new VectorOfHandleToVMO([vmo0, vmo1])).then(
+ resp => {
+ this.result_vmo0 = resp.vmos[0];
+ this.result_vmo1 = resp.vmos[1];
+ }).catch((e) => log('FAILED: ' + e));
+ )";
+ helper.runner().Run(source, "test.js");
+ base::RunLoop().RunUntilIdle();
+
+ zx_handle_t result_vmo0 = helper.Get<zx_handle_t>("result_vmo0");
+ zx_handle_t result_vmo1 = helper.Get<zx_handle_t>("result_vmo1");
+
+ EXPECT_EQ(GetKoidForHandle(result_vmo0), koid_of_vmo0);
+ EXPECT_EQ(GetKoidForHandle(result_vmo1), koid_of_vmo1);
+
+ uint64_t size;
+ ASSERT_EQ(zx_vmo_get_size(result_vmo0, &size), ZX_OK);
+ EXPECT_EQ(size, 4096u);
+ ASSERT_EQ(zx_vmo_get_size(result_vmo1, &size), ZX_OK);
+ EXPECT_EQ(size, 16384u);
+
+ EXPECT_EQ(zx_handle_close(result_vmo0), ZX_OK);
+ EXPECT_EQ(zx_handle_close(result_vmo1), ZX_OK);
+}
+
+TEST_F(FidlGenJsTest, RequestInterface) {
+ v8::Isolate* isolate = instance_->isolate();
+ BindingsSetupHelper helper(isolate);
+
+ TestolaImpl testola_impl;
+ fidl::Binding<fidljstest::Testola> binding(&testola_impl);
+ binding.Bind(std::move(helper.server()));
+
+ std::string source = R"(
+ var proxy = new TestolaProxy();
+ proxy.$bind(testHandle);
+
+ var another_proxy = new AnotherInterfaceProxy();
+
+ proxy.GetAnother(another_proxy.$request());
+ this.is_bound = another_proxy.$is_bound();
+ another_proxy.TimesTwo(456).then(resp => {
+ this.result = resp;
+
+ // TODO(crbug.com/883496): Handle created by $request() must be manually
+ // closed for now to avoid leaking it.
+ another_proxy.$close();
+ }).catch((e) => log('FAILED: ' + e));
+
+ // Use the original interface to make sure we didn't break its connection.
+ proxy.PrintInt(789);
+ )";
+ helper.runner().Run(source, "test.js");
+ base::RunLoop().RunUntilIdle();
+
+ EXPECT_EQ(helper.Get<int>("result"), 456 * 2);
+ EXPECT_EQ(testola_impl.received_int(), 789);
+}
+
+int main(int argc, char** argv) {
+ base::TestSuite test_suite(argc, argv);
+
+ return base::LaunchUnitTests(
+ argc, argv,
+ base::BindOnce(&base::TestSuite::Run, base::Unretained(&test_suite)));
+}
diff --git a/deps/v8/build/fuchsia/fidlgen_js/test/simple.fidl b/deps/v8/build/fuchsia/fidlgen_js/test/simple.fidl
new file mode 100644
index 0000000000..18770650fb
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/test/simple.fidl
@@ -0,0 +1,142 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+library fidljstest;
+
+enum Blorp : int8 {
+ ALPHA = 1;
+ BETA = 2;
+ GAMMA = 0x48;
+};
+
+// A struct of basic types, some with defaults and some without, to test
+// various paths of the generator.
+struct BasicStruct {
+ bool b;
+ int8 i8;
+ int16 i16 = 18;
+ int32 i32;
+ uint8 u8;
+ uint16 u16;
+ uint32 u32 = 4000000000;
+};
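+
+// As exercised from JS in test/fidlgen_js_unittest.cc, a BasicStruct is
+// constructed positionally, and passing `undefined` for a field selects that
+// field's declared default:
+//   new BasicStruct(true, -30, undefined, -789, 200, 65000, 0);  // i16 == 18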
+
+const uint64 ARRRR_SIZE = 32;
+
+struct StuffAndThings {
+ int32 count;
+ string id;
+ vector<int32> a_vector;
+ BasicStruct basic;
+ string later_string;
+ array<int32>:ARRRR_SIZE arrrr;
+ vector<string>? nullable_vector_of_string0;
+ vector<string>? nullable_vector_of_string1;
+ vector<Blorp> vector_of_blorp;
+};
+
+struct StructWithBool {
+ bool some_bool = false;
+};
+
+struct StructWithUint {
+ uint32 num;
+};
+
+struct LargerStructWithArray {
+ array<int32>:32 components;
+};
+
+union UnionOfStructs {
+ StructWithBool swb;
+ StructWithUint swu;
+ LargerStructWithArray lswa;
+};
+
+struct StructOfMultipleUnions {
+ UnionOfStructs initial;
+ UnionOfStructs? optional;
+ UnionOfStructs trailing;
+};
+
+const int64 NO_TIMESTAMP = 0x7fffffffffffffff;
+const int64 ANOTHER_COPY = NO_TIMESTAMP;
+const string SOME_STRING = "a 你好 thing\" containing ' quotes";
+
+struct VariousDefaults {
+ Blorp blorp_defaulting_to_beta = BETA;
+ int64 int64_defaulting_to_no_timestamp = NO_TIMESTAMP;
+ int64 int64_defaulting_to_const = 0x7fffffffffffff11;
+ string string_with_default = "stuff";
+};
+
+struct VectorsOfPrimitives {
+ vector<bool> v_bool;
+ vector<uint8> v_uint8;
+ vector<uint16> v_uint16;
+ vector<uint32> v_uint32;
+ vector<uint64> v_uint64;
+ vector<int8> v_int8;
+ vector<int16> v_int16;
+ vector<int32> v_int32;
+ vector<int64> v_int64;
+ vector<float32> v_float32;
+ vector<float64> v_float64;
+};
+
+struct VectorOfHandleToVMO {
+ vector<handle<vmo>> vmos;
+};
+
+// This is a compile-only test for gen.py to ensure that the size of
+// AfterPreviousReference is available before the vector<AfterPreviousReference>
+// is compiled in this struct.
+struct LaterReference {
+ vector<AfterPreviousReference>? later;
+};
+
+struct AfterPreviousReference {
+ int32 an_int;
+};
+
+protocol AnotherInterface {
+ TimesTwo(int32 a) -> (int32 b);
+};
+
+protocol Testola {
+ DoSomething();
+
+ PrintInt(int32 num);
+
+ PrintMsg(string msg);
+
+ VariousArgs(Blorp blorp, string:32 msg, vector<uint32> stuff);
+
+ WithResponse(int32 a, int32 b) -> (int32 sum);
+
+ SendAStruct(BasicStruct basic);
+
+ NestedStructsWithResponse(BasicStruct basic) -> (StuffAndThings resp);
+
+ PassHandles(handle<job> job) -> (handle<process> process);
+
+ ReceiveUnions(StructOfMultipleUnions somu);
+
+ SendUnions() -> (StructOfMultipleUnions somu);
+
+ SendVectorsOfString(vector<string> unsized,
+ vector<string?> nullable,
+ vector<string:10> max_strlen);
+
+ VectorOfStruct(vector<StructWithUint> stuff)
+ -> (vector<StructWithUint> result);
+
+ PassVectorOfPrimitives(VectorsOfPrimitives input)
+ -> (VectorsOfPrimitives output);
+
+ PassVectorOfVMO(VectorOfHandleToVMO input)
+ -> (VectorOfHandleToVMO output);
+
+ GetAnother(request<AnotherInterface> another);
+};
diff --git a/deps/v8/build/fuchsia/fidlgen_js/third_party/__init__.py b/deps/v8/build/fuchsia/fidlgen_js/third_party/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/third_party/__init__.py
diff --git a/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE b/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE
new file mode 100644
index 0000000000..9003b8850e
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/LICENSE
@@ -0,0 +1,32 @@
+Copyright (c) 2013, Ethan Furman.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+ Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the
+ following disclaimer.
+
+ Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ Neither the name Ethan Furman nor the names of any
+ contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium b/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium
new file mode 100644
index 0000000000..4d0ef07c43
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/README.chromium
@@ -0,0 +1,15 @@
+Name: enum34
+Short Name: enum34
+URL: https://bitbucket.org/stoneleaf/enum34
+License: BSD
+License File: LICENSE
+Revision: f24487b
+Security Critical: no
+
+
+Description:
+
+'Enum' backported from Python 3.4 to earlier Python versions. Only LICENSE and
+__init__.py are taken; other packaging files, documentation, etc. have been
+removed.
+
+Only used at build time.
diff --git a/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py b/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py
new file mode 100644
index 0000000000..d6ffb3a40f
--- /dev/null
+++ b/deps/v8/build/fuchsia/fidlgen_js/third_party/enum34/__init__.py
@@ -0,0 +1,837 @@
+"""Python Enumerations"""
+
+import sys as _sys
+
+__all__ = ['Enum', 'IntEnum', 'unique']
+
+version = 1, 1, 6
+
+pyver = float('%s.%s' % _sys.version_info[:2])
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+try:
+ basestring
+except NameError:
+ # In Python 2 basestring is the ancestor of both str and unicode
+ # in Python 3 it's just str, but was missing in 3.1
+ basestring = str
+
+try:
+ unicode
+except NameError:
+ # In Python 3 unicode no longer exists (it's just str)
+ unicode = str
+
+class _RouteClassAttributeToGetattr(object):
+ """Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ """
+ def __init__(self, fget=None):
+ self.fget = fget
+
+ def __get__(self, instance, ownerclass=None):
+ if instance is None:
+ raise AttributeError()
+ return self.fget(instance)
+
+ def __set__(self, instance, value):
+ raise AttributeError("can't set attribute")
+
+ def __delete__(self, instance):
+ raise AttributeError("can't delete attribute")
+
+
+def _is_descriptor(obj):
+ """Returns True if obj is a descriptor, False otherwise."""
+ return (
+ hasattr(obj, '__get__') or
+ hasattr(obj, '__set__') or
+ hasattr(obj, '__delete__'))
+
+
+def _is_dunder(name):
+ """Returns True if a __dunder__ name, False otherwise."""
+ return (name[:2] == name[-2:] == '__' and
+ name[2:3] != '_' and
+ name[-3:-2] != '_' and
+ len(name) > 4)
+
+
+def _is_sunder(name):
+ """Returns True if a _sunder_ name, False otherwise."""
+ return (name[0] == name[-1] == '_' and
+ name[1:2] != '_' and
+ name[-2:-1] != '_' and
+ len(name) > 2)
+
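+# Illustrative checks: _is_dunder('__init__') and _is_sunder('_order_') are
+# True, while _is_dunder('__') and _is_sunder('_') are False because of the
+# length guards above.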
+
+def _make_class_unpicklable(cls):
+ """Make the given class un-picklable."""
+ def _break_on_call_reduce(self, protocol=None):
+ raise TypeError('%r cannot be pickled' % self)
+ cls.__reduce_ex__ = _break_on_call_reduce
+ cls.__module__ = '<unknown>'
+
+
+class _EnumDict(dict):
+ """Track enum member order and ensure member names are not reused.
+
+ EnumMeta will use the names found in self._member_names as the
+ enumeration member names.
+
+ """
+ def __init__(self):
+ super(_EnumDict, self).__init__()
+ self._member_names = []
+
+ def __setitem__(self, key, value):
+ """Changes anything not dundered or not a descriptor.
+
+ If a descriptor is added with the same name as an enum member, the name
+ is removed from _member_names (this may leave a hole in the numerical
+ sequence of values).
+
+ If an enum member name is used twice, an error is raised; duplicate
+ values are not checked for.
+
+ Single underscore (sunder) names are reserved.
+
+        Note: in 3.x, __order__ is simply discarded as an unnecessary
+        leftover from 2.x.
+
+ """
+ if pyver >= 3.0 and key in ('_order_', '__order__'):
+ return
+ elif key == '__order__':
+ key = '_order_'
+ if _is_sunder(key):
+ if key != '_order_':
+ raise ValueError('_names_ are reserved for future Enum use')
+ elif _is_dunder(key):
+ pass
+ elif key in self._member_names:
+ # descriptor overwriting an enum?
+ raise TypeError('Attempted to reuse key: %r' % key)
+ elif not _is_descriptor(value):
+ if key in self:
+ # enum overwriting a descriptor?
+ raise TypeError('Key already defined as: %r' % self[key])
+ self._member_names.append(key)
+ super(_EnumDict, self).__setitem__(key, value)
+
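+# For example, inside an Enum class body a second assignment such as `red = 2`
+# after `red = 1` raises TypeError, and assigning to a reserved sunder name
+# such as `_foo_` raises ValueError.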
+
+# Dummy value for Enum, as EnumMeta explicitly checks for it; of course, until
+# EnumMeta finishes running the first time the Enum class doesn't exist. This
+# is also why there are checks in EnumMeta like `if Enum is not None`.
+Enum = None
+
+
+class EnumMeta(type):
+ """Metaclass for Enum"""
+ @classmethod
+ def __prepare__(metacls, cls, bases):
+ return _EnumDict()
+
+ def __new__(metacls, cls, bases, classdict):
+ # an Enum class is final once enumeration items have been defined; it
+ # cannot be mixed with other types (int, float, etc.) if it has an
+ # inherited __new__ unless a new __new__ is defined (or the resulting
+ # class will fail).
+ if type(classdict) is dict:
+ original_dict = classdict
+ classdict = _EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+
+ member_type, first_enum = metacls._get_mixins_(bases)
+ __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
+ first_enum)
+ # save enum items into separate mapping so they don't get baked into
+ # the new class
+ members = dict((k, classdict[k]) for k in classdict._member_names)
+ for name in classdict._member_names:
+ del classdict[name]
+
+ # py2 support for definition order
+ _order_ = classdict.get('_order_')
+ if _order_ is None:
+ if pyver < 3.0:
+ try:
+ _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
+ except TypeError:
+ _order_ = [name for name in sorted(members.keys())]
+ else:
+ _order_ = classdict._member_names
+ else:
+ del classdict['_order_']
+ if pyver < 3.0:
+ _order_ = _order_.replace(',', ' ').split()
+ aliases = [name for name in members if name not in _order_]
+ _order_ += aliases
+
+ # check for illegal enum names (any others?)
+ invalid_names = set(members) & set(['mro'])
+ if invalid_names:
+ raise ValueError('Invalid enum member name(s): %s' % (
+ ', '.join(invalid_names), ))
+
+ # save attributes from super classes so we know if we can take
+ # the shortcut of storing members in the class dict
+ base_attributes = set([a for b in bases for a in b.__dict__])
+ # create our new Enum type
+ enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
+ enum_class._member_names_ = [] # names in random order
+ if OrderedDict is not None:
+ enum_class._member_map_ = OrderedDict()
+ else:
+ enum_class._member_map_ = {} # name->value map
+ enum_class._member_type_ = member_type
+
+ # Reverse value->name map for hashable values.
+ enum_class._value2member_map_ = {}
+
+ # instantiate them, checking for duplicates as we go
+ # we instantiate first instead of checking for duplicates first in case
+ # a custom __new__ is doing something funky with the values -- such as
+ # auto-numbering ;)
+ if __new__ is None:
+ __new__ = enum_class.__new__
+ for member_name in _order_:
+ value = members[member_name]
+ if not isinstance(value, tuple):
+ args = (value, )
+ else:
+ args = value
+ if member_type is tuple: # special case for tuple enums
+ args = (args, ) # wrap it one more time
+ if not use_args or not args:
+ enum_member = __new__(enum_class)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = value
+ else:
+ enum_member = __new__(enum_class, *args)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = member_type(*args)
+ value = enum_member._value_
+ enum_member._name_ = member_name
+ enum_member.__objclass__ = enum_class
+ enum_member.__init__(*args)
+ # If another member with the same value was already defined, the
+ # new member becomes an alias to the existing one.
+ for name, canonical_member in enum_class._member_map_.items():
+ if canonical_member.value == enum_member._value_:
+ enum_member = canonical_member
+ break
+ else:
+ # Aliases don't appear in member names (only in __members__).
+ enum_class._member_names_.append(member_name)
+ # performance boost for any member that would not shadow
+ # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
+ if member_name not in base_attributes:
+ setattr(enum_class, member_name, enum_member)
+ # now add to _member_map_
+ enum_class._member_map_[member_name] = enum_member
+ try:
+ # This may fail if value is not hashable. We can't add the value
+ # to the map, and by-value lookups for this value will be
+ # linear.
+ enum_class._value2member_map_[value] = enum_member
+ except TypeError:
+ pass
+
+
+ # If a custom type is mixed into the Enum, and it does not know how
+ # to pickle itself, pickle.dumps will succeed but pickle.loads will
+ # fail. Rather than have the error show up later and possibly far
+ # from the source, sabotage the pickle protocol for this class so
+ # that pickle.dumps also fails.
+ #
+ # However, if the new class implements its own __reduce_ex__, do not
+ # sabotage -- it's on them to make sure it works correctly. We use
+ # __reduce_ex__ instead of any of the others as it is preferred by
+ # pickle over __reduce__, and it handles all pickle protocols.
+ unpicklable = False
+ if '__reduce_ex__' not in classdict:
+ if member_type is not object:
+ methods = ('__getnewargs_ex__', '__getnewargs__',
+ '__reduce_ex__', '__reduce__')
+ if not any(m in member_type.__dict__ for m in methods):
+ _make_class_unpicklable(enum_class)
+ unpicklable = True
+
+
+ # double check that repr and friends are not the mixin's or various
+ # things break (such as pickle)
+ for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+ class_method = getattr(enum_class, name)
+ obj_method = getattr(member_type, name, None)
+ enum_method = getattr(first_enum, name, None)
+ if name not in classdict and class_method is not enum_method:
+ if name == '__reduce_ex__' and unpicklable:
+ continue
+ setattr(enum_class, name, enum_method)
+
+        # method resolution and ints are not playing nicely;
+        # Python versions earlier than 2.6 use __cmp__
+
+ if pyver < 2.6:
+
+ if issubclass(enum_class, int):
+ setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
+
+ elif pyver < 3.0:
+
+ if issubclass(enum_class, int):
+ for method in (
+ '__le__',
+ '__lt__',
+ '__gt__',
+ '__ge__',
+ '__eq__',
+ '__ne__',
+ '__hash__',
+ ):
+ setattr(enum_class, method, getattr(int, method))
+
+ # replace any other __new__ with our own (as long as Enum is not None,
+ # anyway) -- again, this is to support pickle
+ if Enum is not None:
+ # if the user defined their own __new__, save it before it gets
+ # clobbered in case they subclass later
+ if save_new:
+ setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
+ setattr(enum_class, '__new__', Enum.__dict__['__new__'])
+ return enum_class
+
+ def __bool__(cls):
+ """
+        Classes/types should always be considered True.
+ """
+ return True
+
+ def __call__(cls, value, names=None, module=None, type=None, start=1):
+ """Either returns an existing member, or creates a new enum class.
+
+ This method is used both when an enum class is given a value to match
+ to an enumeration member (i.e. Color(3)) and for the functional API
+ (i.e. Color = Enum('Color', names='red green blue')).
+
+ When used for the functional API: `module`, if set, will be stored in
+ the new class' __module__ attribute; `type`, if set, will be mixed in
+ as the first base class.
+
+ Note: if `module` is not set this routine will attempt to discover the
+ calling module by walking the frame stack; if this is unsuccessful
+ the resulting class will not be pickleable.
+
+ """
+ if names is None: # simple value lookup
+ return cls.__new__(cls, value)
+ # otherwise, functional API: we're creating a new Enum type
+ return cls._create_(value, names, module=module, type=type, start=start)
+
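+    # Both uses of __call__, per the docstring above (illustrative):
+    #
+    #     Color(3)                                 # member lookup by value
+    #     Color = Enum('Color', 'red green blue')  # functional API
+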
+ def __contains__(cls, member):
+ return isinstance(member, cls) and member.name in cls._member_map_
+
+ def __delattr__(cls, attr):
+ # nicer error message when someone tries to delete an attribute
+ # (see issue19025).
+ if attr in cls._member_map_:
+ raise AttributeError(
+ "%s: cannot delete Enum member." % cls.__name__)
+ super(EnumMeta, cls).__delattr__(attr)
+
+ def __dir__(self):
+ return (['__class__', '__doc__', '__members__', '__module__'] +
+ self._member_names_)
+
+ @property
+ def __members__(cls):
+ """Returns a mapping of member name->value.
+
+ This mapping lists all enum members, including aliases. Note that this
+ is a copy of the internal mapping.
+
+ """
+ return cls._member_map_.copy()
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+
+ """
+ if _is_dunder(name):
+ raise AttributeError(name)
+ try:
+ return cls._member_map_[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __getitem__(cls, name):
+ return cls._member_map_[name]
+
+ def __iter__(cls):
+ return (cls._member_map_[name] for name in cls._member_names_)
+
+ def __reversed__(cls):
+ return (cls._member_map_[name] for name in reversed(cls._member_names_))
+
+ def __len__(cls):
+ return len(cls._member_names_)
+
+ __nonzero__ = __bool__
+
+ def __repr__(cls):
+ return "<enum %r>" % cls.__name__
+
+ def __setattr__(cls, name, value):
+ """Block attempts to reassign Enum members.
+
+ A simple assignment to the class namespace only changes one of the
+ several possible ways to get an Enum member from the Enum class,
+ resulting in an inconsistent Enumeration.
+
+ """
+ member_map = cls.__dict__.get('_member_map_', {})
+ if name in member_map:
+ raise AttributeError('Cannot reassign members.')
+ super(EnumMeta, cls).__setattr__(name, value)
+
+ def _create_(cls, class_name, names=None, module=None, type=None, start=1):
+ """Convenience method to create a new Enum class.
+
+ `names` can be:
+
+ * A string containing member names, separated either with spaces or
+ commas. Values are auto-numbered from 1.
+ * An iterable of member names. Values are auto-numbered from 1.
+ * An iterable of (member name, value) pairs.
+ * A mapping of member name -> value.
+
+ """
+ if pyver < 3.0:
+ # if class_name is unicode, attempt a conversion to ASCII
+ if isinstance(class_name, unicode):
+ try:
+ class_name = class_name.encode('ascii')
+ except UnicodeEncodeError:
+ raise TypeError('%r is not representable in ASCII' % class_name)
+ metacls = cls.__class__
+ if type is None:
+ bases = (cls, )
+ else:
+ bases = (type, cls)
+ classdict = metacls.__prepare__(class_name, bases)
+ _order_ = []
+
+ # special processing needed for names?
+ if isinstance(names, basestring):
+ names = names.replace(',', ' ').split()
+ if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
+ names = [(e, i+start) for (i, e) in enumerate(names)]
+
+ # Here, names is either an iterable of (name, value) or a mapping.
+ item = None # in case names is empty
+ for item in names:
+ if isinstance(item, basestring):
+ member_name, member_value = item, names[item]
+ else:
+ member_name, member_value = item
+ classdict[member_name] = member_value
+ _order_.append(member_name)
+ # only set _order_ in classdict if name/value was not from a mapping
+ if not isinstance(item, basestring):
+ classdict['_order_'] = ' '.join(_order_)
+ enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+
+ # TODO: replace the frame hack if a blessed way to know the calling
+ # module is ever developed
+ if module is None:
+ try:
+ module = _sys._getframe(2).f_globals['__name__']
+ except (AttributeError, ValueError):
+ pass
+ if module is None:
+ _make_class_unpicklable(enum_class)
+ else:
+ enum_class.__module__ = module
+
+ return enum_class
+
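+    # The four accepted forms of `names` (illustrative):
+    #
+    #     Enum('Animal', 'ant bee')                  # or 'ant,bee'
+    #     Enum('Animal', ['ant', 'bee'])             # values auto-numbered
+    #     Enum('Animal', [('ant', 1), ('bee', 2)])
+    #     Enum('Animal', {'ant': 1, 'bee': 2})
+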
+ @staticmethod
+ def _get_mixins_(bases):
+ """Returns the type for creating enum members, and the first inherited
+ enum class.
+
+ bases: the tuple of bases that was given to __new__
+
+ """
+ if not bases or Enum is None:
+ return object, Enum
+
+
+ # double check that we are not subclassing a class with existing
+ # enumeration members; while we're at it, see if any other data
+ # type has been mixed in so we can use the correct __new__
+ member_type = first_enum = None
+ for base in bases:
+ if (base is not Enum and
+ issubclass(base, Enum) and
+ base._member_names_):
+ raise TypeError("Cannot extend enumerations")
+ # base is now the last base in bases
+ if not issubclass(base, Enum):
+ raise TypeError("new enumerations must be created as "
+ "`ClassName([mixin_type,] enum_type)`")
+
+ # get correct mix-in type (either mix-in type of Enum subclass, or
+ # first base if last base is Enum)
+ if not issubclass(bases[0], Enum):
+ member_type = bases[0] # first data type
+ first_enum = bases[-1] # enum type
+ else:
+ for base in bases[0].__mro__:
+ # most common: (IntEnum, int, Enum, object)
+ # possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
+ # <class 'int'>, <Enum 'Enum'>,
+ # <class 'object'>)
+ if issubclass(base, Enum):
+ if first_enum is None:
+ first_enum = base
+ else:
+ if member_type is None:
+ member_type = base
+
+ return member_type, first_enum
+
+ if pyver < 3.0:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+        # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+ if __new__:
+ return None, True, True # __new__, save_new, use_args
+
+ N__new__ = getattr(None, '__new__')
+ O__new__ = getattr(object, '__new__')
+ if Enum is None:
+ E__new__ = N__new__
+ else:
+ E__new__ = Enum.__dict__['__new__']
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ try:
+ target = possible.__dict__[method]
+ except (AttributeError, KeyError):
+ target = getattr(possible, method, None)
+ if target not in [
+ None,
+ N__new__,
+ O__new__,
+ E__new__,
+ ]:
+ if method == '__member_new__':
+ classdict['__new__'] = target
+ return None, False, True
+ if isinstance(target, staticmethod):
+ target = target.__get__(member_type)
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, False, use_args
+ else:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+        # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+
+ # should __new__ be saved as __member_new__ later?
+ save_new = __new__ is not None
+
+ if __new__ is None:
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ target = getattr(possible, method, None)
+ if target not in (
+ None,
+ None.__new__,
+ object.__new__,
+ Enum.__new__,
+ ):
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, save_new, use_args
+
+
+########################################################
+# In order to support Python 2 and 3 with a single
+# codebase we have to create the Enum methods separately
+# and then use the `type(name, bases, dict)` method to
+# create the class.
+########################################################
+temp_enum_dict = {}
+temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
+
+def __new__(cls, value):
+ # all enum instances are actually created during class construction
+ # without calling this method; this method is called by the metaclass'
+ # __call__ (i.e. Color(3) ), and by pickle
+ if type(value) is cls:
+ # For lookups like Color(Color.red)
+ value = value.value
+ #return value
+ # by-value search for a matching enum member
+ # see if it's in the reverse mapping (for hashable values)
+ try:
+ if value in cls._value2member_map_:
+ return cls._value2member_map_[value]
+ except TypeError:
+ # not there, now do long search -- O(n) behavior
+ for member in cls._member_map_.values():
+ if member.value == value:
+ return member
+ raise ValueError("%s is not a valid %s" % (value, cls.__name__))
+temp_enum_dict['__new__'] = __new__
+del __new__
+
+def __repr__(self):
+ return "<%s.%s: %r>" % (
+ self.__class__.__name__, self._name_, self._value_)
+temp_enum_dict['__repr__'] = __repr__
+del __repr__
+
+def __str__(self):
+ return "%s.%s" % (self.__class__.__name__, self._name_)
+temp_enum_dict['__str__'] = __str__
+del __str__
+
+if pyver >= 3.0:
+ def __dir__(self):
+ added_behavior = [
+ m
+ for cls in self.__class__.mro()
+ for m in cls.__dict__
+ if m[0] != '_' and m not in self._member_map_
+ ]
+ return (['__class__', '__doc__', '__module__', ] + added_behavior)
+ temp_enum_dict['__dir__'] = __dir__
+ del __dir__
+
+def __format__(self, format_spec):
+ # mixed-in Enums should use the mixed-in type's __format__, otherwise
+ # we can get strange results with the Enum name showing up instead of
+ # the value
+
+ # pure Enum branch
+ if self._member_type_ is object:
+ cls = str
+ val = str(self)
+ # mix-in branch
+ else:
+ cls = self._member_type_
+ val = self.value
+ return cls.__format__(val, format_spec)
+temp_enum_dict['__format__'] = __format__
+del __format__
+
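+# e.g. format(Color.red) yields 'Color.red' (pure Enum branch), while
+# formatting an IntEnum member with value 1 yields '1' via int.__format__
+# (mix-in branch).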
+
+####################################
+# Python versions earlier than 2.6 use __cmp__
+
+if pyver < 2.6:
+
+ def __cmp__(self, other):
+ if type(other) is self.__class__:
+ if self is other:
+ return 0
+ return -1
+ return NotImplemented
+ temp_enum_dict['__cmp__'] = __cmp__
+ del __cmp__
+
+else:
+
+ def __le__(self, other):
+ raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__le__'] = __le__
+ del __le__
+
+ def __lt__(self, other):
+ raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__lt__'] = __lt__
+ del __lt__
+
+ def __ge__(self, other):
+ raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__ge__'] = __ge__
+ del __ge__
+
+ def __gt__(self, other):
+ raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__gt__'] = __gt__
+ del __gt__
+
+
+def __eq__(self, other):
+ if type(other) is self.__class__:
+ return self is other
+ return NotImplemented
+temp_enum_dict['__eq__'] = __eq__
+del __eq__
+
+def __ne__(self, other):
+ if type(other) is self.__class__:
+ return self is not other
+ return NotImplemented
+temp_enum_dict['__ne__'] = __ne__
+del __ne__
+
+def __hash__(self):
+ return hash(self._name_)
+temp_enum_dict['__hash__'] = __hash__
+del __hash__
+
+def __reduce_ex__(self, proto):
+ return self.__class__, (self._value_, )
+temp_enum_dict['__reduce_ex__'] = __reduce_ex__
+del __reduce_ex__
+
+# _RouteClassAttributeToGetattr is used to provide access to the `name`
+# and `value` properties of enum members while keeping some measure of
+# protection from modification, while still allowing for an enumeration
+# to have members named `name` and `value`. This works because enumeration
+# members are not set directly on the enum class -- __getattr__ is
+# used to look them up.
+
+@_RouteClassAttributeToGetattr
+def name(self):
+ return self._name_
+temp_enum_dict['name'] = name
+del name
+
+@_RouteClassAttributeToGetattr
+def value(self):
+ return self._value_
+temp_enum_dict['value'] = value
+del value
+
+@classmethod
+def _convert(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(_sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ members = dict((name, value) for name, value in source.items() if filter(name))
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+temp_enum_dict['_convert'] = _convert
+del _convert
+
+Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
+del temp_enum_dict
+
+# Enum has now been created
+###########################
+
+class IntEnum(int, Enum):
+ """Enum where members are also (and must be) ints"""
+
+def _reduce_ex_by_name(self, proto):
+ return self.name
+
+def unique(enumeration):
+ """Class decorator that ensures only unique members exist in an enumeration."""
+ duplicates = []
+ for name, member in enumeration.__members__.items():
+ if name != member.name:
+ duplicates.append((name, member.name))
+ if duplicates:
+ duplicate_names = ', '.join(
+ ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
+ )
+ raise ValueError('duplicate names found in %r: %s' %
+ (enumeration, duplicate_names)
+ )
+ return enumeration
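+
+# Typical use of @unique (illustrative):
+#
+#     @unique
+#     class Mistake(Enum):
+#         one = 1
+#         two = 2
+#         four = 2       # alias of `two`; raises ValueError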
diff --git a/deps/v8/build/fuchsia/layout_test_proxy/BUILD.gn b/deps/v8/build/fuchsia/layout_test_proxy/BUILD.gn
new file mode 100644
index 0000000000..ad065071c5
--- /dev/null
+++ b/deps/v8/build/fuchsia/layout_test_proxy/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+import("//testing/test.gni")
+
+# Binary used to proxy TCP connections from a Fuchsia process. In principle
+# SSH could be used to forward TCP, but that feature is currently broken on
+# Fuchsia (see ZX-1555). layout_test_proxy can be removed once that sshd
+# issue is fixed and layout tests are updated to use SSH.
+executable("layout_test_proxy") {
+ testonly = true
+ sources = [
+ "layout_test_proxy.cc",
+ ]
+ deps = [
+ "//net",
+ "//net:test_support",
+ ]
+}
+
+fuchsia_package("layout_test_proxy_pkg") {
+ testonly = true
+ binary = ":layout_test_proxy"
+ package_name_override = "layout_test_proxy"
+}
+
+fuchsia_package_runner("layout_test_proxy_runner") {
+ testonly = true
+ package = ":layout_test_proxy_pkg"
+ package_name_override = "layout_test_proxy"
+}
diff --git a/deps/v8/build/fuchsia/layout_test_proxy/DEPS b/deps/v8/build/fuchsia/layout_test_proxy/DEPS
new file mode 100644
index 0000000000..8fa9d48d88
--- /dev/null
+++ b/deps/v8/build/fuchsia/layout_test_proxy/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+net",
+]
diff --git a/deps/v8/build/fuchsia/layout_test_proxy/layout_test_proxy.cc b/deps/v8/build/fuchsia/layout_test_proxy/layout_test_proxy.cc
new file mode 100644
index 0000000000..1d14df99ea
--- /dev/null
+++ b/deps/v8/build/fuchsia/layout_test_proxy/layout_test_proxy.cc
@@ -0,0 +1,78 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/command_line.h"
+#include "base/message_loop/message_loop.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_split.h"
+#include "net/base/ip_endpoint.h"
+#include "net/test/tcp_socket_proxy.h"
+
+const char kPortsSwitch[] = "ports";
+const char kRemoteAddressSwitch[] = "remote-address";
+
+int main(int argc, char** argv) {
+ base::CommandLine::Init(argc, argv);
+
+ base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
+
+ if (!command_line->HasSwitch(kPortsSwitch)) {
+ LOG(ERROR) << "--" << kPortsSwitch << " was not specified.";
+ return 1;
+ }
+
+ std::vector<std::string> ports_strings =
+ base::SplitString(command_line->GetSwitchValueASCII(kPortsSwitch), ",",
+ base::TRIM_WHITESPACE, base::SPLIT_WANT_NONEMPTY);
+ if (ports_strings.empty()) {
+ LOG(ERROR) << "At least one port must be specified with --" << kPortsSwitch;
+ return 1;
+ }
+
+ std::vector<int> ports;
+ for (auto& port_string : ports_strings) {
+ int port;
+ if (!base::StringToInt(port_string, &port) || port <= 0 || port > 65535) {
+ LOG(ERROR) << "Invalid value specified for --" << kPortsSwitch << ": "
+ << port_string;
+ return 1;
+ }
+ ports.push_back(port);
+ }
+
+ if (!command_line->HasSwitch(kRemoteAddressSwitch)) {
+ LOG(ERROR) << "--" << kRemoteAddressSwitch << " was not specified.";
+ return 1;
+ }
+
+ std::string remote_address_str =
+ command_line->GetSwitchValueASCII(kRemoteAddressSwitch);
+ net::IPAddress remote_address;
+ if (!remote_address.AssignFromIPLiteral(remote_address_str)) {
+ LOG(ERROR) << "Invalid value specified for --" << kRemoteAddressSwitch
+ << ": " << remote_address_str;
+ return 1;
+ }
+
+ base::MessageLoopForIO message_loop;
+
+ std::vector<std::unique_ptr<net::TcpSocketProxy>> proxies;
+
+ for (int port : ports) {
+ auto test_server_proxy =
+ std::make_unique<net::TcpSocketProxy>(message_loop.task_runner());
+ if (!test_server_proxy->Initialize(port)) {
+ LOG(ERROR) << "Can't bind proxy to port " << port;
+ return 1;
+ }
+ LOG(INFO) << "Listening on port " << test_server_proxy->local_port();
+ test_server_proxy->Start(net::IPEndPoint(remote_address, port));
+ proxies.push_back(std::move(test_server_proxy));
+ }
+
+ // Run the message loop indefinitely.
+ base::RunLoop().Run();
+
+ return 0;
+} \ No newline at end of file
diff --git a/deps/v8/build/fuchsia/linux.sdk.sha1 b/deps/v8/build/fuchsia/linux.sdk.sha1
new file mode 100644
index 0000000000..b891b02143
--- /dev/null
+++ b/deps/v8/build/fuchsia/linux.sdk.sha1
@@ -0,0 +1 @@
+8915992854282451632 \ No newline at end of file
diff --git a/deps/v8/build/fuchsia/mac.sdk.sha1 b/deps/v8/build/fuchsia/mac.sdk.sha1
new file mode 100644
index 0000000000..b622d13683
--- /dev/null
+++ b/deps/v8/build/fuchsia/mac.sdk.sha1
@@ -0,0 +1 @@
+8916000087704284384 \ No newline at end of file
diff --git a/deps/v8/build/fuchsia/net_test_server.py b/deps/v8/build/fuchsia/net_test_server.py
new file mode 100644
index 0000000000..5b7023c82d
--- /dev/null
+++ b/deps/v8/build/fuchsia/net_test_server.py
@@ -0,0 +1,89 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import common
+import json
+import logging
+import os
+import re
+import socket
+import sys
+import subprocess
+import tempfile
+
+DIR_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
+import chrome_test_server_spawner
+
+
+# Implementation of chrome_test_server_spawner.PortForwarder that uses SSH's
+# remote port forwarding feature to forward ports.
+class SSHPortForwarder(chrome_test_server_spawner.PortForwarder):
+ def __init__(self, target):
+ self._target = target
+
+ # Maps the host (server) port to the device port number.
+ self._port_mapping = {}
+
+ def Map(self, port_pairs):
+ for p in port_pairs:
+ _, host_port = p
+ self._port_mapping[host_port] = \
+ common.ConnectPortForwardingTask(self._target, host_port)
+
+ def GetDevicePortForHostPort(self, host_port):
+ return self._port_mapping[host_port]
+
+ def Unmap(self, device_port):
+ for host_port, entry in self._port_mapping.iteritems():
+ if entry == device_port:
+ forwarding_args = [
+ '-NT', '-O', 'cancel', '-R', '0:localhost:%d' % host_port]
+ task = self._target.RunCommandPiped([],
+ ssh_args=forwarding_args,
+ stderr=subprocess.PIPE)
+ task.wait()
+ if task.returncode != 0:
+ raise Exception(
+ 'Error %d when unmapping port %d' % (task.returncode,
+ device_port))
+ del self._port_mapping[host_port]
+ return
+
+ raise Exception('Unmap called for unknown port: %d' % device_port)
+
+
+def SetupTestServer(target, test_concurrency):
+ """Provisions a forwarding test server and configures |target| to use it.
+
+  Returns the SpawningServer instance for the test server."""
+
+ logging.debug('Starting test server.')
+ # The TestLauncher can launch more jobs than the limit specified with
+ # --test-launcher-jobs so the max number of spawned test servers is set to
+ # twice that limit here. See https://crbug.com/913156#c19.
+ spawning_server = chrome_test_server_spawner.SpawningServer(
+ 0, SSHPortForwarder(target), test_concurrency * 2)
+ forwarded_port = common.ConnectPortForwardingTask(
+ target, spawning_server.server_port)
+ spawning_server.Start()
+
+ logging.debug('Test server listening for connections (port=%d)' %
+ spawning_server.server_port)
+ logging.debug('Forwarded port is %d' % forwarded_port)
+
+ config_file = tempfile.NamedTemporaryFile(delete=True)
+
+ # Clean up the config JSON to only pass ports. See https://crbug.com/810209 .
+ config_file.write(json.dumps({
+ 'name': 'testserver',
+ 'address': '127.0.0.1',
+ 'spawner_url_base': 'http://localhost:%d' % forwarded_port
+ }))
+
+ config_file.flush()
+ target.PutFile(config_file.name, '/tmp/net-test-server-config')
+
+ return spawning_server
diff --git a/deps/v8/build/fuchsia/qemu_target.py b/deps/v8/build/fuchsia/qemu_target.py
new file mode 100644
index 0000000000..168364acfb
--- /dev/null
+++ b/deps/v8/build/fuchsia/qemu_target.py
@@ -0,0 +1,178 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements commands for running and interacting with Fuchsia on QEMU."""
+
+import boot_data
+import common
+import logging
+import target
+import os
+import platform
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from common import GetQemuRootForPlatform, EnsurePathExists
+
+
+# Virtual networking configuration data for QEMU.
+GUEST_NET = '192.168.3.0/24'
+GUEST_IP_ADDRESS = '192.168.3.9'
+HOST_IP_ADDRESS = '192.168.3.2'
+GUEST_MAC_ADDRESS = '52:54:00:63:5e:7b'
+
+
+class QemuTarget(target.Target):
+ def __init__(self, output_dir, target_cpu, cpu_cores, system_log_file,
+ require_kvm, ram_size_mb=2048):
+ """output_dir: The directory which will contain the files that are
+ generated to support the QEMU deployment.
+ target_cpu: The emulated target CPU architecture.
+ Can be 'x64' or 'arm64'."""
+ super(QemuTarget, self).__init__(output_dir, target_cpu)
+ self._qemu_process = None
+ self._ram_size_mb = ram_size_mb
+ self._system_log_file = system_log_file
+ self._cpu_cores = cpu_cores
+ self._require_kvm = require_kvm
+
+ def __enter__(self):
+ return self
+
+ # Used by the context manager to ensure that QEMU is killed when the Python
+ # process exits.
+ def __exit__(self, exc_type, exc_val, exc_tb):
+    self.Shutdown()
+
+ def Start(self):
+ qemu_path = os.path.join(GetQemuRootForPlatform(), 'bin',
+ 'qemu-system-' + self._GetTargetSdkLegacyArch())
+ kernel_args = boot_data.GetKernelArgs(self._output_dir)
+
+ # TERM=dumb tells the guest OS to not emit ANSI commands that trigger
+ # noisy ANSI spew from the user's terminal emulator.
+ kernel_args.append('TERM=dumb')
+
+ # Enable logging to the serial port. This is a temporary fix to investigate
+ # the root cause for https://crbug.com/869753 .
+ kernel_args.append('kernel.serial=legacy')
+
+ qemu_command = [qemu_path,
+ '-m', str(self._ram_size_mb),
+ '-nographic',
+ '-kernel', EnsurePathExists(
+ boot_data.GetTargetFile(self._GetTargetSdkArch(),
+ 'qemu-kernel.bin')),
+ '-initrd', EnsurePathExists(
+ boot_data.GetBootImage(self._output_dir, self._GetTargetSdkArch())),
+ '-smp', str(self._cpu_cores),
+
+ # Attach the blobstore and data volumes. Use snapshot mode to discard
+ # any changes.
+ '-snapshot',
+ '-drive', 'file=%s,format=qcow2,if=none,id=blobstore,snapshot=on' %
+ EnsurePathExists(
+ os.path.join(self._output_dir, 'fvm.blk.qcow2')),
+ '-device', 'virtio-blk-pci,drive=blobstore',
+
+ # Use stdio for the guest OS only; don't attach the QEMU interactive
+ # monitor.
+ '-serial', 'stdio',
+ '-monitor', 'none',
+
+ '-append', ' '.join(kernel_args)
+ ]
+
+ # Configure the machine to emulate, based on the target architecture.
+ if self._target_cpu == 'arm64':
+ qemu_command.extend([
+ '-machine','virt',
+ ])
+ netdev_type = 'virtio-net-pci'
+ else:
+ qemu_command.extend([
+ '-machine', 'q35',
+ ])
+ netdev_type = 'e1000'
+
+ # Configure the CPU to emulate.
+ # On Linux, we can enable lightweight virtualization (KVM) if the host and
+ # guest architectures are the same.
+ enable_kvm = self._require_kvm or (sys.platform.startswith('linux') and (
+ (self._target_cpu == 'arm64' and platform.machine() == 'aarch64') or
+ (self._target_cpu == 'x64' and platform.machine() == 'x86_64')) and
+ os.access('/dev/kvm', os.R_OK | os.W_OK))
+ if enable_kvm:
+ qemu_command.extend(['-enable-kvm', '-cpu', 'host,migratable=no'])
+ else:
+ logging.warning('Unable to launch QEMU with KVM acceleration.')
+ if self._target_cpu == 'arm64':
+ qemu_command.extend(['-cpu', 'cortex-a53'])
+ else:
+ qemu_command.extend(['-cpu', 'Haswell,+smap,-check,-fsgsbase'])
+
+ # Configure virtual network. It is used in the tests to connect to
+ # testserver running on the host.
+ netdev_config = 'user,id=net0,net=%s,dhcpstart=%s,host=%s' % \
+ (GUEST_NET, GUEST_IP_ADDRESS, HOST_IP_ADDRESS)
+
+ self._host_ssh_port = common.GetAvailableTcpPort()
+ netdev_config += ",hostfwd=tcp::%s-:22" % self._host_ssh_port
+ qemu_command.extend([
+ '-netdev', netdev_config,
+ '-device', '%s,netdev=net0,mac=%s' % (netdev_type, GUEST_MAC_ADDRESS),
+ ])
+
+ # We pass a separate stdin stream to qemu. Sharing stdin across processes
+ # leads to flakiness due to the OS prematurely killing the stream and the
+ # Python script panicking and aborting.
+ # The precise root cause is still nebulous, but this fix works.
+ # See crbug.com/741194.
+ logging.debug('Launching QEMU.')
+ logging.debug(' '.join(qemu_command))
+
+ # Zircon sends debug logs to serial port (see kernel.serial=legacy flag
+ # above). Serial port is redirected to a file through QEMU stdout.
+ # Unless a |_system_log_file| is explicitly set, we output the kernel serial
+ # log to a temporary file, and print that out if we are unable to connect to
+ # the QEMU guest, to make it easier to diagnose connectivity issues.
+ temporary_system_log_file = None
+ if self._system_log_file:
+ stdout = self._system_log_file
+ stderr = subprocess.STDOUT
+ else:
+ temporary_system_log_file = tempfile.NamedTemporaryFile('w')
+ stdout = temporary_system_log_file
+ stderr = sys.stderr
+
+ self._qemu_process = subprocess.Popen(qemu_command, stdin=open(os.devnull),
+ stdout=stdout, stderr=stderr)
+ try:
+      self._WaitUntilReady()
+ except target.FuchsiaTargetException:
+ if temporary_system_log_file:
+ logging.info("Kernel logs:\n" +
+ open(temporary_system_log_file.name, 'r').read())
+ raise
+
+ def Shutdown(self):
+ if self._IsQemuStillRunning():
+ logging.info('Shutting down QEMU.')
+ self._qemu_process.kill()
+
+ def _IsQemuStillRunning(self):
+ if not self._qemu_process:
+ return False
+ return os.waitpid(self._qemu_process.pid, os.WNOHANG)[0] == 0
+
+ def _GetEndpoint(self):
+ if not self._IsQemuStillRunning():
+ raise Exception('QEMU quit unexpectedly.')
+ return ('localhost', self._host_ssh_port)
+
+ def _GetSshConfigPath(self):
+ return boot_data.GetSSHConfigPath(self._output_dir)
diff --git a/deps/v8/build/fuchsia/qemu_target_test.py b/deps/v8/build/fuchsia/qemu_target_test.py
new file mode 100755
index 0000000000..da596ee5b0
--- /dev/null
+++ b/deps/v8/build/fuchsia/qemu_target_test.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import qemu_target
+import shutil
+import subprocess
+import tempfile
+import time
+import unittest
+
+TEST_PAYLOAD = "Let's get this payload across the finish line!"
+
+tmpdir = tempfile.mkdtemp()
+
+# Register the target with the context manager so that it always gets
+# torn down on process exit. Otherwise there might be lingering QEMU instances
+# if Python crashes or is interrupted.
+# Note: QemuTarget also requires cpu_cores, system_log_file and require_kvm
+# arguments (see qemu_target.py); illustrative values are supplied here.
+with qemu_target.QemuTarget(tmpdir, 'x64', cpu_cores=4, system_log_file=None,
+                            require_kvm=False) as target:
+ class TestQemuTarget(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ target.Start()
+
+ @classmethod
+ def tearDownClass(cls):
+ target.Shutdown()
+ shutil.rmtree(tmpdir)
+
+ def testCopyBidirectional(self):
+ tmp_path = tmpdir + "/payload"
+ with open(tmp_path, "w") as tmpfile:
+ tmpfile.write(TEST_PAYLOAD)
+ target.PutFile(tmp_path, '/tmp/payload')
+
+ tmp_path_roundtrip = tmp_path + ".roundtrip"
+ target.GetFile('/tmp/payload', tmp_path_roundtrip)
+ with open(tmp_path_roundtrip) as roundtrip:
+ self.assertEqual(TEST_PAYLOAD, roundtrip.read())
+
+ def testRunCommand(self):
+ self.assertEqual(0, target.RunCommand(['true']))
+ self.assertEqual(1, target.RunCommand(['false']))
+
+ def testRunCommandPiped(self):
+ proc = target.RunCommandPiped(['cat'],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+ proc.stdin.write(TEST_PAYLOAD)
+ proc.stdin.flush()
+ proc.stdin.close()
+ self.assertEqual(TEST_PAYLOAD, proc.stdout.readline())
+ proc.kill()
+
+
+ if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/fuchsia/remote_cmd.py b/deps/v8/build/fuchsia/remote_cmd.py
new file mode 100644
index 0000000000..cabdf1631d
--- /dev/null
+++ b/deps/v8/build/fuchsia/remote_cmd.py
@@ -0,0 +1,134 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import sys
+import threading
+
+_SSH = ['ssh']
+_SCP = ['scp', '-C'] # Use gzip compression.
+_SSH_LOGGER = logging.getLogger('ssh')
+
+COPY_TO_TARGET = 0
+COPY_FROM_TARGET = 1
+
+
+def _IsLinkLocalIPv6(hostname):
+ return hostname.startswith('fe80::')
+
+# Adds ""
+def _EscapeIfIPv6Address(address):
+ if ':' in address:
+ return '[' + address + ']'
+ else:
+ return address
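+# e.g. _EscapeIfIPv6Address('fe80::1') returns '[fe80::1]', while IPv4
+# addresses such as '192.168.3.9' are returned unchanged.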
+
+class CommandRunner(object):
+ """Helper class used to execute commands on a remote host over SSH."""
+
+ def __init__(self, config_path, host, port):
+ """Creates a CommandRunner that connects to the specified |host| and |port|
+ using the ssh config at the specified |config_path|.
+
+ config_path: Full path to SSH configuration.
+ host: The hostname or IP address of the remote host.
+ port: The port to connect to."""
+
+ self._config_path = config_path
+ self._host = host
+ self._port = port
+
+ def _GetSshCommandLinePrefix(self):
+ return _SSH + ['-F', self._config_path, self._host, '-p', str(self._port)]
+
+ def RunCommand(self, command, silent, timeout_secs=None):
+ """Executes an SSH command on the remote host and blocks until completion.
+
+ command: A list of strings containing the command and its arguments.
+ silent: If true, suppresses all output from 'ssh'.
+ timeout_secs: If set, limits the amount of time that |command| may run.
+ Commands which exceed the timeout are killed.
+
+ Returns the exit code from the remote command."""
+
+ ssh_command = self._GetSshCommandLinePrefix() + command
+ _SSH_LOGGER.debug('ssh exec: ' + ' '.join(ssh_command))
+ if silent:
+ devnull = open(os.devnull, 'w')
+ process = subprocess.Popen(ssh_command, stderr=devnull, stdout=devnull)
+ else:
+ process = subprocess.Popen(ssh_command)
+
+ timeout_timer = None
+ if timeout_secs:
+ timeout_timer = threading.Timer(timeout_secs, process.kill)
+ timeout_timer.start()
+
+ process.wait()
+
+ if timeout_timer:
+ timeout_timer.cancel()
+
+ if process.returncode == -9:
+ raise Exception('Timeout when executing \"%s\".' % ' '.join(command))
+
+ return process.returncode
+
+
+ def RunCommandPiped(self, command = None, ssh_args = None, **kwargs):
+ """Executes an SSH command on the remote host and returns a process object
+ with access to the command's stdio streams. Does not block.
+
+ command: A list of strings containing the command and its arguments.
+ ssh_args: Arguments that will be passed to SSH.
+ kwargs: A dictionary of parameters to be passed to subprocess.Popen().
+ The parameters can be used to override stdin and stdout, for
+ example.
+
+ Returns a Popen object for the command."""
+
+ if not command:
+ command = []
+ if not ssh_args:
+ ssh_args = []
+
+ ssh_command = self._GetSshCommandLinePrefix() + ssh_args + ['--'] + command
+ _SSH_LOGGER.debug(' '.join(ssh_command))
+ return subprocess.Popen(ssh_command, **kwargs)
+
+
+ def RunScp(self, sources, dest, direction, recursive=False):
+ """Copies a file to or from a remote host using SCP and blocks until
+ completion.
+
+ sources: Paths of the files to be copied.
+ dest: The path that |source| will be copied to.
+ direction: Indicates whether the file should be copied to
+ or from the remote side.
+ Valid values are COPY_TO_TARGET or COPY_FROM_TARGET.
+ recursive: If true, performs a recursive copy.
+
+ Function will raise an assertion if a failure occurred."""
+
+ scp_command = _SCP[:]
+ if _SSH_LOGGER.getEffectiveLevel() == logging.DEBUG:
+ scp_command.append('-v')
+ if recursive:
+ scp_command.append('-r')
+
+ host = _EscapeIfIPv6Address(self._host)
+
+ if direction == COPY_TO_TARGET:
+ dest = "%s:%s" % (host, dest)
+ else:
+ sources = ["%s:%s" % (host, source) for source in sources]
+
+ scp_command += ['-F', self._config_path, '-P', str(self._port)]
+ scp_command += sources
+ scp_command += [dest]
+
+ _SSH_LOGGER.debug(' '.join(scp_command))
+ subprocess.check_call(scp_command, stdout=open(os.devnull, 'w'))
diff --git a/deps/v8/build/fuchsia/run_package.py b/deps/v8/build/fuchsia/run_package.py
new file mode 100644
index 0000000000..e8ea07d2f7
--- /dev/null
+++ b/deps/v8/build/fuchsia/run_package.py
@@ -0,0 +1,224 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains a helper function for deploying and executing a packaged
+executable on a Target."""
+
+import common
+import hashlib
+import logging
+import multiprocessing
+import os
+import re
+import select
+import subprocess
+import sys
+import time
+import threading
+import uuid
+
+from symbolizer import SymbolizerFilter
+
+FAR = os.path.join(common.SDK_ROOT, 'tools', 'far')
+
+# Amount of time to wait for the termination of the system log output thread.
+_JOIN_TIMEOUT_SECS = 5
+
+
+def _AttachKernelLogReader(target):
+ """Attaches a kernel log reader as a long-running SSH task."""
+
+ logging.info('Attaching kernel logger.')
+ return target.RunCommandPiped(['dlog', '-f'], stdin=open(os.devnull, 'r'),
+ stdout=subprocess.PIPE)
+
+
+class MergedInputStream(object):
+ """Merges a number of input streams into a UNIX pipe on a dedicated thread.
+ Terminates when the file descriptor of the primary stream (the first in
+ the sequence) is closed."""
+
+ def __init__(self, streams):
+ assert len(streams) > 0
+ self._streams = streams
+ self._read_pipe, write_pipe = os.pipe()
+ # Disable buffering for the stream to make sure there is no delay in logs.
+ self._output_stream = os.fdopen(write_pipe, 'w', 0)
+ self._thread = threading.Thread(target=self._Run)
+
+ def Start(self):
+ """Returns a file descriptor to the merged output stream."""
+
+    self._thread.start()
+ return self._read_pipe
+
+ def _Run(self):
+ streams_by_fd = {}
+ primary_fd = self._streams[0].fileno()
+ for s in self._streams:
+ streams_by_fd[s.fileno()] = s
+
+ # Set when the primary FD is closed. Input from other FDs will continue to
+ # be processed until select() runs dry.
+ flush = False
+
+ # The lifetime of the MergedInputStream is bound to the lifetime of
+ # |primary_fd|.
+ while primary_fd:
+ # When not flushing: block until data is read or an exception occurs.
+ rlist, _, xlist = select.select(streams_by_fd, [], streams_by_fd)
+
+ if len(rlist) == 0 and flush:
+ break
+
+ for fileno in xlist:
+ del streams_by_fd[fileno]
+ if fileno == primary_fd:
+ primary_fd = None
+
+ for fileno in rlist:
+ line = streams_by_fd[fileno].readline()
+ if line:
+ self._output_stream.write(line + '\n')
+ else:
+ del streams_by_fd[fileno]
+ if fileno == primary_fd:
+ primary_fd = None
+
+ # Flush the streams by executing nonblocking reads from the input file
+ # descriptors until no more data is available, or all the streams are
+ # closed.
+ while streams_by_fd:
+ rlist, _, _ = select.select(streams_by_fd, [], [], 0)
+
+ if not rlist:
+ break
+
+ for fileno in rlist:
+ line = streams_by_fd[fileno].readline()
+ if line:
+ self._output_stream.write(line + '\n')
+ else:
+ del streams_by_fd[fileno]
+
+
+def _GetComponentUri(package_name):
+ return 'fuchsia-pkg://fuchsia.com/%s#meta/%s.cmx' % (package_name,
+ package_name)
+
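+# e.g. for a hypothetical package name 'base_unittests', this yields
+# 'fuchsia-pkg://fuchsia.com/base_unittests#meta/base_unittests.cmx'.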
+
+class RunPackageArgs:
+ """RunPackage() configuration arguments structure.
+
+ install_only: If set, skips the package execution step.
+ symbolizer_config: A newline delimited list of source files contained
+ in the package. Omitting this parameter will disable symbolization.
+ system_logging: If set, connects a system log reader to the target.
+ target_staging_path: Path to which package FARs will be staged, during
+ installation. Defaults to staging into '/data'.
+ """
+ def __init__(self):
+ self.install_only = False
+ self.symbolizer_config = None
+ self.system_logging = False
+ self.target_staging_path = '/data'
+
+ @staticmethod
+ def FromCommonArgs(args):
+ run_package_args = RunPackageArgs()
+ run_package_args.install_only = args.install_only
+ run_package_args.system_logging = args.include_system_logs
+ run_package_args.target_staging_path = args.target_staging_path
+ return run_package_args
+
+
+def _DrainStreamToStdout(stream, quit_event):
+ """Outputs the contents of |stream| until |quit_event| is set."""
+
+ while not quit_event.is_set():
+ rlist, _, _ = select.select([ stream ], [], [], 0.1)
+ if rlist:
+ line = rlist[0].readline()
+ if not line:
+ return
+ print line.rstrip()
+
+
+def RunPackage(output_dir, target, package_path, package_name,
+ package_deps, package_args, args):
+ """Installs the Fuchsia package at |package_path| on the target,
+ executes it with |package_args|, and symbolizes its output.
+
+ output_dir: The path containing the build output files.
+ target: The deployment Target object that will run the package.
+ package_path: The path to the .far package file.
+ package_name: The name of app specified by package metadata.
+ package_args: The arguments which will be passed to the Fuchsia process.
+ args: Structure of arguments to configure how the package will be run.
+
+ Returns the exit code of the remote package process."""
+
+ system_logger = (
+ _AttachKernelLogReader(target) if args.system_logging else None)
+ try:
+ if system_logger:
+ # Spin up a thread to asynchronously dump the system log to stdout
+ # for easier diagnoses of early, pre-execution failures.
+ log_output_quit_event = multiprocessing.Event()
+ log_output_thread = threading.Thread(
+ target=lambda: _DrainStreamToStdout(system_logger.stdout,
+ log_output_quit_event))
+ log_output_thread.daemon = True
+ log_output_thread.start()
+
+ target.InstallPackage(package_path, package_name, package_deps)
+
+ if system_logger:
+ log_output_quit_event.set()
+ log_output_thread.join(timeout=_JOIN_TIMEOUT_SECS)
+
+ if args.install_only:
+ logging.info('Installation complete.')
+ return
+
+ logging.info('Running application.')
+ command = ['run', _GetComponentUri(package_name)] + package_args
+ process = target.RunCommandPiped(command,
+ stdin=open(os.devnull, 'r'),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+
+ if system_logger:
+ output_fd = MergedInputStream([process.stdout,
+ system_logger.stdout]).Start()
+ else:
+ output_fd = process.stdout.fileno()
+
+ # Run the log data through the symbolizer process.
+ build_ids_paths = map(
+ lambda package_path: os.path.join(
+ os.path.dirname(package_path), 'ids.txt'),
+ [package_path] + package_deps)
+ output_stream = SymbolizerFilter(output_fd, build_ids_paths)
+
+ for next_line in output_stream:
+ print next_line.rstrip()
+
+ process.wait()
+ if process.returncode == 0:
+ logging.info('Process exited normally with status code 0.')
+ else:
+ # The test runner returns an error status code if *any* tests fail,
+ # so we should proceed anyway.
+ logging.warning('Process exited with status code %d.' %
+ process.returncode)
+
+ finally:
+ if system_logger:
+ logging.info('Terminating kernel log reader.')
+ log_output_quit_event.set()
+ log_output_thread.join()
+ system_logger.kill()
+
+ return process.returncode
diff --git a/deps/v8/build/fuchsia/symbolizer.py b/deps/v8/build/fuchsia/symbolizer.py
new file mode 100644
index 0000000000..0b7c39e918
--- /dev/null
+++ b/deps/v8/build/fuchsia/symbolizer.py
@@ -0,0 +1,43 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+
+from common import SDK_ROOT
+
+
+def SymbolizerFilter(input_fd, build_ids_files):
+ """Symbolizes an output stream from a process.
+
+ input_fd: A file descriptor of the stream to be symbolized.
+  build_ids_files: Paths to the ids.txt files which map build IDs to
+      unstripped binaries on the filesystem.
+ Returns a generator that yields symbolized process output."""
+
+ llvm_symbolizer_path = os.path.join(SDK_ROOT, os.pardir, os.pardir,
+ 'llvm-build', 'Release+Asserts', 'bin',
+ 'llvm-symbolizer')
+ symbolizer = os.path.join(SDK_ROOT, 'tools', 'symbolize')
+ symbolizer_cmd = [symbolizer,
+ '-ids-rel', '-llvm-symbolizer', llvm_symbolizer_path,
+ '-build-id-dir', os.path.join(SDK_ROOT, '.build-id')]
+ for build_ids_file in build_ids_files:
+ symbolizer_cmd.extend(['-ids', build_ids_file])
+
+ logging.info('Running "%s".' % ' '.join(symbolizer_cmd))
+ symbolizer_proc = subprocess.Popen(
+ symbolizer_cmd,
+ stdout=subprocess.PIPE,
+ stdin=input_fd,
+ close_fds=True)
+
+ while True:
+ line = symbolizer_proc.stdout.readline()
+ if not line:
+ break
+ yield line
+
+ symbolizer_proc.wait()
diff --git a/deps/v8/build/fuchsia/target.py b/deps/v8/build/fuchsia/target.py
new file mode 100644
index 0000000000..a5a5d11c4b
--- /dev/null
+++ b/deps/v8/build/fuchsia/target.py
@@ -0,0 +1,346 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import boot_data
+import common
+import json
+import logging
+import os
+import remote_cmd
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+
+
+_SHUTDOWN_CMD = ['dm', 'poweroff']
+_ATTACH_MAX_RETRIES = 10
+_ATTACH_RETRY_INTERVAL = 1
+
+_PM = os.path.join(common.SDK_ROOT, 'tools', 'pm')
+_REPO_NAME = 'chrome_runner'
+
+# Amount of time to wait for Amber to complete package installation, as a
+# mitigation against hangs due to amber/network-related failures.
+_INSTALL_TIMEOUT_SECS = 5 * 60
+
+
+def _GetPackageInfo(package_path):
+ """Returns a tuple with the name and version of a package."""
+
+ # Query the metadata file which resides next to the package file.
+ package_info = json.load(
+ open(os.path.join(os.path.dirname(package_path), 'package')))
+ return (package_info['name'], package_info['version'])
+
+
+def _PublishPackage(tuf_root, package_path):
+ """Publishes a combined FAR package to a TUF repository root."""
+
+ subprocess.check_call(
+ [_PM, 'publish', '-a', '-f', package_path, '-r', tuf_root, '-vt', '-v'],
+ stderr=subprocess.STDOUT)
+
+
+class _MapRemoteDataPathForPackage:
+ """Callable object which remaps /data paths to their package-specific
+ locations."""
+
+ def __init__(self, package_name, package_version):
+ self.data_path = '/data/r/sys/fuchsia.com:{0}:{1}#meta:{0}.cmx'.format(
+ package_name, package_version)
+
+ def __call__(self, path):
+ if path[:5] == '/data':
+ return self.data_path + path[5:]
+ return path
+
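+# e.g. for a hypothetical package 'web_runner' at version 0,
+# _MapRemoteDataPathForPackage('web_runner', 0)('/data/foo') yields
+# '/data/r/sys/fuchsia.com:web_runner:0#meta:web_runner.cmx/foo'.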
+
+class FuchsiaTargetException(Exception):
+ def __init__(self, message):
+ super(FuchsiaTargetException, self).__init__(message)
+
+
+class Target(object):
+ """Base class representing a Fuchsia deployment target."""
+
+ def __init__(self, output_dir, target_cpu):
+ self._output_dir = output_dir
+ self._started = False
+ self._dry_run = False
+ self._target_cpu = target_cpu
+ self._command_runner = None
+
+ # Functions used by the Python context manager for teardown.
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return self
+
+ def Start(self):
+ """Handles the instantiation and connection process for the Fuchsia
+ target instance."""
+
+ pass
+
+ def IsStarted(self):
+ """Returns True if the Fuchsia target instance is ready to accept
+ commands."""
+
+ return self._started
+
+ def IsNewInstance(self):
+ """Returns True if the connected target instance is newly provisioned."""
+
+ return True
+
+ def GetCommandRunner(self):
+ """Returns CommandRunner that can be used to execute commands on the
+ target. Most clients should prefer RunCommandPiped() and RunCommand()."""
+
+ self._AssertIsStarted()
+
+    if self._command_runner is None:
+ host, port = self._GetEndpoint()
+ self._command_runner = \
+ remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
+
+ return self._command_runner
+
+ def RunCommandPiped(self, command, **kwargs):
+ """Starts a remote command and immediately returns a Popen object for the
+ command. The caller may interact with the streams, inspect the status code,
+ wait on command termination, etc.
+
+ command: A list of strings representing the command and arguments.
+ kwargs: A dictionary of parameters to be passed to subprocess.Popen().
+ The parameters can be used to override stdin and stdout, for
+ example.
+
+ Returns: a Popen object.
+
+    Note: this method does not block."""
+
+ logging.debug('running (non-blocking) \'%s\'.' % ' '.join(command))
+ return self.GetCommandRunner().RunCommandPiped(command, **kwargs)
+
+ def RunCommand(self, command, silent=False, timeout_secs=None):
+ """Executes a remote command and waits for it to finish executing.
+
+ Returns the exit code of the command."""
+
+ logging.debug('running \'%s\'.' % ' '.join(command))
+ return self.GetCommandRunner().RunCommand(command, silent,
+ timeout_secs=timeout_secs)
+
+ def EnsurePackageDataDirectoryExists(self, package_name):
+ """Ensures that the specified package's isolated /data directory exists."""
+ return self.RunCommand(
+        ['mkdir', '-p',
+         _MapRemoteDataPathForPackage(package_name, 0)('/data')])
+
+ def PutFile(self, source, dest, recursive=False, for_package=None):
+ """Copies a file from the local filesystem to the target filesystem.
+
+ source: The path of the file being copied.
+ dest: The path on the remote filesystem which will be copied to.
+ recursive: If true, performs a recursive copy.
+ for_package: If specified, /data in the |dest| is mapped to the package's
+ isolated /data location.
+ """
+
+ assert type(source) is str
+ self.PutFiles([source], dest, recursive, for_package)
+
+ def PutFiles(self, sources, dest, recursive=False, for_package=None):
+ """Copies files from the local filesystem to the target filesystem.
+
+    sources: List of local file paths to copy from.
+ dest: The path on the remote filesystem which will be copied to.
+ recursive: If true, performs a recursive copy.
+ for_package: If specified, /data in the |dest| is mapped to the package's
+ isolated /data location.
+ """
+
+ assert type(sources) is tuple or type(sources) is list
+ if for_package:
+ self.EnsurePackageDataDirectoryExists(for_package)
+ dest = _MapRemoteDataPathForPackage(for_package, 0)(dest)
+ logging.debug('copy local:%s => remote:%s' % (sources, dest))
+ self.GetCommandRunner().RunScp(sources, dest, remote_cmd.COPY_TO_TARGET,
+ recursive)
+
+ def GetFile(self, source, dest, for_package=None):
+ """Copies a file from the target filesystem to the local filesystem.
+
+ source: The path of the file being copied.
+ dest: The path on the local filesystem which will be copied to.
+    for_package: If specified, /data in the |source| path is mapped to the
+        package's isolated /data location.
+ """
+ assert type(source) is str
+ self.GetFiles([source], dest, for_package)
+
+ def GetFiles(self, sources, dest, for_package=None):
+ """Copies files from the target filesystem to the local filesystem.
+
+ sources: List of remote file paths to copy.
+ dest: The path on the local filesystem which will be copied to.
+ for_package: If specified, /data in paths in |sources| is mapped to the
+ package's isolated /data location.
+ """
+ assert type(sources) is tuple or type(sources) is list
+ self._AssertIsStarted()
+ if for_package:
+ sources = map(_MapRemoteDataPathForPackage(for_package, 0), sources)
+ logging.debug('copy remote:%s => local:%s' % (sources, dest))
+ return self.GetCommandRunner().RunScp(sources, dest,
+ remote_cmd.COPY_FROM_TARGET)
+
+ def _GetEndpoint(self):
+ """Returns a (host, port) tuple for the SSH connection to the target."""
+ raise NotImplementedError
+
+ def _GetTargetSdkArch(self):
+ """Returns the Fuchsia SDK architecture name for the target CPU."""
+ if self._target_cpu == 'arm64' or self._target_cpu == 'x64':
+ return self._target_cpu
+    raise FuchsiaTargetException('Unknown target_cpu: ' + self._target_cpu)
+
+ def _AssertIsStarted(self):
+ assert self.IsStarted()
+
+ def _WaitUntilReady(self, retries=_ATTACH_MAX_RETRIES):
+ logging.info('Connecting to Fuchsia using SSH.')
+
+ for retry in xrange(retries + 1):
+ host, port = self._GetEndpoint()
+ runner = remote_cmd.CommandRunner(self._GetSshConfigPath(), host, port)
+ if runner.RunCommand(['true'], True) == 0:
+ logging.info('Connected!')
+ self._started = True
+ return True
+ time.sleep(_ATTACH_RETRY_INTERVAL)
+
+ logging.error('Timeout limit reached.')
+
+ raise FuchsiaTargetException('Couldn\'t connect using SSH.')
+
+  def _GetSshConfigPath(self):
+ raise NotImplementedError
+
+ # TODO: remove this once all instances of architecture names have been
+ # converted to the new naming pattern.
+ def _GetTargetSdkLegacyArch(self):
+ """Returns the Fuchsia SDK architecture name for the target CPU."""
+ if self._target_cpu == 'arm64':
+ return 'aarch64'
+ elif self._target_cpu == 'x64':
+ return 'x86_64'
+    raise Exception('Unknown target_cpu: %s' % self._target_cpu)
+
+
+ def InstallPackage(self, package_path, package_name, package_deps):
+    """Installs a package and its dependencies on the device. If the package
+    is already installed, it is updated to the new version.
+
+ package_path: Path to the .far file to be installed.
+ package_name: Package name.
+ package_deps: List of .far files with the packages that the main package
+ depends on. These packages are installed or updated as well.
+ """
+    # Create these before the try block so the finally clause can always
+    # reference them.
+    tuf_root = tempfile.mkdtemp()
+    pm_serve_task = None
+    try:
+
+ # Publish all packages to the serving TUF repository under |tuf_root|.
+ subprocess.check_call([_PM, 'newrepo', '-repo', tuf_root])
+ all_packages = [package_path] + package_deps
+ for next_package_path in all_packages:
+ _PublishPackage(tuf_root, next_package_path)
+
+ # Serve the |tuf_root| using 'pm serve' and configure the target to pull
+ # from it.
+ serve_port = common.GetAvailableTcpPort()
+ pm_serve_task = subprocess.Popen(
+ [_PM, 'serve', '-d', os.path.join(tuf_root, 'repository'), '-l',
+ ':%d' % serve_port, '-q'])
+ remote_port = common.ConnectPortForwardingTask(self, serve_port, 0)
+ self._RegisterAmberRepository(tuf_root, remote_port)
+
+ # Install all packages.
+ for next_package_path in all_packages:
+ install_package_name, package_version = \
+ _GetPackageInfo(next_package_path)
+ logging.info('Installing %s version %s.' %
+ (install_package_name, package_version))
+ return_code = self.RunCommand(['amberctl', 'get_up', '-n',
+ install_package_name, '-v',
+ package_version],
+ timeout_secs=_INSTALL_TIMEOUT_SECS)
+ if return_code != 0:
+ raise Exception('Error while installing %s.' % install_package_name)
+
+ finally:
+ self._UnregisterAmberRepository()
+ if pm_serve_task:
+ pm_serve_task.kill()
+ shutil.rmtree(tuf_root)
+
+
+ def _RegisterAmberRepository(self, tuf_repo, remote_port):
+ """Configures a device to use a local TUF repository as an installation
+ source for packages.
+ |tuf_repo|: The host filesystem path to the TUF repository.
+ |remote_port|: The reverse-forwarded port used to connect to instance of
+ `pm serve` that is serving the contents of |tuf_repo|."""
+
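+    # The device fetches packages from http://127.0.0.1:<remote_port>; the
+    # reverse port forward set up by the caller maps that port back to the
+    # host's 'pm serve' instance.
+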
+ # Extract the public signing key for inclusion in the config file.
+ root_keys = []
+ root_json_path = os.path.join(tuf_repo, 'repository', 'root.json')
+ root_json = json.load(open(root_json_path, 'r'))
+ for root_key_id in root_json['signed']['roles']['root']['keyids']:
+ root_keys.append({
+ 'Type': root_json['signed']['keys'][root_key_id]['keytype'],
+ 'Value': root_json['signed']['keys'][root_key_id]['keyval']['public']
+ })
+
+    # "pm serve" can automatically generate a "config.json" file at query
+    # time, but the file is unusable because it specifies URLs with port
+    # numbers that are unreachable from across the port-forwarding boundary.
+    # Instead, we generate our own config file using the forwarded port
+    # numbers.
+ config_file = open(os.path.join(tuf_repo, 'repository', 'repo_config.json'),
+ 'w')
+ json.dump({
+ 'ID': _REPO_NAME,
+ 'RepoURL': "http://127.0.0.1:%d" % remote_port,
+ 'BlobRepoURL': "http://127.0.0.1:%d/blobs" % remote_port,
+ 'RatePeriod': 10,
+ 'RootKeys': root_keys,
+ 'StatusConfig': {
+ 'Enabled': True
+ },
+ 'Auto': True
+ }, config_file)
+ config_file.close()
+
+ # Register the repo.
+ return_code = self.RunCommand(
+ [('amberctl rm_src -n %s; ' +
+ 'amberctl add_src -f http://127.0.0.1:%d/repo_config.json')
+ % (_REPO_NAME, remote_port)])
+ if return_code != 0:
+ raise Exception('Error code %d when running amberctl.' % return_code)
+
+
+ def _UnregisterAmberRepository(self):
+ """Unregisters the Amber repository."""
+
+ logging.debug('Unregistering Amber repository.')
+ self.RunCommand(['amberctl', 'rm_src', '-n', _REPO_NAME])
+
+ # Re-enable 'devhost' repo if it's present. This is useful for devices that
+ # were booted with 'fx serve'.
+ self.RunCommand(['amberctl', 'enable_src', '-n', 'devhost'], silent=True)
diff --git a/deps/v8/build/fuchsia/test_runner.py b/deps/v8/build/fuchsia/test_runner.py
new file mode 100755
index 0000000000..ca0c176341
--- /dev/null
+++ b/deps/v8/build/fuchsia/test_runner.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Deploys and runs a test package on a Fuchsia target."""
+
+import argparse
+import json
+import logging
+import os
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from common_args import AddCommonArgs, ConfigureLogging, GetDeploymentTargetForArgs
+from net_test_server import SetupTestServer
+from run_package import RunPackage, RunPackageArgs
+
+DEFAULT_TEST_CONCURRENCY = 4
+
+TEST_RESULT_PATH = '/data/test_summary.json'
+TEST_FILTER_PATH = '/data/test_filter.txt'
+
+def main():
+ parser = argparse.ArgumentParser()
+ AddCommonArgs(parser)
+ parser.add_argument('--gtest_filter',
+ help='GTest filter to use in place of any default.')
+ parser.add_argument('--gtest_repeat',
+ help='GTest repeat value to use. This also disables the '
+ 'test launcher timeout.')
+ parser.add_argument('--test-launcher-retry-limit',
+                      help='Number of times the test suite will retry failing '
+                           'tests. This is multiplicative with --gtest_repeat.')
+ parser.add_argument('--gtest_break_on_failure', action='store_true',
+ default=False,
+ help='Should GTest break on failure; useful with '
+ '--gtest_repeat.')
+ parser.add_argument('--single-process-tests', action='store_true',
+ default=False,
+ help='Runs the tests and the launcher in the same '
+ 'process. Useful for debugging.')
+ parser.add_argument('--test-launcher-batch-limit',
+ type=int,
+                      help='Sets the maximum number of tests to run in a '
+                           'single child process.')
+ # --test-launcher-filter-file is specified relative to --output-directory,
+ # so specifying type=os.path.* will break it.
+ parser.add_argument('--test-launcher-filter-file',
+ default=None,
+ help='Override default filter file passed to target test '
+ 'process. Set an empty path to disable filtering.')
+ parser.add_argument('--test-launcher-jobs',
+ type=int,
+ help='Sets the number of parallel test jobs.')
+ parser.add_argument('--test-launcher-summary-output',
+ help='Where the test launcher will output its json.')
+ parser.add_argument('--enable-test-server', action='store_true',
+ default=False,
+ help='Enable Chrome test server spawner.')
+ parser.add_argument('child_args', nargs='*',
+ help='Arguments for the test process.')
+ parser.add_argument('--test-launcher-bot-mode', action='store_true',
+ default=False,
+                      help='Informs the TestLauncher that it should enable '
+ 'special allowances for running on a test bot.')
+ args = parser.parse_args()
+ ConfigureLogging(args)
+
+ child_args = ['--test-launcher-retry-limit=0']
+ if args.single_process_tests:
+ child_args.append('--single-process-tests')
+ if args.test_launcher_bot_mode:
+ child_args.append('--test-launcher-bot-mode')
+ if args.test_launcher_batch_limit:
+ child_args.append('--test-launcher-batch-limit=%d' %
+ args.test_launcher_batch_limit)
+
+ test_concurrency = args.test_launcher_jobs \
+ if args.test_launcher_jobs else DEFAULT_TEST_CONCURRENCY
+ child_args.append('--test-launcher-jobs=%d' % test_concurrency)
+
+ if args.gtest_filter:
+ child_args.append('--gtest_filter=' + args.gtest_filter)
+ if args.gtest_repeat:
+ child_args.append('--gtest_repeat=' + args.gtest_repeat)
+ child_args.append('--test-launcher-timeout=-1')
+ if args.test_launcher_retry_limit:
+ child_args.append(
+ '--test-launcher-retry-limit=' + args.test_launcher_retry_limit)
+ if args.gtest_break_on_failure:
+ child_args.append('--gtest_break_on_failure')
+ if args.child_args:
+ child_args.extend(args.child_args)
+
+ if args.test_launcher_summary_output:
+ child_args.append('--test-launcher-summary-output=' + TEST_RESULT_PATH)
+
+ with GetDeploymentTargetForArgs(args) as target:
+ target.Start()
+
+ if args.test_launcher_filter_file:
+ target.PutFile(args.test_launcher_filter_file, TEST_FILTER_PATH,
+ for_package=args.package_name)
+ child_args.append('--test-launcher-filter-file=' + TEST_FILTER_PATH)
+
+ test_server = None
+ if args.enable_test_server:
+ test_server = SetupTestServer(target, test_concurrency)
+
+ run_package_args = RunPackageArgs.FromCommonArgs(args)
+ returncode = RunPackage(
+ args.output_directory, target, args.package, args.package_name,
+ args.package_dep, child_args, run_package_args)
+
+ if test_server:
+ test_server.Stop()
+
+ if args.test_launcher_summary_output:
+ target.GetFile(TEST_RESULT_PATH, args.test_launcher_summary_output,
+ for_package=args.package_name)
+
+ return returncode
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/fuchsia/update_sdk.py b/deps/v8/build/fuchsia/update_sdk.py
new file mode 100755
index 0000000000..f7d6115247
--- /dev/null
+++ b/deps/v8/build/fuchsia/update_sdk.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os'
+entry so that it only runs when .gclient's target_os includes 'fuchsia'."""
+
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+from common import GetHostOsFromPlatform, GetHostArchFromPlatform
+
+REPOSITORY_ROOT = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..'))
+sys.path.append(os.path.join(REPOSITORY_ROOT, 'build'))
+
+import find_depot_tools
+
+SDK_SUBDIRS = ["arch", "pkg", "qemu", "sysroot", "target",
+ "toolchain_libs", "tools"]
+
+EXTRA_SDK_HASH_PREFIX = ''
+
+def GetSdkGeneration(sdk_hash):
+  if not sdk_hash:
+    return None
+
+  cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'ls',
+         '-L', GetBucketForPlatform() + sdk_hash]
+ sdk_details = subprocess.check_output(cmd)
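+  # 'gsutil ls -L' prints object metadata, including a line of the form
+  # 'Generation:    1561234567890123' (value shown is illustrative).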
+  m = re.search(r'Generation:\s*(\d*)', sdk_details)
+ if not m:
+ return None
+ return int(m.group(1))
+
+
+def GetSdkHashForPlatform():
+  filename = '{platform}.sdk.sha1'.format(platform=GetHostOsFromPlatform())
+
+ # Get the hash of the SDK in chromium.
+ sdk_hash = None
+ hash_file = os.path.join(os.path.dirname(__file__), filename)
+ with open(hash_file, 'r') as f:
+ sdk_hash = f.read().strip()
+
+ # Get the hash of the SDK with the extra prefix.
+  extra_sdk_hash = None
+  extra_hash_file = None
+ if EXTRA_SDK_HASH_PREFIX:
+ extra_hash_file = os.path.join(os.path.dirname(__file__),
+ EXTRA_SDK_HASH_PREFIX + filename)
+ with open(extra_hash_file, 'r') as f:
+ extra_sdk_hash = f.read().strip()
+
+  # If both files are empty, report the error. Returning None makes the
+  # caller treat this as a failure.
+  if not sdk_hash and not extra_sdk_hash:
+    print >>sys.stderr, 'No SHA1 found in {} or {}'.format(
+        hash_file, extra_hash_file)
+    return None
+
+ # Return the newer SDK based on the generation number.
+ sdk_generation = GetSdkGeneration(sdk_hash)
+ extra_sdk_generation = GetSdkGeneration(extra_sdk_hash)
+ if extra_sdk_generation > sdk_generation:
+ return extra_sdk_hash
+ return sdk_hash
+
+def GetBucketForPlatform():
+ return 'gs://fuchsia/sdk/core/{platform}-amd64/'.format(
+      platform=GetHostOsFromPlatform())
+
+
+def EnsureDirExists(path):
+ if not os.path.exists(path):
+ print 'Creating directory %s' % path
+ os.makedirs(path)
+
+
+# Removes previous SDK from the specified path if it's detected there.
+def Cleanup(path):
+ hash_file = os.path.join(path, '.hash')
+ if os.path.exists(hash_file):
+ print 'Removing old SDK from %s.' % path
+ for d in SDK_SUBDIRS:
+ to_remove = os.path.join(path, d)
+ if os.path.isdir(to_remove):
+ shutil.rmtree(to_remove)
+ os.remove(hash_file)
+
+
+# Updates the modification timestamps of |path| and its contents to the
+# current time.
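+# (tarfile preserves the archive's original mtimes on extraction, which can
+# make the unpacked files look stale to timestamp-based build checks.)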
+def UpdateTimestampsRecursive(path):
+ for root, dirs, files in os.walk(path):
+ for f in files:
+ os.utime(os.path.join(root, f), None)
+ for d in dirs:
+ os.utime(os.path.join(root, d), None)
+
+
+def main():
+ if len(sys.argv) != 1:
+ print >>sys.stderr, 'usage: %s' % sys.argv[0]
+ return 1
+
+ # Quietly exit if there's no SDK support for this platform.
+ try:
+ GetHostOsFromPlatform()
+  except Exception:
+ return 0
+
+  # Previously the SDK was unpacked in //third_party/fuchsia-sdk instead of
+  # //third_party/fuchsia-sdk/sdk. Remove the old files if they are still
+  # there.
+ sdk_root = os.path.join(REPOSITORY_ROOT, 'third_party', 'fuchsia-sdk')
+ Cleanup(sdk_root)
+
+ sdk_hash = GetSdkHashForPlatform()
+ if not sdk_hash:
+ return 1
+
+ output_dir = os.path.join(sdk_root, 'sdk')
+
+ hash_filename = os.path.join(output_dir, '.hash')
+ if os.path.exists(hash_filename):
+ with open(hash_filename, 'r') as f:
+ if f.read().strip() == sdk_hash:
+        # Nothing to do. Generate sdk/BUILD.gn anyway, in case the conversion
+        # script changed.
+ subprocess.check_call([os.path.join(sdk_root, 'gen_build_defs.py')])
+ return 0
+
+ print 'Downloading SDK %s...' % sdk_hash
+
+ if os.path.isdir(output_dir):
+ shutil.rmtree(output_dir)
+
+ fd, tmp = tempfile.mkstemp()
+ os.close(fd)
+
+ try:
+ cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'),
+ 'cp', GetBucketForPlatform() + sdk_hash, tmp]
+ subprocess.check_call(cmd)
+ with open(tmp, 'rb') as f:
+ EnsureDirExists(output_dir)
+ tarfile.open(mode='r:gz', fileobj=f).extractall(path=output_dir)
+ finally:
+ os.remove(tmp)
+
+ # Generate sdk/BUILD.gn.
+ subprocess.check_call([os.path.join(sdk_root, 'gen_build_defs.py')])
+
+ with open(hash_filename, 'w') as f:
+ f.write(sdk_hash)
+
+ UpdateTimestampsRecursive(output_dir)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/gdb-add-index b/deps/v8/build/gdb-add-index
new file mode 100755
index 0000000000..73367c8350
--- /dev/null
+++ b/deps/v8/build/gdb-add-index
@@ -0,0 +1,184 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
+
+function usage_exit {
+ echo "Usage: $0 [-f] [-r] [-n] <paths-to-binaries>..."
+ echo " -f forces replacement of an existing index."
+ echo " -r removes the index section."
+  echo " -n don't extract the dependencies of each binary with ldd."
+ echo " e.g., $0 -n out/Debug/lib.unstripped/lib*"
+ echo
+ echo " Set TOOLCHAIN_PREFIX to use a non-default set of binutils."
+ exit 1
+}
+
+# Clean up temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+ trap "" EXIT USR1 # Avoid reentrancy.
+
+ local jobs=$(jobs -p)
+ if [ -n "$jobs" ]; then
+ echo -n "Killing outstanding index jobs..."
+ kill -KILL $(jobs -p)
+ wait
+ echo "done"
+ fi
+
+ if [ -d "$directory" ]; then
+ echo -n "Removing temp directory $directory..."
+ rm -rf "$directory"
+    echo "done"
+ fi
+}
+
+# Add index to one binary.
+function index_one_file {
+ local file=$1
+ local basename=$(basename "$file")
+ local should_index_this_file="${should_index}"
+
+ local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
+ if [[ $readelf_out =~ "gdb_index" ]]; then
+ if $remove_index; then
+ ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
+ echo "Removed index from $basename."
+ else
+ echo "Skipped $basename -- already contains index."
+ should_index_this_file=false
+ fi
+ fi
+
+ if $should_index_this_file; then
+ local start=$(date +"%s%N")
+ echo "Adding index to $basename..."
+
+ ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $directory" \
+ -ex "quit"
+ local index_file="$directory/$basename.gdb-index"
+ if [ -f "$index_file" ]; then
+ ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
+ --set-section-flags .gdb_index=readonly "$file" "$file"
+ local finish=$(date +"%s%N")
+ local elapsed=$(((finish - start) / 1000000))
+ echo " ...$basename indexed. [${elapsed}ms]"
+ else
+ echo " ...$basename unindexable."
+ fi
+ fi
+}
+
+# Functions that, when combined, concurrently index all files in the
+# files_to_index array, which is declared in the main body of the script.
+function async_index {
+ # Start a background subshell to run the index command.
+ {
+ index_one_file $1
+ kill -SIGUSR1 $$ # $$ resolves to the parent script.
+ exit 129 # See comment above wait loop at bottom.
+ } &
+}
+
+cur_file_num=0
+function index_next {
+ if ((cur_file_num >= ${#files_to_index[@]})); then
+ return
+ fi
+
+ async_index "${files_to_index[cur_file_num]}"
+ ((cur_file_num += 1)) || true
+}
+
+########
+### Main body of the script.
+
+remove_index=false
+should_index=true
+should_index_deps=true
+files_to_index=()
+while (($# > 0)); do
+ case "$1" in
+ -h)
+ usage_exit
+ ;;
+ -f)
+ remove_index=true
+ ;;
+ -r)
+ remove_index=true
+ should_index=false
+ ;;
+ -n)
+ should_index_deps=false
+ ;;
+ -*)
+ echo "Invalid option: $1" >&2
+ usage_exit
+ ;;
+ *)
+ if [[ ! -f "$1" ]]; then
+ echo "Path $1 does not exist."
+ exit 1
+ fi
+ files_to_index+=("$1")
+ ;;
+ esac
+ shift
+done
+
+if ((${#files_to_index[@]} == 0)); then
+ usage_exit
+fi
+
+dependencies=()
+if $should_index_deps; then
+ for file in "${files_to_index[@]}"; do
+ # Append the shared library dependencies of this file that
+ # have the same dirname. The dirname is a signal that these
+ # shared libraries were part of the same build as the binary.
+ dependencies+=( \
+ $(ldd "$file" 2>/dev/null \
+ | grep $(dirname "$file") \
+ | sed "s/.*[ \t]\(.*\) (.*/\1/") \
+ )
+ done
+fi
+files_to_index+=("${dependencies[@]}")
+
+# Ensure we clean up on exit.
+trap on_exit EXIT INT
+
+# We're good to go! Create temp directory for index files.
+directory=$(mktemp -d)
+echo "Made temp directory $directory."
+
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+index_tasks=${INDEX_TASKS:-4}
+for ((i = 0; i < index_tasks; i++)); do
+ index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to treat a > 128 exit code as an
+# indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e, since technically the "wait" is failing.
+wait
+while (($? > 128)); do
+ wait
+done
diff --git a/deps/v8/build/get_landmines.py b/deps/v8/build/get_landmines.py
new file mode 100755
index 0000000000..a32ab9937c
--- /dev/null
+++ b/deps/v8/build/get_landmines.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+from __future__ import print_function
+
+import sys
+
+import landmine_utils
+
+
+host_os = landmine_utils.host_os
+
+
+def print_landmines():
+ """
+ ALL LANDMINES ARE EMITTED FROM HERE.
+ """
+  # DO NOT add landmines as part of a regular CL. Landmines are a last-resort
+  # band-aid fix for when a CL that landed has a build dependency bug and all
+  # bots need to be cleaned up. If you're writing a new CL that causes build
+ # dependency problems, fix the dependency problems instead of adding a
+ # landmine.
+ #
+ # Before adding or changing a landmine consider the consequences of doing so.
+ # Doing so will wipe out every output directory on every Chrome developer's
+ # machine. This can be particularly problematic on Windows where the directory
+ # deletion may well fail (locked files, command prompt in the directory,
+ # etc.), and generated .sln and .vcxproj files will be deleted.
+ #
+ # This output directory deletion will be repeated when going back and forth
+ # across the change that added the landmine, adding to the cost. There are
+ # usually less troublesome alternatives.
+
+ if host_os() == 'win':
+ print('Compile on cc_unittests fails due to symbols removed in r185063.')
+ if host_os() == 'linux':
+ print('Builders switching from make to ninja will clobber on this.')
+ if host_os() == 'mac':
+ print('Switching from bundle to unbundled dylib (issue 14743002).')
+ if host_os() in ('win', 'mac'):
+ print('Improper dependency for create_nmf.py broke in r240802, '
+ 'fixed in r240860.')
+ if host_os() == 'win':
+ print('Switch to VS2015 Update 3, 14393 SDK')
+ print('Need to clobber everything due to an IDL change in r154579 (blink)')
+ print('Need to clobber everything due to gen file moves in r175513 (Blink)')
+    print('Clobber to get rid of obsolete test plugin after r248358')
+ print('Clobber to rebuild GN files for V8')
+ print('Clobber to get rid of stale generated mojom.h files')
+ print('Need to clobber everything due to build_nexe change in nacl r13424')
+ print(
+ '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...')
+ print('blink_resources.grd changed: crbug.com/400860')
+ print('ninja dependency cycle: crbug.com/408192')
+ print('Clobber to fix missing NaCl gyp dependencies (crbug.com/427427).')
+ print('Another clobber for missing NaCl gyp deps (crbug.com/427427).')
+ print(
+ 'Clobber to fix GN not picking up increased ID range (crbug.com/444902)')
+ print('Remove NaCl toolchains from the output dir (crbug.com/456902)')
+ if host_os() == 'win':
+ print('Clobber to delete stale generated files (crbug.com/510086)')
+ if host_os() == 'mac':
+ print('Clobber to get rid of evil libsqlite3.dylib (crbug.com/526208)')
+ if host_os() == 'mac':
+ print('Clobber to remove libsystem.dylib. See crbug.com/620075')
+ if host_os() == 'mac':
+ print('Clobber to get past mojo gen build error (crbug.com/679607)')
+ if host_os() == 'win':
+ print('Clobber Windows to fix strange PCH-not-rebuilt errors.')
+    print('Clobber all to fix GN breakage (crbug.com/736215)')
+ print('The Great Blink mv for source files (crbug.com/768828)')
+ if host_os() == 'linux':
+ print('Clobber to workaround buggy .ninja_deps cycle (crbug.com/934404)')
+
+
+def main():
+ print_landmines()
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/git-hooks/OWNERS b/deps/v8/build/git-hooks/OWNERS
new file mode 100644
index 0000000000..3e327dc711
--- /dev/null
+++ b/deps/v8/build/git-hooks/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+szager@chromium.org
+cmp@chromium.org
diff --git a/deps/v8/build/git-hooks/pre-commit b/deps/v8/build/git-hooks/pre-commit
new file mode 100755
index 0000000000..41b596344c
--- /dev/null
+++ b/deps/v8/build/git-hooks/pre-commit
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+submodule_diff() {
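+  # Mode 160000 identifies gitlink (submodule) entries in git diff output.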
+ if test -n "$2"; then
+ git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
+ else
+ git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
+ fi
+}
+
+if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
+ merge_base=$(git merge-base HEAD MERGE_HEAD)
+ if test -z "$(submodule_diff $merge_base HEAD)"; then
+ # Most up-to-date submodules are in MERGE_HEAD.
+ head_ref=MERGE_HEAD
+ else
+ # Most up-to-date submodules are in HEAD.
+ head_ref=HEAD
+ fi
+else
+ # No merge in progress. Submodules must match HEAD.
+ head_ref=HEAD
+fi
+
+submods=$(submodule_diff $head_ref)
+if test "$submods"; then
+ echo "You are trying to commit changes to the following submodules:" 1>&2
+ echo 1>&2
+ echo $submods | cut -d ' ' -f 6 | sed 's/^/ /g' 1>&2
+ cat <<EOF 1>&2
+
+Submodule commits are not allowed. Please run:
+
+ git status --ignore-submodules=dirty
+
+and/or:
+
+ git diff-index --cached --ignore-submodules=dirty HEAD
+
+... to see what's in your index.
+
+If you're really and truly trying to roll the version of a submodule, you should
+commit the new version to DEPS, instead.
+EOF
+ exit 1
+fi
+
+gitmodules_diff() {
+ git diff-index --cached "$1" .gitmodules
+}
+
+if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
+ cat <<EOF 1>&2
+You are trying to commit a change to .gitmodules. That is not allowed.
+To make changes to submodule names/paths, edit DEPS.
+EOF
+ exit 1
+fi
+
+exit 0
diff --git a/deps/v8/build/gn_helpers.py b/deps/v8/build/gn_helpers.py
new file mode 100644
index 0000000000..53543e669a
--- /dev/null
+++ b/deps/v8/build/gn_helpers.py
@@ -0,0 +1,369 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that integrate with GN.
+
+The main functions are ToGNString and FromGNString, which convert between
+serialized GN variables and Python variables.
+
+To use in an arbitrary Python file in the build:
+
+ import os
+ import sys
+
+ sys.path.append(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, "build"))
+ import gn_helpers
+
+Where the sequence of parameters to join is the relative path from your source
+file to the build directory."""
+
+import sys
+
+
+class GNException(Exception):
+ pass
+
+
+def ToGNString(value, allow_dicts=True):
+ """Returns a stringified GN equivalent of the Python value.
+
+ allow_dicts indicates if this function will allow converting dictionaries
+  to GN scopes. This is only possible at the top level; you can't nest a
+ GN scope in a list, so this should be set to False for recursive calls."""
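+  # For example, ToGNString([1, 'two', True]) returns '[ 1, "two", true ]'.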
+ if isinstance(value, str):
+ if value.find('\n') >= 0:
+ raise GNException("Trying to print a string with a newline in it.")
+ return '"' + \
+ value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \
+ '"'
+
+ if sys.version_info.major < 3 and isinstance(value, unicode):
+ return ToGNString(value.encode('utf-8'))
+
+ if isinstance(value, bool):
+ if value:
+ return "true"
+ return "false"
+
+ if isinstance(value, list):
+ return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+ if isinstance(value, dict):
+ if not allow_dicts:
+ raise GNException("Attempting to recursively print a dictionary.")
+ result = ""
+ for key in sorted(value):
+ if not isinstance(key, str) and not isinstance(key, unicode):
+ raise GNException("Dictionary key is not a string.")
+ result += "%s = %s\n" % (key, ToGNString(value[key], False))
+ return result
+
+ if isinstance(value, int):
+ return str(value)
+
+ raise GNException("Unsupported type when printing to GN.")
+
+
+def FromGNString(input_string):
+ """Converts the input string from a GN serialized value to Python values.
+
+ For details on supported types see GNValueParser.Parse() below.
+
+ If your GN script did:
+ something = [ "file1", "file2" ]
+ args = [ "--values=$something" ]
+ The command line would look something like:
+ --values="[ \"file1\", \"file2\" ]"
+ Which when interpreted as a command line gives the value:
+ [ "file1", "file2" ]
+
+ You can parse this into a Python list using GN rules with:
+    input_values = FromGNString(options.values)
+ Although the Python 'ast' module will parse many forms of such input, it
+ will not handle GN escaping properly, nor GN booleans. You should use this
+ function instead.
+
+
+ A NOTE ON STRING HANDLING:
+
+ If you just pass a string on the command line to your Python script, or use
+ string interpolation on a string variable, the strings will not be quoted:
+ str = "asdf"
+ args = [ str, "--value=$str" ]
+ Will yield the command line:
+ asdf --value=asdf
+ The unquoted asdf string will not be valid input to this function, which
+ accepts only quoted strings like GN scripts. In such cases, you can just use
+ the Python string literal directly.
+
+ The main use cases for this is for other types, in particular lists. When
+ using string interpolation on a list (as in the top example) the embedded
+ strings will be quoted and escaped according to GN rules so the list can be
+ re-parsed to get the same result."""
+ parser = GNValueParser(input_string)
+ return parser.Parse()
+
+
+def FromGNArgs(input_string):
+ """Converts a string with a bunch of gn arg assignments into a Python dict.
+
+ Given a whitespace-separated list of
+
+ <ident> = (integer | string | boolean | <list of the former>)
+
+ gn assignments, this returns a Python dict, i.e.:
+
+ FromGNArgs("foo=true\nbar=1\n") -> { 'foo': True, 'bar': 1 }.
+
+ Only simple types and lists supported; variables, structs, calls
+ and other, more complicated things are not.
+
+ This routine is meant to handle only the simple sorts of values that
+ arise in parsing --args.
+ """
+ parser = GNValueParser(input_string)
+ return parser.ParseArgs()
+
+
+def UnescapeGNString(value):
+ """Given a string with GN escaping, returns the unescaped string.
+
+  Be careful not to feed this function input that came from a Python parsing
+  function like 'ast', because 'ast' does Python unescaping, which will be
+  incorrect when fed into the GN unescaper."""
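+  # For example, UnescapeGNString('\\"quoted\\"') returns '"quoted"'.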
+ result = ''
+ i = 0
+ while i < len(value):
+ if value[i] == '\\':
+ if i < len(value) - 1:
+ next_char = value[i + 1]
+ if next_char in ('$', '"', '\\'):
+ # These are the escaped characters GN supports.
+ result += next_char
+ i += 1
+ else:
+ # Any other backslash is a literal.
+ result += '\\'
+ else:
+ result += value[i]
+ i += 1
+ return result
+
+
+def _IsDigitOrMinus(char):
+ return char in "-0123456789"
+
+
+class GNValueParser(object):
+ """Duplicates GN parsing of values and converts to Python types.
+
+  Normally you would use the wrapper function FromGNString() above.
+
+ If you expect input as a specific type, you can also call one of the Parse*
+ functions directly. All functions throw GNException on invalid input. """
+ def __init__(self, string):
+ self.input = string
+ self.cur = 0
+
+ def IsDone(self):
+ return self.cur == len(self.input)
+
+ def ConsumeWhitespace(self):
+ while not self.IsDone() and self.input[self.cur] in ' \t\n':
+ self.cur += 1
+
+ def ConsumeComment(self):
+ if self.IsDone() or self.input[self.cur] != '#':
+ return
+
+ # Consume each comment, line by line.
+ while not self.IsDone() and self.input[self.cur] == '#':
+ # Consume the rest of the comment, up until the end of the line.
+ while not self.IsDone() and self.input[self.cur] != '\n':
+ self.cur += 1
+ # Move the cursor to the next line (if there is one).
+ if not self.IsDone():
+ self.cur += 1
+
+ def Parse(self):
+ """Converts a string representing a printed GN value to the Python type.
+
+ See additional usage notes on FromGNString above.
+
+ - GN booleans ('true', 'false') will be converted to Python booleans.
+
+ - GN numbers ('123') will be converted to Python numbers.
+
+ - GN strings (double-quoted as in '"asdf"') will be converted to Python
+ strings with GN escaping rules. GN string interpolation (embedded
+ variables preceded by $) are not supported and will be returned as
+ literals.
+
+ - GN lists ('[1, "asdf", 3]') will be converted to Python lists.
+
+ - GN scopes ('{ ... }') are not supported."""
+ result = self._ParseAllowTrailing()
+ self.ConsumeWhitespace()
+ if not self.IsDone():
+ raise GNException("Trailing input after parsing:\n " +
+ self.input[self.cur:])
+ return result
+
+ def ParseArgs(self):
+ """Converts a whitespace-separated list of ident=literals to a dict.
+
+ See additional usage notes on FromGNArgs, above.
+ """
+ d = {}
+
+ self.ConsumeWhitespace()
+ self.ConsumeComment()
+ while not self.IsDone():
+ ident = self._ParseIdent()
+ self.ConsumeWhitespace()
+ if self.input[self.cur] != '=':
+ raise GNException("Unexpected token: " + self.input[self.cur:])
+ self.cur += 1
+ self.ConsumeWhitespace()
+ val = self._ParseAllowTrailing()
+ self.ConsumeWhitespace()
+ self.ConsumeComment()
+ d[ident] = val
+
+ return d
+
+ def _ParseAllowTrailing(self):
+ """Internal version of Parse that doesn't check for trailing stuff."""
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException("Expected input to parse.")
+
+ next_char = self.input[self.cur]
+ if next_char == '[':
+ return self.ParseList()
+ elif _IsDigitOrMinus(next_char):
+ return self.ParseNumber()
+ elif next_char == '"':
+ return self.ParseString()
+ elif self._ConstantFollows('true'):
+ return True
+ elif self._ConstantFollows('false'):
+ return False
+ else:
+ raise GNException("Unexpected token: " + self.input[self.cur:])
+
+ def _ParseIdent(self):
+ ident = ''
+
+ next_char = self.input[self.cur]
+    if not next_char.isalpha() and not next_char == '_':
+ raise GNException("Expected an identifier: " + self.input[self.cur:])
+
+ ident += next_char
+ self.cur += 1
+
+ next_char = self.input[self.cur]
+    while next_char.isalpha() or next_char.isdigit() or next_char == '_':
+ ident += next_char
+ self.cur += 1
+ next_char = self.input[self.cur]
+
+ return ident
+
+ def ParseNumber(self):
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException('Expected number but got nothing.')
+
+ begin = self.cur
+
+ # The first character can include a negative sign.
+ if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]):
+ self.cur += 1
+ while not self.IsDone() and self.input[self.cur].isdigit():
+ self.cur += 1
+
+ number_string = self.input[begin:self.cur]
+ if not len(number_string) or number_string == '-':
+ raise GNException("Not a valid number.")
+ return int(number_string)
+
+ def ParseString(self):
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException('Expected string but got nothing.')
+
+ if self.input[self.cur] != '"':
+ raise GNException('Expected string beginning in a " but got:\n ' +
+ self.input[self.cur:])
+ self.cur += 1 # Skip over quote.
+
+ begin = self.cur
+ while not self.IsDone() and self.input[self.cur] != '"':
+ if self.input[self.cur] == '\\':
+ self.cur += 1 # Skip over the backslash.
+ if self.IsDone():
+ raise GNException("String ends in a backslash in:\n " +
+ self.input)
+ self.cur += 1
+
+ if self.IsDone():
+ raise GNException('Unterminated string:\n ' + self.input[begin:])
+
+ end = self.cur
+ self.cur += 1 # Consume trailing ".
+
+ return UnescapeGNString(self.input[begin:end])
+
+ def ParseList(self):
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException('Expected list but got nothing.')
+
+ # Skip over opening '['.
+ if self.input[self.cur] != '[':
+ raise GNException("Expected [ for list but got:\n " +
+ self.input[self.cur:])
+ self.cur += 1
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException("Unterminated list:\n " + self.input)
+
+ list_result = []
+ previous_had_trailing_comma = True
+ while not self.IsDone():
+ if self.input[self.cur] == ']':
+ self.cur += 1 # Skip over ']'.
+ return list_result
+
+ if not previous_had_trailing_comma:
+ raise GNException("List items not separated by comma.")
+
+ list_result += [ self._ParseAllowTrailing() ]
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ break
+
+ # Consume comma if there is one.
+ previous_had_trailing_comma = self.input[self.cur] == ','
+ if previous_had_trailing_comma:
+ # Consume comma.
+ self.cur += 1
+ self.ConsumeWhitespace()
+
+ raise GNException("Unterminated list:\n " + self.input)
+
+ def _ConstantFollows(self, constant):
+ """Returns true if the given constant follows immediately at the current
+ location in the input. If it does, the text is consumed and the function
+ returns true. Otherwise, returns false and the current position is
+ unchanged."""
+ end = self.cur + len(constant)
+ if end > len(self.input):
+ return False # Not enough room.
+ if self.input[self.cur:end] == constant:
+ self.cur = end
+ return True
+ return False
diff --git a/deps/v8/build/gn_helpers_unittest.py b/deps/v8/build/gn_helpers_unittest.py
new file mode 100644
index 0000000000..43c084b3aa
--- /dev/null
+++ b/deps/v8/build/gn_helpers_unittest.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gn_helpers
+import unittest
+
+class UnitTest(unittest.TestCase):
+ def test_ToGNString(self):
+ self.assertEqual(
+ gn_helpers.ToGNString([1, 'two', [ '"thr$\\', True, False, [] ]]),
+ '[ 1, "two", [ "\\"thr\\$\\\\", true, false, [ ] ] ]')
+
+ def test_UnescapeGNString(self):
+    # Backslash followed by a \, $, or " means the following character without
+    # the special meaning. Backslash followed by everything else is a literal.
+ self.assertEqual(
+ gn_helpers.UnescapeGNString('\\as\\$\\\\asd\\"'),
+ '\\as$\\asd"')
+
+ def test_FromGNString(self):
+ self.assertEqual(
+ gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'),
+ [ 1, -20, True, False, [ 'as"', [] ] ])
+
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('123 456')
+ parser.Parse()
+
+ def test_ParseBool(self):
+ parser = gn_helpers.GNValueParser('true')
+ self.assertEqual(parser.Parse(), True)
+
+ parser = gn_helpers.GNValueParser('false')
+ self.assertEqual(parser.Parse(), False)
+
+ def test_ParseNumber(self):
+ parser = gn_helpers.GNValueParser('123')
+ self.assertEqual(parser.ParseNumber(), 123)
+
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('')
+ parser.ParseNumber()
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('a123')
+ parser.ParseNumber()
+
+ def test_ParseString(self):
+ parser = gn_helpers.GNValueParser('"asdf"')
+ self.assertEqual(parser.ParseString(), 'asdf')
+
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('') # Empty.
+ parser.ParseString()
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('asdf') # Unquoted.
+ parser.ParseString()
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('"trailing') # Unterminated.
+ parser.ParseString()
+
+ def test_ParseList(self):
+ parser = gn_helpers.GNValueParser('[1,]') # Optional end comma OK.
+ self.assertEqual(parser.ParseList(), [ 1 ])
+
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('') # Empty.
+ parser.ParseList()
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('asdf') # No [].
+ parser.ParseList()
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('[1, 2') # Unterminated
+ parser.ParseList()
+ with self.assertRaises(gn_helpers.GNException):
+ parser = gn_helpers.GNValueParser('[1 2]') # No separating comma.
+ parser.ParseList()
+
+ def test_FromGNArgs(self):
+    # Booleans and numbers should work; whitespace is allowed.
+ self.assertEqual(gn_helpers.FromGNArgs('foo = true\nbar = 1\n'),
+ {'foo': True, 'bar': 1})
+
+ # Whitespace is not required; strings should also work.
+ self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
+ {'foo': 'bar baz'})
+
+ # Comments should work (and be ignored).
+ gn_args_lines = [
+ '# Top-level comment.',
+ 'foo = true',
+ 'bar = 1 # In-line comment.',
+ ]
+ self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)),
+ {'foo': True, 'bar': 1})
+
+ # Lists should work.
+ self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
+ {'foo': [1, 2, 3]})
+
+ # Empty strings should return an empty dict.
+ self.assertEqual(gn_helpers.FromGNArgs(''), {})
+ self.assertEqual(gn_helpers.FromGNArgs(' \n '), {})
+
+ # Non-identifiers should raise an exception.
+ with self.assertRaises(gn_helpers.GNException):
+ gn_helpers.FromGNArgs('123 = true')
+
+ # References to other variables should raise an exception.
+ with self.assertRaises(gn_helpers.GNException):
+ gn_helpers.FromGNArgs('foo = bar')
+
+ # References to functions should raise an exception.
+ with self.assertRaises(gn_helpers.GNException):
+ gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")')
+
+ # Underscores in identifiers should work.
+ self.assertEqual(gn_helpers.FromGNArgs('_foo = true'),
+ {'_foo': True})
+ self.assertEqual(gn_helpers.FromGNArgs('foo_bar = true'),
+ {'foo_bar': True})
+ self.assertEqual(gn_helpers.FromGNArgs('foo_=true'),
+ {'foo_': True})
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/gn_run_binary.py b/deps/v8/build/gn_run_binary.py
new file mode 100644
index 0000000000..d1685a346a
--- /dev/null
+++ b/deps/v8/build/gn_run_binary.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+ python gn_run_binary.py <binary_name> [args ...]
+"""
+
+import os
+import subprocess
+import sys
+
+# This script is designed to run binaries produced by the current build. We
+# prefix relative paths with "./" to avoid picking up system versions that
+# might also be on the PATH.
+path = sys.argv[1]
+if not os.path.isabs(path):
+ path = './' + path
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+ret = subprocess.call(args)
+if ret != 0:
+ if ret <= -100:
+ # Windows error codes such as 0xC0000005 and 0xC0000409 are much easier to
+ # recognize and differentiate in hex. In order to print them as unsigned
+ # hex we need to add 4 Gig to them.
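+    # For example, ret == -1073741819 prints as 0xC0000005.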
+ print '%s failed with exit code 0x%08X' % (sys.argv[1], ret + (1 << 32))
+ else:
+ print '%s failed with exit code %d' % (sys.argv[1], ret)
+sys.exit(ret)
diff --git a/deps/v8/build/install-build-deps-android.sh b/deps/v8/build/install-build-deps-android.sh
new file mode 100755
index 0000000000..15fea8517e
--- /dev/null
+++ b/deps/v8/build/install-build-deps-android.sh
@@ -0,0 +1,74 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
+# See https://www.chromium.org/developers/how-tos/android-build-instructions
+
+args=("$@")
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+ echo "Only x86 architectures are currently supported" >&2
+  exit 1
+fi
+
+# Exit if any commands fail.
+set -e
+
+lsb_release=$(lsb_release --codename --short)
+
+# First, install the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt \
+  "${args[@]}"
+
+# Fix deps
+sudo apt-get -f install
+
+# common
+sudo apt-get -y install lib32z1 lighttpd python-pexpect xvfb x11-utils
+
+# Some binaries in the Android SDK require 32-bit libraries on the host.
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
+sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
+
+# Required for apk-patch-size-estimator
+sudo apt-get -y install bsdiff
+
+# Do our own error handling for java.
+set +e
+
+function IsJava8() {
+ # Arg is either "java" or "javac"
+ $1 -version 2>&1 | grep -q '1\.8'
+}
+
+if ! (IsJava8 java && IsJava8 javac); then
+ sudo apt-get -y install openjdk-8-jre openjdk-8-jdk
+fi
+
+# There can be several reasons why Java 8 is not the default despite being
+# installed. Just show an error and exit.
+if ! (IsJava8 java && IsJava8 javac); then
+ echo
+ echo "Automatic java installation failed."
+ echo '`java -version` reports:'
+ java -version
+ echo
+ echo '`javac -version` reports:'
+ javac -version
+ echo
+ echo "Please ensure that JDK 8 is installed and resolves first in your PATH."
+ echo -n '`which java` reports: '
+ which java
+ echo -n '`which javac` reports: '
+ which javac
+ echo
+ echo "You might also try running:"
+ echo " sudo update-java-alternatives -s java-1.8.0-openjdk-amd64"
+ exit 1
+fi
+
+echo "install-build-deps-android.sh complete."
diff --git a/deps/v8/build/install-build-deps.sh b/deps/v8/build/install-build-deps.sh
new file mode 100755
index 0000000000..8e14d2caad
--- /dev/null
+++ b/deps/v8/build/install-build-deps.sh
@@ -0,0 +1,717 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See https://chromium.googlesource.com/chromium/src/+/master/docs/linux_build_instructions.md
+
+usage() {
+ echo "Usage: $0 [--options]"
+ echo "Options:"
+ echo "--[no-]syms: enable or disable installation of debugging symbols"
+ echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+ echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+ echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+ "fonts"
+ echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+ "building standalone NaCl and all its toolchains"
+  echo "--[no-]backwards-compatible: enable or disable installation of"\
+       "packages that are no longer currently needed and have been removed"\
+       "from this script. Useful for bisection."
+ echo "--no-prompt: silently select standard options/defaults"
+ echo "--quick-check: quickly try to determine if dependencies are installed"
+ echo " (this avoids interactive prompts and sudo commands,"
+ echo " so might not be 100% accurate)"
+ echo "--unsupported: attempt installation even on unsupported systems"
+ echo "Script will prompt interactively if options not given."
+ exit 1
+}
+
+# Build list of apt packages in dpkg --get-selections format.
+build_apt_package_list() {
+ echo "Building apt package list." >&2
+ apt-cache dumpavail | \
+ python -c '\
+ import re,sys; \
+ o = sys.stdin.read(); \
+ p = {"i386": ":i386"}; \
+ f = re.M | re.S; \
+ r = re.compile(r"^Package: (.+?)$.+?^Architecture: (.+?)$", f); \
+ m = ["%s%s" % (x, p.get(y, "")) for x, y in re.findall(r, o)]; \
+ print "\n".join(m)'
+}
+
+# Checks whether a particular package is available in the repos.
+# Uses pre-formatted ${apt_package_list}.
+# USAGE: $ package_exists <package name>
+package_exists() {
+ if [ -z "${apt_package_list}" ]; then
+ echo "Call build_apt_package_list() prior to calling package_exists()" >&2
+ apt_package_list=$(build_apt_package_list)
+ fi
+  # The package list is grepped with the name as a regex, so e.g. the +'s in
+  # packages like "libstdc++" need to be escaped.
+ local escaped="$(echo $1 | sed 's/[\~\+\.\:-]/\\&/g')"
+ [ ! -z "$(grep "^${escaped}$" <<< "${apt_package_list}")" ]
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode. Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while [ "$1" != "" ]
+do
+ case "$1" in
+ --syms) do_inst_syms=1;;
+ --no-syms) do_inst_syms=0;;
+ --lib32) do_inst_lib32=1;;
+ --arm) do_inst_arm=1;;
+ --no-arm) do_inst_arm=0;;
+ --chromeos-fonts) do_inst_chromeos_fonts=1;;
+ --no-chromeos-fonts) do_inst_chromeos_fonts=0;;
+ --nacl) do_inst_nacl=1;;
+ --no-nacl) do_inst_nacl=0;;
+ --backwards-compatible) do_inst_backwards_compatible=1;;
+ --no-backwards-compatible) do_inst_backwards_compatible=0;;
+ --add-cross-tool-repo) add_cross_tool_repo=1;;
+ --no-prompt) do_default=1
+ do_quietly="-qq --assume-yes"
+ ;;
+ --quick-check) do_quick_check=1;;
+ --unsupported) do_unsupported=1;;
+ *) usage;;
+ esac
+ shift
+done
+
+if [ "$do_inst_arm" = "1" ]; then
+ do_inst_lib32=1
+fi
+
+# Check for lsb_release command in $PATH
+if ! which lsb_release > /dev/null; then
+ echo "ERROR: lsb_release not found in \$PATH" >&2
+ exit 1;
+fi
+
+distro_codename=$(lsb_release --codename --short)
+distro_id=$(lsb_release --id --short)
+supported_codenames="(trusty|xenial|artful|bionic)"
+supported_ids="(Debian)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+ if [[ ! $distro_codename =~ $supported_codenames &&
+ ! $distro_id =~ $supported_ids ]]; then
+ echo -e "ERROR: The only supported distros are\n" \
+ "\tUbuntu 14.04 LTS (trusty)\n" \
+ "\tUbuntu 16.04 LTS (xenial)\n" \
+ "\tUbuntu 17.10 (artful)\n" \
+ "\tUbuntu 18.04 LTS (bionic)\n" \
+ "\tDebian 8 (jessie) or later" >&2
+ exit 1
+ fi
+
+ if ! uname -m | egrep -q "i686|x86_64"; then
+ echo "Only x86 architectures are currently supported" >&2
+ exit
+    exit 1
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+ echo "Running as non-root user."
+ echo "You might have to enter your password one or more times for 'sudo'."
+ echo
+fi
+
+if [ "$do_inst_lib32" = "1" ] || [ "$do_inst_nacl" = "1" ]; then
+ sudo dpkg --add-architecture i386
+fi
+sudo apt-get update
+
+# Populate ${apt_package_list} for package_exists() parsing.
+apt_package_list=$(build_apt_package_list)
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev"
+
+if package_exists realpath; then
+ chromeos_dev_list="${chromeos_dev_list} realpath"
+fi
+
+# Packages needed for development
+dev_list="\
+ binutils
+ bison
+ bzip2
+ cdbs
+ curl
+ dbus-x11
+ dpkg-dev
+ elfutils
+ devscripts
+ fakeroot
+ flex
+ g++
+ git-core
+ git-svn
+ gperf
+ libappindicator3-dev
+ libasound2-dev
+ libatspi2.0-dev
+ libbrlapi-dev
+ libbz2-dev
+ libcairo2-dev
+ libcap-dev
+ libc6-dev
+ libcups2-dev
+ libcurl4-gnutls-dev
+ libdrm-dev
+ libelf-dev
+ libffi-dev
+ libgbm-dev
+ libglib2.0-dev
+ libglu1-mesa-dev
+ libgnome-keyring-dev
+ libgtk-3-dev
+ libkrb5-dev
+ libnspr4-dev
+ libnss3-dev
+ libpam0g-dev
+ libpci-dev
+ libpulse-dev
+ libsctp-dev
+ libspeechd-dev
+ libsqlite3-dev
+ libssl-dev
+ libudev-dev
+ libwww-perl
+ libxslt1-dev
+ libxss-dev
+ libxt-dev
+ libxtst-dev
+ locales
+ openbox
+ p7zip
+ patch
+ perl
+ pkg-config
+ python
+ python-cherrypy3
+ python-crypto
+ python-dev
+ python-numpy
+ python-opencv
+ python-openssl
+ python-psutil
+ python-yaml
+ rpm
+ ruby
+ subversion
+ uuid-dev
+ wdiff
+ x11-utils
+ xcompmgr
+ xz-utils
+ zip
+ $chromeos_dev_list
+"
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+ dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
+fi
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0"
+
+# List of required run-time libraries
+common_lib_list="\
+ libappindicator3-1
+ libasound2
+ libatk1.0-0
+ libatspi2.0-0
+ libc6
+ libcairo2
+ libcap2
+ libcups2
+ libexpat1
+ libffi6
+ libfontconfig1
+ libfreetype6
+ libglib2.0-0
+ libgnome-keyring0
+ libgtk-3-0
+ libpam0g
+ libpango1.0-0
+ libpci3
+ libpcre3
+ libpixman-1-0
+ libspeechd2
+ libstdc++6
+ libsqlite3-0
+ libuuid1
+ libwayland-egl1-mesa
+ libx11-6
+ libx11-xcb1
+ libxau6
+ libxcb1
+ libxcomposite1
+ libxcursor1
+ libxdamage1
+ libxdmcp6
+ libxext6
+ libxfixes3
+ libxi6
+ libxinerama1
+ libxrandr2
+ libxrender1
+ libxtst6
+ zlib1g
+"
+
+# Full list of required run-time libraries
+lib_list="\
+ $common_lib_list
+ $chromeos_lib_list
+"
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
+lib32_list="linux-libc-dev:i386 libpci3:i386"
+
+# 32-bit libraries needed for a 32-bit build
+lib32_list="$lib32_list libx11-xcb1:i386"
+
+# Packages that have been removed from this script. Regardless of configuration
+# or options passed to this script, whenever a package is removed, it should be
+# added here.
+backwards_compatible_list="\
+ 7za
+ fonts-indic
+ fonts-ipafont
+ fonts-stix
+ fonts-thai-tlwg
+ fonts-tlwg-garuda
+ language-pack-da
+ language-pack-fr
+ language-pack-he
+ language-pack-zh-hant
+ libappindicator-dev
+ libappindicator1
+ libdconf-dev
+ libdconf1
+ libdconf1:i386
+ libexif-dev
+ libexif12
+ libexif12:i386
+ libgbm-dev
+ libgconf-2-4:i386
+ libgconf2-dev
+ libgl1-mesa-dev
+ libgl1-mesa-glx:i386
+ libgles2-mesa-dev
+ libgtk-3-0:i386
+ libgtk2.0-0
+ libgtk2.0-0:i386
+ libgtk2.0-dev
+ mesa-common-dev
+ msttcorefonts
+ ttf-dejavu-core
+ ttf-indic-fonts
+ ttf-kochi-gothic
+ ttf-kochi-mincho
+ ttf-mscorefonts-installer
+ xfonts-mathml
+"
+case $distro_codename in
+ trusty)
+ backwards_compatible_list+=" \
+ libgbm-dev-lts-trusty
+ libgl1-mesa-dev-lts-trusty
+ libgl1-mesa-glx-lts-trusty:i386
+ libgles2-mesa-dev-lts-trusty
+ mesa-common-dev-lts-trusty"
+ ;;
+ xenial)
+ backwards_compatible_list+=" \
+ libgbm-dev-lts-xenial
+ libgl1-mesa-dev-lts-xenial
+ libgl1-mesa-glx-lts-xenial:i386
+ libgles2-mesa-dev-lts-xenial
+ mesa-common-dev-lts-xenial"
+ ;;
+esac
+
+# arm cross toolchain packages needed to build chrome on armhf
+EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
+EM_SOURCE=$(cat <<EOF
+# Repo added by Chromium $0
+${EM_REPO}
+# deb-src http://emdebian.org/tools/debian/ jessie main
+EOF
+)
+EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
+GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
+case $distro_codename in
+ jessie)
+ eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
+ CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
+ arm_list="libc6-dev:armhf
+ linux-libc-dev:armhf"
+ if [ "$do_inst_arm" = "1" ]; then
+    if dpkg-query -W ${GPP_ARM_PACKAGE} &>/dev/null; then
+ arm_list+=" ${GPP_ARM_PACKAGE}"
+ else
+ if [ "${add_cross_tool_repo}" = "1" ]; then
+ gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
+ gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
+ if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
+ echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
+ fi
+ arm_list+=" ${GPP_ARM_PACKAGE}"
+ else
+ echo "The Debian Cross-toolchains repository is necessary to"
+ echo "cross-compile Chromium for arm."
+ echo "Rerun with --add-deb-cross-tool-repo to have it added for you."
+ fi
+ fi
+ fi
+ ;;
+ # All necessary ARM packages are available on the default repos on
+ # Debian 9 and later.
+ *)
+ arm_list="libc6-dev-armhf-cross
+ linux-libc-dev-armhf-cross
+ ${GPP_ARM_PACKAGE}"
+ ;;
+esac
+
+# Work around a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
+case $distro_codename in
+ trusty)
+ arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
+ gcc-4.8-multilib-arm-linux-gnueabihf"
+ ;;
+ xenial|artful|bionic)
+ arm_list+=" g++-5-multilib-arm-linux-gnueabihf
+ gcc-5-multilib-arm-linux-gnueabihf
+ gcc-arm-linux-gnueabihf"
+ ;;
+esac
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="\
+ g++-mingw-w64-i686
+ lib32z1-dev
+ libasound2:i386
+ libcap2:i386
+ libelf-dev:i386
+ libfontconfig1:i386
+ libglib2.0-0:i386
+ libgpm2:i386
+ libncurses5:i386
+ lib32ncurses5-dev
+ libnss3:i386
+ libpango1.0-0:i386
+ libssl-dev:i386
+ libtinfo-dev
+ libtinfo-dev:i386
+ libtool
+ libuuid1:i386
+ libxcomposite1:i386
+ libxcursor1:i386
+ libxdamage1:i386
+ libxi6:i386
+ libxrandr2:i386
+ libxss1:i386
+ libxtst6:i386
+ texinfo
+ xvfb
+ ${naclports_list}
+"
+
+if package_exists libssl1.1; then
+ nacl_list="${nacl_list} libssl1.1:i386"
+elif package_exists libssl1.0.2; then
+ nacl_list="${nacl_list} libssl1.0.2:i386"
+else
+ nacl_list="${nacl_list} libssl1.0.0:i386"
+fi
+
+# Some package names have changed over time
+if package_exists libpng16-16; then
+ lib_list="${lib_list} libpng16-16"
+else
+ lib_list="${lib_list} libpng12-0"
+fi
+if package_exists libnspr4; then
+ lib_list="${lib_list} libnspr4 libnss3"
+else
+ lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+ dev_list="${dev_list} libjpeg-dev"
+else
+ dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+ dev_list="${dev_list} libudev1"
+ nacl_list="${nacl_list} libudev1:i386"
+else
+ dev_list="${dev_list} libudev0"
+ nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.6; then
+ dev_list="${dev_list} libbrlapi0.6"
+else
+ dev_list="${dev_list} libbrlapi0.5"
+fi
+if package_exists apache2.2-bin; then
+ dev_list="${dev_list} apache2.2-bin"
+else
+ dev_list="${dev_list} apache2-bin"
+fi
+if package_exists libav-tools; then
+ dev_list="${dev_list} libav-tools"
+fi
+if package_exists php7.2-cgi; then
+ dev_list="${dev_list} php7.2-cgi libapache2-mod-php7.2"
+elif package_exists php7.1-cgi; then
+ dev_list="${dev_list} php7.1-cgi libapache2-mod-php7.1"
+elif package_exists php7.0-cgi; then
+ dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0"
+else
+ dev_list="${dev_list} php5-cgi libapache2-mod-php5"
+fi
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+ lib_list="$lib_list appmenu-gtk"
+fi
+
+# Cross-toolchain strip is needed for building the sysroots.
+if package_exists binutils-arm-linux-gnueabihf; then
+ dev_list="${dev_list} binutils-arm-linux-gnueabihf"
+fi
+if package_exists binutils-aarch64-linux-gnu; then
+ dev_list="${dev_list} binutils-aarch64-linux-gnu"
+fi
+if package_exists binutils-mipsel-linux-gnu; then
+ dev_list="${dev_list} binutils-mipsel-linux-gnu"
+fi
+if package_exists binutils-mips64el-linux-gnuabi64; then
+ dev_list="${dev_list} binutils-mips64el-linux-gnuabi64"
+fi
+
+# When cross-building for arm/Android on a 64-bit system, the host binaries
+# that are part of v8 need to be compiled with -m32, which means that basic
+# multilib support is needed.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+ # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
+ # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
+ # appropriate value of X and Y by seeing what version the current
+ # distribution's g++-multilib package depends on.
+ multilib_package=$(apt-cache depends g++-multilib --important | \
+ grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
+ lib32_list="$lib32_list $multilib_package"
+fi
+
+if [ "$do_inst_syms" = "1" ]; then
+ echo "Including debugging symbols."
+
+ # Debian is in the process of transitioning to automatic debug packages, which
+ # have the -dbgsym suffix (https://wiki.debian.org/AutomaticDebugPackages).
+ # Untransitioned packages have the -dbg suffix. And on some systems, neither
+ # will be available, so exclude the ones that are missing.
+ dbg_package_name() {
+ if package_exists "$1-dbgsym"; then
+ echo "$1-dbgsym"
+ elif package_exists "$1-dbg"; then
+ echo "$1-dbg"
+ fi
+ }
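+  # Illustrative behavior (actual availability varies by distribution):
+  #   dbg_package_name libc6   # prints "libc6-dbgsym" or "libc6-dbg",
+  #                            # or nothing when neither package exists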
+
+  for package in ${common_lib_list}; do
+ dbg_list="$dbg_list $(dbg_package_name ${package})"
+ done
+
+ # Debugging symbols packages not following common naming scheme
+ if [ "$(dbg_package_name libstdc++6)" == "" ]; then
+ if package_exists libstdc++6-8-dbg; then
+ dbg_list="${dbg_list} libstdc++6-8-dbg"
+ elif package_exists libstdc++6-7-dbg; then
+ dbg_list="${dbg_list} libstdc++6-7-dbg"
+ elif package_exists libstdc++6-6-dbg; then
+ dbg_list="${dbg_list} libstdc++6-6-dbg"
+ elif package_exists libstdc++6-5-dbg; then
+ dbg_list="${dbg_list} libstdc++6-5-dbg"
+ elif package_exists libstdc++6-4.9-dbg; then
+ dbg_list="${dbg_list} libstdc++6-4.9-dbg"
+ elif package_exists libstdc++6-4.8-dbg; then
+ dbg_list="${dbg_list} libstdc++6-4.8-dbg"
+ elif package_exists libstdc++6-4.7-dbg; then
+ dbg_list="${dbg_list} libstdc++6-4.7-dbg"
+ elif package_exists libstdc++6-4.6-dbg; then
+ dbg_list="${dbg_list} libstdc++6-4.6-dbg"
+ fi
+ fi
+ if [ "$(dbg_package_name libatk1.0-0)" == "" ]; then
+ dbg_list="$dbg_list $(dbg_package_name libatk1.0)"
+ fi
+ if [ "$(dbg_package_name libpango1.0-0)" == "" ]; then
+ dbg_list="$dbg_list $(dbg_package_name libpango1.0-dev)"
+ fi
+else
+ echo "Skipping debugging symbols."
+ dbg_list=
+fi
+
+if [ "$do_inst_lib32" = "1" ]; then
+ echo "Including 32-bit libraries."
+else
+ echo "Skipping 32-bit libraries."
+ lib32_list=
+fi
+
+if [ "$do_inst_arm" = "1" ]; then
+ echo "Including ARM cross toolchain."
+else
+ echo "Skipping ARM cross toolchain."
+ arm_list=
+fi
+
+if [ "$do_inst_nacl" = "1" ]; then
+ echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+ echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+ nacl_list=
+fi
+
+filtered_backwards_compatible_list=
+if [ "$do_inst_backwards_compatible" = "1" ]; then
+ echo "Including backwards compatible packages."
+ for package in ${backwards_compatible_list}; do
+ if package_exists ${package}; then
+ filtered_backwards_compatible_list+=" ${package}"
+ fi
+ done
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
+packages="$(
+ echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}" \
+ "${nacl_list}" ${filtered_backwards_compatible_list} | tr " " "\n" | \
+ sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+ if ! missing_packages="$(dpkg-query -W -f ' ' ${packages} 2>&1)"; then
+    # Distinguish between packages that aren't available to the system at
+    # all (i.e. not in any repo) and packages that are merely not installed
+    # yet (i.e. known to apt but not to dpkg).
+ missing_packages="$(echo "${missing_packages}" | awk '{print $NF}')"
+ not_installed=""
+ unknown=""
+ for p in ${missing_packages}; do
+ if apt-cache show ${p} > /dev/null 2>&1; then
+ not_installed="${p}\n${not_installed}"
+ else
+ unknown="${p}\n${unknown}"
+ fi
+ done
+ if [ -n "${not_installed}" ]; then
+ echo "WARNING: The following packages are not installed:"
+ echo -e "${not_installed}" | sed -e "s/^/ /"
+ fi
+ if [ -n "${unknown}" ]; then
+ echo "WARNING: The following packages are unknown to your system"
+ echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+ echo -e "${unknown}" | sed -e "s/^/ /"
+ fi
+ exit 1
+ fi
+ exit 0
+fi
+
+echo "Finding missing packages..."
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+query_cmd="apt-get --just-print install $(echo $packages)"
+if cmd_output="$(LANGUAGE=en LANG=C $query_cmd)"; then
+ new_list=$(echo "$cmd_output" |
+ sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d' |
+ sed 's/ *$//')
+ upgrade_list=$(echo "$cmd_output" |
+ sed -e '1,/The following packages will be upgraded:/d;s/^ //;t;d' |
+ sed 's/ *$//')
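+  # Each pipeline above keeps only the indented package names that apt-get
+  # prints under the matching "The following ..." header of its dry-run
+  # output, then strips trailing spaces.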
+ if [ -z "$new_list" ] && [ -z "$upgrade_list" ]; then
+ echo "No missing packages, and the packages are up to date."
+ else
+ echo "Installing and upgrading packages: $new_list $upgrade_list."
+ sudo apt-get install ${do_quietly-} ${new_list} ${upgrade_list}
+ fi
+ echo
+else
+ # An apt-get exit status of 100 indicates that a real error has occurred.
+
+ # I am intentionally leaving out the '"'s around query_cmd,
+ # as this makes it easier to cut and paste the output
+ echo "The following command failed: " ${query_cmd}
+ echo
+ echo "It produced the following output:"
+ echo "$cmd_output"
+ echo
+ echo "You will have to install the above packages yourself."
+ echo
+ exit 100
+fi
+
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if [ "$do_inst_chromeos_fonts" != "0" ]; then
+ echo
+ echo "Installing Chrome OS fonts."
+ dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+ if ! sudo $dir/linux/install-chromeos-fonts.py; then
+ echo "ERROR: The installation of the Chrome OS default fonts failed."
+ if [ `stat -f -c %T $dir` == "nfs" ]; then
+ echo "The reason is that your repo is installed on a remote file system."
+ else
+ echo "This is expected if your repo is installed on a remote file system."
+ fi
+ echo "It is recommended to install your repo on a local file system."
+ echo "You can skip the installation of the Chrome OS default founts with"
+ echo "the command line option: --no-chromeos-fonts."
+ exit 1
+ fi
+else
+ echo "Skipping installation of Chrome OS fonts."
+fi
+
+echo "Installing locales."
+CHROMIUM_LOCALES="da_DK.UTF-8 fr_FR.UTF-8 he_IL.UTF-8 zh_TW.UTF-8"
+LOCALE_GEN=/etc/locale.gen
+if [ -e ${LOCALE_GEN} ]; then
+ OLD_LOCALE_GEN="$(cat /etc/locale.gen)"
+ for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
+ sudo sed -i "s/^# ${CHROMIUM_LOCALE}/${CHROMIUM_LOCALE}/" ${LOCALE_GEN}
+ done
+ # Regenerating locales can take a while, so only do it if we need to.
+ if (echo "${OLD_LOCALE_GEN}" | cmp -s ${LOCALE_GEN}); then
+ echo "Locales already up-to-date."
+ else
+ sudo locale-gen
+ fi
+else
+ for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
+ sudo locale-gen ${CHROMIUM_LOCALE}
+ done
+fi
diff --git a/deps/v8/build/install-chroot.sh b/deps/v8/build/install-chroot.sh
new file mode 100755
index 0000000000..d76d53563b
--- /dev/null
+++ b/deps/v8/build/install-chroot.sh
@@ -0,0 +1,888 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+ echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+ echo "-b dir additional directories that should be bind mounted,"
+ echo ' or "NONE".'
+ echo " Default: if local filesystems present, ask user for help"
+ echo "-g group,... groups that can use the chroot unauthenticated"
+ echo " Default: '${admin}' and current user's group ('$(id -gn)')"
+ echo "-l List all installed chroot environments"
+ echo "-m mirror an alternate repository mirror for package downloads"
+ echo "-s configure default deb-srcs"
+ echo "-c always copy 64bit helper binaries to 32bit chroot"
+ echo "-h this help message"
+}
+
+process_opts() {
+ local OPTNAME OPTIND OPTERR OPTARG
+ while getopts ":b:g:lm:sch" OPTNAME; do
+ case "$OPTNAME" in
+ b)
+ if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+ bind_mounts="${OPTARG}"
+ else
+ if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+ ! -d "${OPTARG}" ]; then
+ echo "Invalid -b option(s)"
+ usage
+ exit 1
+ fi
+ bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+ fi
+ ;;
+ g)
+ [ -n "${OPTARG}" ] &&
+ chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+ ;;
+ l)
+ list_all_chroots
+ exit
+ ;;
+ m)
+ if [ -n "${mirror}" ]; then
+ echo "You can only specify exactly one mirror location"
+ usage
+ exit 1
+ fi
+ mirror="$OPTARG"
+ ;;
+ s)
+ add_srcs="y"
+ ;;
+ c)
+ copy_64="y"
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ \:)
+ echo "'-$OPTARG' needs an argument."
+ usage
+ exit 1
+ ;;
+ *)
+ echo "invalid command-line option: $OPTARG"
+ usage
+ exit 1
+ ;;
+ esac
+ done
+
+ if [ $# -ge ${OPTIND} ]; then
+ eval echo "Unexpected command line argument: \${${OPTIND}}"
+ usage
+ exit 1
+ fi
+}
+
+list_all_chroots() {
+ for i in /var/lib/chroot/*; do
+ i="${i##*/}"
+ [ "${i}" = "*" ] && continue
+ [ -x "/usr/local/bin/${i%bit}" ] || continue
+ grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+ [ -r "/etc/schroot/script-${i}" -a \
+ -r "/etc/schroot/mount-${i}" ] || continue
+ echo "${i%bit}"
+ done
+}
+
+getkey() {
+ (
+ trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+ stty -echo iuclc -icanon 2>/dev/null
+ dd count=1 bs=1 2>/dev/null
+ )
+}
+
+chr() {
+ printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+ printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
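+# chr and ord convert between ASCII codes and characters, e.g. "$(chr 65)"
+# prints "A" and "$(ord A)" prints 65; the mount-point menu below uses them
+# to map menu letters to list indices.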
+
+is_network_drive() {
+ stat -c %T -f "$1/" 2>/dev/null |
+ egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+ echo "Run this script as a regular user and provide your \"sudo\"" \
+ "password if requested" >&2
+ exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these
+# packages are already installed, don't force another "apt-get install";
+# doing so would mark them as manually installed and prevent them from being
+# auto-removed if they ever become eligible for that. And as this script only
+# needs the packages once, there is no good reason to introduce a hard
+# dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+ [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+ ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+ printf '%4d: %s\n' "$j" "$i"
+ j=$(($j+1))
+done
+while :; do
+ printf "Which target would you like to install: "
+ read n
+ [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+ [ "$j" -eq "$n" ] && { distname="$i"; break; }
+ j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+ while :; do
+ echo "You are running a 64bit kernel. This allows you to install either a"
+ printf "32bit or a 64bit chroot environment. %s" \
+ "Which one do you want (32, 64) "
+ read arch
+ [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+ done
+ [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+ arch="${arch}bit"
+ echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+ while :; do
+ echo "This chroot already exists on your machine."
+ if schroot -l --all-sessions 2>&1 |
+ sed 's/^session://' |
+ grep -qs "^${target%bit}-"; then
+ echo "And it appears to be in active use. Terminate all programs that"
+ echo "are currently using the chroot environment and then re-run this"
+ echo "script."
+ echo "If you still get an error message, you might have stale mounts"
+ echo "that you forgot to delete. You can always clean up mounts by"
+ echo "executing \"${target%bit} -c\"."
+ exit 1
+ fi
+ echo "I can abort installation, I can overwrite the existing chroot,"
+ echo "or I can delete the old one and then exit. What would you like to"
+ printf "do (a/o/d)? "
+ read choice
+ case "${choice}" in
+ a|A) exit 1;;
+ o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+ d|D) sudo rm -rf "/var/lib/chroot/${target}" \
+ "/usr/local/bin/${target%bit}" \
+ "/etc/schroot/mount-${target}" \
+ "/etc/schroot/script-${target}" \
+ "/etc/schroot/${target}"
+ sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+ :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+ "/etc/schroot/schroot.conf"
+ trap '' INT TERM QUIT HUP
+ trap '' EXIT
+ echo "Deleted!"
+ exit 0;;
+ esac
+ done
+ echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+ while :; do
+ echo "Would you like to add ${distname}-updates and ${distname}-security "
+ printf "to the chroot's sources.list (y/n)? "
+ read alt_repos
+ case "${alt_repos}" in
+ y|Y)
+ alt_repos="y"
+ break
+ ;;
+ n|N)
+ break
+ ;;
+ esac
+ done
+ echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+ mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+ $2 !~ "^/media" && $2 !~ "^/run" &&
+ ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+ $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+ $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+ $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+ print $2
+ }' /proc/mounts |
+ head -n26)"
+ if [ -n "${mounts}" ]; then
+ echo "You appear to have non-standard mount points that you"
+ echo "might want to import into the chroot environment:"
+ echo
+ sel=
+ while :; do
+ # Print a menu, listing all non-default mounts of local or network
+ # file systems.
+ j=1; for m in ${mounts}; do
+ c="$(printf $(printf '\\%03o' $((64+$j))))"
+ echo "$sel" | grep -qs $c &&
+ state="mounted in chroot" || state="$(tput el)"
+ printf " $c) %-40s${state}\n" "$m"
+ j=$(($j+1))
+ done
+ # Allow user to interactively (de-)select any of the entries
+ echo
+ printf "Select mount points that you want to be included or press %s" \
+ "SPACE to continue"
+ c="$(getkey | tr a-z A-Z)"
+ [ "$c" == " " ] && { echo; echo; break; }
+ if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((63 + $j)) ]; then
+ # Invalid input, ring the console bell
+ tput bel
+ else
+ # Toggle the selection for the given entry
+ if echo "$sel" | grep -qs $c; then
+ sel="$(printf "$sel" | sed "s/$c//")"
+ else
+ sel="$sel$c"
+ fi
+ fi
+ # Reposition cursor to the top of the list of entries
+ tput cuu $(($j + 1))
+ echo
+ done
+ fi
+ j=1; for m in ${mounts}; do
+ c="$(chr $(($j + 64)))"
+ if echo "$sel" | grep -qs $c; then
+ bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+ fi
+ j=$(($j+1))
+ done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+ /etc/schroot/schroot.conf
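+# (Reading of the sed program: print lines until the "[${target%bit}]" header
+# is reached, swallow lines until the next "[...]" header or end of file, and
+# print everything from there on.)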
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+ mirror="http://archive.ubuntu.com/ubuntu" ||
+ mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+ "${distname}" "/var/lib/chroot/${target}" "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+ brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+ chroot_groups="${admin},$(id -gn)"
+fi
+
+if [ -d '/etc/schroot/default' ]; then
+ new_version=1
+ fstab="/etc/schroot/${target}/fstab"
+else
+ new_version=0
+ fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+ sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+ sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+ [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+ printf "${bind_mounts}" |
+ sudo sh -c "cat >>${fstab}"
+else
+ # Older versions of schroot wanted a "priority=" line, whereas recent
+ # versions deprecate "priority=" and warn if they see it. We don't have
+ # a good feature test, but scanning for the string "priority=" in the
+ # existing "schroot.conf" file is a good indication of what to do.
+ priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+ echo 'priority=3' || :)
+ sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+ # Set up a list of mount points that is specific to this
+ # chroot environment.
+ sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+ /etc/schroot/script-defaults |
+ sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+ sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+ /etc/schroot/mount-defaults |
+ sudo sh -c "cat > ${fstab}"
+fi
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+ printf "${bind_mounts}" |
+ sudo sh -c 'cat >>'"${fstab}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+ ! grep -qs '^/media' "${fstab}"; then
+ echo '/media /media none rw,rbind 0 0' |
+ sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
+ echo '/dev/shm /dev/shm none rw,bind 0 0' |
+ sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+ ! grep -qs '^/run' "${fstab}"; then
+ echo '/run /run none rw,bind 0 0' |
+ sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
+ { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+ echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+ sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>'"${fstab}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+ # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+ # insert the same number of spaces as the number of characters in the
+ # parameter(s) passed to this function.
+ # If the "fold" program cannot be found, or if the actual width of the
+ # terminal cannot be determined, this function doesn't attempt to do any
+ # wrapping.
+ local f="$(type -P fold)"
+ [ -z "${f}" ] && { cat; return; }
+ local c="$(stty -a </dev/tty 2>/dev/null |
+ sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+ [ -z "${c}" ] && { cat; return; }
+ local i="$(echo "$*"|sed 's/./ /g')"
+ local j="$(printf %s "${i}"|wc -c)"
+ if [ "${c}" -gt "${j}" ]; then
+ dd bs=1 count="${j}" 2>/dev/null
+ "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+ else
+ "${f}" -sw "${c}"
+ fi
+}
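+# Usage pattern (as in help() below): pipe the text into wrap and pass the
+# prefix whose width continuation lines should be indented by, e.g.
+#   echo "Usage ${0##*/} ..." | wrap "Usage ${0##*/} "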
+
+help() {
+ echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+ echo " help: print this message" | wrap " "
+ echo " list: list all known chroot environments" | wrap " "
+ echo " clean: remove all old chroot sessions for \"${chroot}\"" | wrap " "
+ echo " clean-all: remove all old chroot sessions for all environments" | wrap " "
+ exit 0
+}
+
+clean() {
+ local s t rc
+ rc=0
+ for s in $(schroot -l --all-sessions); do
+ if [ -n "$1" ]; then
+ t="${s#session:}"
+ [ "${t#${chroot}-}" == "${t}" ] && continue
+ fi
+ if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+ fgrep -qs "/var/lib/schroot/mount/${t}"; then
+ echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+ rc=1
+ continue
+ fi
+ sudo schroot -c "${s}" -e || rc=1
+ done
+ exit ${rc}
+}
+
+list() {
+ for e in $(schroot -l); do
+ e="${e#chroot:}"
+ [ -x "/usr/local/bin/${e}" ] || continue
+ if schroot -l --all-sessions 2>/dev/null |
+ sed 's/^session://' |
+ grep -qs "^${e}-"; then
+ echo "${e} is currently active"
+ else
+ echo "${e}"
+ fi
+ done
+ exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+ case "$1" in
+ --) shift; break;;
+ -h|--help) shift; help;;
+ -l|--list) shift; list;;
+ -c|--clean) shift; clean "${chroot}";;
+ -C|--clean-all) shift; clean;;
+ *) break;;
+ esac
+done
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
+
+if [ $# -eq 0 ]; then
+ # Run an interactive shell session
+ schroot -c "${session}" -r -p
+else
+ # Run a command inside of the chroot environment
+ p="$1"; shift
+ schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+ awk '{ print $1 }') 2>/dev/null
+other_pids=
+while [ -n "$i" ]; do
+ # Identify processes by the inode number of their root directory. Then
+ # remove all processes that we know belong to other sessions. We use
+ # "sort | uniq -u" to do what amounts to a "set subtraction operation".
+ pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+ sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+ t
+ d';
+ echo "${other_pids}";
+ echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+ # Kill all processes that are still left running in the session. This is
+ # typically an assortment of daemon processes that were started
+ # automatically. They result in us being unable to tear down the session
+ # cleanly.
+ [ -z "${pids}" ] && break
+ for j in $pids; do
+ # Unfortunately, the way that schroot sets up sessions has the
+ # side-effect of being unable to tell one session apart from another.
+ # This can result in us attempting to kill processes in other sessions.
+ # We make a best-effort to avoid doing so.
+ k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
+ sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+ if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+ other_pids="${other_pids}
+${j}"
+ continue
+ fi
+    kill -9 "$j"
+ done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+ s/^\(deb .* [^ -]\+\) main/\1-security main/
+ p
+ t1
+ d
+ :1;s/-security main/-updates main/
+ t
+ d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+ "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+ HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+ >&/dev/null; then
+ sudo sh -c '
+ echo "deb http://archive.canonical.com/ubuntu" \
+ "'"${distname}"' partner" \
+ >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+ s/^deb\([^-]\)/deb-src\1/' \
+ "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+ sudo sh -c '
+ echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+ >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+ apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+ [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+ sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+ l='"${LANG:-en_US}"'; l="${l%%.*}"
+ [ -r /etc/locale.gen ] &&
+ sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+ locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+ [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+ sudo sed -i 's/ / [arch=amd64,i386] /' \
+ "/var/lib/chroot/${target}/etc/apt/sources.list"
+ [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+ sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+ $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+ echo foreign-architecture \
+ $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+ sudo sh -c \
+ "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+ egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+ echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install \
+ autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool \
+ lsof strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+ cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+ "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+ file /bin/bash 2>/dev/null | grep -q x86-64; then
+ readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+ 'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+ sudo "/usr/local/bin/${target%bit}" apt-get -y install \
+ lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
+ dep=
+ for i in binutils gdb; do
+ [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+ done
+ [ -n "$dep" ] && sudo apt-get -y install $dep
+ sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+ for i in libbfd libpython; do
+ lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+ grep -s "$i" | awk '{ print $3 }')"
+ if [ -n "$lib" -a -r "$lib" ]; then
+ sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+ fi
+ done
+ for lib in libssl libcrypt; do
+ for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+ sudo cp $path/$lib* \
+ "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+ done
+ done
+ for i in gdb ld; do
+ sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+ sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+ /usr/local/lib/amd64/$i "\$@"
+EOF
+ sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+ done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+ while :; do
+ echo
+ echo "If you plan on building Chrome inside of the new chroot environment,"
+ echo "you now have to install the build dependencies. Do you want me to"
+ printf "start the script that does this for you (y/n)? "
+ read install_deps
+ case "${install_deps}" in
+ y|Y)
+ echo
+ # We prefer running the script in-place, but this might not be
+ # possible, if it lives on a network filesystem that denies
+ # access to root.
+ tmp_script=
+ if ! sudo /usr/local/bin/"${target%bit}" \
+ sh -c "[ -x '${script}' ]" >&/dev/null; then
+ tmp_script="/tmp/${script##*/}"
+ cp "${script}" "${tmp_script}"
+ fi
+ # Some distributions automatically start an instance of the system-
+ # wide dbus daemon, cron daemon or of the logging daemon, when
+          # installing the Chrome build dependencies. This prevents the chroot
+ # session from being closed. So, we always try to shut down any running
+ # instance of dbus and rsyslog.
+          sudo /usr/local/bin/"${target%bit}" sh -c "${script};
+              rc=\$?;
+              /etc/init.d/cron stop >/dev/null 2>&1 || :;
+              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+              exit \$rc"
+ rc=$?
+ [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+ [ $rc -ne 0 ] && exit $rc
+ break
+ ;;
+ n|N)
+ break
+ ;;
+ esac
+ done
+ echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mount}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+ ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+ { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+ is_network_drive "${HOME}/chroot"; } &&
+ ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+ echo "${HOME}/chroot is currently located on the same device as your"
+ echo "home directory."
+ echo "This might not be what you want. Do you want me to move it somewhere"
+ echo "else?"
+ # If the computer has multiple spindles, many users configure all or part of
+ # the secondary hard disk to be writable by the primary user of this machine.
+ # Make some reasonable effort to detect this type of configuration and
+ # then offer a good location for where to put the ~/chroot directory.
+ suggest=
+ for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+ if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+ ! is_network_drive "$i"; then
+ suggest="$i"
+ else
+ for j in "$i/"*; do
+ if [ -d "$j" -a -w "$j" -a \
+ \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+ ! is_network_drive "$j"; then
+ suggest="$j"
+ else
+ for k in "$j/"*; do
+ if [ -d "$k" -a -w "$k" -a \
+ \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+ ! is_network_drive "$k"; then
+ suggest="$k"
+ break
+ fi
+ done
+ fi
+ [ -n "${suggest}" ] && break
+ done
+ fi
+ [ -n "${suggest}" ] && break
+ done
+ def_suggest="${HOME}"
+ if [ -n "${suggest}" ]; then
+ # For home directories that reside on network drives, make our suggestion
+ # the default option. For home directories that reside on a local drive,
+ # require that the user manually enters the new location.
+ if is_network_drive "${HOME}"; then
+ def_suggest="${suggest}"
+ else
+ echo "A good location would probably be in \"${suggest}\""
+ fi
+ fi
+ while :; do
+ printf "Physical location [${def_suggest}]: "
+ read dir
+ [ -z "${dir}" ] && dir="${def_suggest}"
+ [ "${dir%%/}" == "${HOME%%/}" ] && break
+ if ! [ -d "${dir}" -a -w "${dir}" ] ||
+ [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+ echo "Cannot write to ${dir}/chroot. Please try again"
+ else
+ mv "${HOME}/chroot" "${dir}/chroot"
+ ln -s "${dir}/chroot" "${HOME}/chroot"
+ for i in $(list_all_chroots); do
+ sudo "$i" mkdir -p "${dir}/chroot"
+ done
+ sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+ break
+ fi
+ done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/deps/v8/build/internal/README.chromium b/deps/v8/build/internal/README.chromium
new file mode 100644
index 0000000000..4624830d21
--- /dev/null
+++ b/deps/v8/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+ essential.vsprops
+ Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+ release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting which is "Maximize Speed". Results in a relatively fast build with a reasonable optimization level but without whole program optimization, to reduce build time.
+
+ release_impl.vsprops
+ Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+ release_impl_checksenabled.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+ release_impl_official.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimizations (WPO), which doubles the build time. Results in much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+ release_impl_pgo_instrument.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+ release_impl_pgo_optimize.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+ release_impl_purify.vsprops
+ Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/deps/v8/build/ios/OWNERS b/deps/v8/build/ios/OWNERS
new file mode 100644
index 0000000000..40a68c74b1
--- /dev/null
+++ b/deps/v8/build/ios/OWNERS
@@ -0,0 +1 @@
+rohitrao@chromium.org
diff --git a/deps/v8/build/ios/chrome_ios.croc b/deps/v8/build/ios/chrome_ios.croc
new file mode 100644
index 0000000000..938a2e9d01
--- /dev/null
+++ b/deps/v8/build/ios/chrome_ios.croc
@@ -0,0 +1,71 @@
+# -*- python -*-
+# Crocodile config file for Chromium iOS.
+#
+# Note that Chromium iOS also uses the config file at src/build/common.croc.
+#
+# See src/tools/code_coverage/example.croc for more info on config files.
+
+{
+ # List of rules, applied in order
+ 'rules' : [
+ # Specify inclusions before exclusions, since rules are in order.
+
+ # Exclude everything to negate whatever is in src/build/common.croc
+ {
+ 'regexp' : '.*',
+ 'include' : 0,
+ },
+
+ # Include all directories (but not the files in the directories).
+ # This is a workaround for how croc.py walks the directory tree. See the
+ # TODO in the AddFiles method of src/tools/code_coverage/croc.py
+ {
+ 'regexp' : '.*/$',
+ 'include' : 1,
+ },
+
+ # Include any file with an 'ios' directory in the path.
+ {
+ 'regexp' : '.*/ios/.*',
+ 'include' : 1,
+ 'add_if_missing' : 1,
+ },
+
+ # Include any file that ends with _ios.
+ {
+ 'regexp' : '.*_ios\\.(c|cc|m|mm)$',
+ 'include' : 1,
+ 'add_if_missing' : 1,
+ },
+
+ # Include any file that ends with _ios_unittest (and label it a test).
+ {
+ 'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
+ 'include' : 1,
+ 'add_if_missing' : 1,
+ 'group' : 'test',
+ },
+
+ # Don't scan for executable lines in uninstrumented header files
+ {
+ 'regexp' : '.*\\.(h|hpp)$',
+ 'add_if_missing' : 0,
+ },
+
+ # Don't measure coverage of perftests.
+ {
+ 'regexp' : '.*perftest\\.(c|cc|m|mm)$',
+ 'include' : 0,
+ },
+
+ # Languages
+ {
+ 'regexp' : '.*\\.m$',
+ 'language' : 'ObjC',
+ },
+ {
+ 'regexp' : '.*\\.mm$',
+ 'language' : 'ObjC++',
+ },
+ ],
+}
diff --git a/deps/v8/build/ios/clean_env.py b/deps/v8/build/ios/clean_env.py
new file mode 100755
index 0000000000..bf56b2fe81
--- /dev/null
+++ b/deps/v8/build/ios/clean_env.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+ """This is like 'env -i', but it uses a whitelist of env variables to allow
+ through to the command being run. It attempts to strip off Xcode-added
+ values from PATH.
+ """
+ # Note: An attempt was made to do something like: env -i bash -lc '[command]'
+ # but that fails to set the things set by login (USER, etc.), so instead
+ # the only approach that seems to work is to have a whitelist.
+ env_key_whitelist = (
+ 'HOME',
+ 'LOGNAME',
+ # 'PATH' added below (but filtered).
+ 'PWD',
+ 'SHELL',
+ 'TEMP',
+ 'TMPDIR',
+ 'USER'
+ )
+
+ # Need something to run.
+ # TODO(lliabraa): Make this output a usage string and exit (here and below).
+ assert(len(argv) > 0)
+
+  add_to_path = []
+  first_entry = argv[0]
+  if first_entry.startswith('ADD_TO_PATH='):
+    argv = argv[1:]
+    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
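+  # Hypothetical invocation: clean_env.py ADD_TO_PATH=/opt/bin FOO=bar cmd
+  # runs "cmd" with FOO=bar added to the cleaned environment and /opt/bin
+  # prepended to the filtered PATH.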
+
+ # Still need something to run.
+ assert(len(argv) > 0)
+
+ clean_env = {}
+
+ # Pull over the whitelisted keys.
+ for key in env_key_whitelist:
+ val = os.environ.get(key, None)
+    if val is not None:
+ clean_env[key] = val
+
+ # Collect the developer dir as set via Xcode, defaulting it.
+ dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+ if dev_prefix[-1:] != '/':
+ dev_prefix += '/'
+
+ # Now pull in PATH, but remove anything Xcode might have added.
+ initial_path = os.environ.get('PATH', '')
+ filtered_chunks = \
+ [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+ if filtered_chunks:
+ clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+ # Add any KEY=VALUE args before the command to the cleaned environment.
+ args = argv[:]
+  while args and '=' in args[0]:
+ (key, val) = args[0].split('=', 1)
+ clean_env[key] = val
+ args = args[1:]
+
+ # Still need something to run.
+ assert(len(args) > 0)
+
+ # Off it goes...
+ os.execvpe(args[0], args, clean_env)
+ # Should never get here, so return a distinctive, non-zero status code.
+ return 66
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/deps/v8/build/landmine_utils.py b/deps/v8/build/landmine_utils.py
new file mode 100644
index 0000000000..a3f21ff1b8
--- /dev/null
+++ b/deps/v8/build/landmine_utils.py
@@ -0,0 +1,33 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+
+
+def IsWindows():
+ return sys.platform in ['win32', 'cygwin']
+
+
+def IsLinux():
+ return sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd'))
+
+
+def IsMac():
+ return sys.platform == 'darwin'
+
+
+def host_os():
+ """
+ Returns a string representing the host_os of the current system.
+ Possible values: 'win', 'mac', 'linux', 'unknown'.
+ """
+ if IsWindows():
+ return 'win'
+ elif IsLinux():
+ return 'linux'
+ elif IsMac():
+ return 'mac'
+ else:
+ return 'unknown'
diff --git a/deps/v8/build/landmines.py b/deps/v8/build/landmines.py
new file mode 100755
index 0000000000..d0f429809a
--- /dev/null
+++ b/deps/v8/build/landmines.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+
+Before adding or changing a landmine consider the consequences of doing so.
+Doing so will wipe out every output directory on every Chrome developer's
+machine. This can be particularly problematic on Windows where the directory
+deletion may well fail (locked files, command prompt in the directory, etc.),
+and generated .sln and .vcxproj files will be deleted.
+
+This output directory deletion will be repeated when going back and forth across
+the change that added the landmine, adding to the cost. There are usually less
+troublesome alternatives.
+"""
+
+import difflib
+import errno
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+def get_build_dir(src_dir):
+ """
+ Returns output directory absolute path dependent on build and targets.
+ Examples:
+ r'c:\b\build\slave\win\build\src\out'
+ '/mnt/data/b/build/slave/linux/build/src/out'
+ '/b/build/slave/ios_rel_device/build/src/out'
+
+ Keep this function in sync with tools/build/scripts/slave/compile.py
+ """
+ if 'CHROMIUM_OUT_DIR' in os.environ:
+ output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+ if not output_dir:
+      raise ValueError('CHROMIUM_OUT_DIR environment variable is set but blank!')
+ else:
+ output_dir = 'out'
+ return os.path.abspath(os.path.join(src_dir, output_dir))
+
+
+def clobber_if_necessary(new_landmines, src_dir):
+ """Does the work of setting, planting, and triggering landmines."""
+ out_dir = get_build_dir(src_dir)
+ landmines_path = os.path.normpath(os.path.join(src_dir, '.landmines'))
+ try:
+ os.makedirs(out_dir)
+ except OSError as e:
+    if e.errno != errno.EEXIST:
+      raise
+
+ if os.path.exists(landmines_path):
+ with open(landmines_path, 'r') as f:
+ old_landmines = f.readlines()
+ if old_landmines != new_landmines:
+ old_date = time.ctime(os.stat(landmines_path).st_ctime)
+ diff = difflib.unified_diff(old_landmines, new_landmines,
+ fromfile='old_landmines', tofile='new_landmines',
+ fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+ sys.stdout.write('Clobbering due to:\n')
+ sys.stdout.writelines(diff)
+ sys.stdout.flush()
+
+ clobber.clobber(out_dir)
+
+ # Save current set of landmines for next time.
+ with open(landmines_path, 'w') as f:
+ f.writelines(new_landmines)
+
+
+def process_options():
+ """Returns an options object containing the configuration for this script."""
+ parser = optparse.OptionParser()
+ parser.add_option(
+ '-s', '--landmine-scripts', action='append',
+ help='Path to the script which emits landmines to stdout. The target '
+ 'is passed to this script via option -t. Note that an extra '
+ 'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+ parser.add_option('-d', '--src-dir',
+ help='Path of the source root dir. Overrides the default location of the '
+ 'source root dir when calculating the build directory.')
+ parser.add_option('-v', '--verbose', action='store_true',
+ default=('LANDMINES_VERBOSE' in os.environ),
+ help=('Emit some extra debugging information (default off). This option '
+ 'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+ 'variable.'))
+
+ options, args = parser.parse_args()
+
+ if args:
+ parser.error('Unknown arguments %s' % args)
+
+ logging.basicConfig(
+ level=logging.DEBUG if options.verbose else logging.ERROR)
+
+ if options.src_dir:
+ if not os.path.isdir(options.src_dir):
+ parser.error('Cannot find source root dir at %s' % options.src_dir)
+ logging.debug('Overriding source root dir. Using: %s', options.src_dir)
+ else:
+ options.src_dir = \
+ os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+ if not options.landmine_scripts:
+ options.landmine_scripts = [os.path.join(options.src_dir, 'build',
+ 'get_landmines.py')]
+
+ extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+ if extra_script:
+ options.landmine_scripts += [extra_script]
+
+ return options
+
+
+def main():
+ options = process_options()
+
+ landmines = []
+ for s in options.landmine_scripts:
+ proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
+ output, _ = proc.communicate()
+ landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+ clobber_if_necessary(landmines, options.src_dir)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/linux/BUILD.gn b/deps/v8/build/linux/BUILD.gn
new file mode 100644
index 0000000000..54314c7687
--- /dev/null
+++ b/deps/v8/build/linux/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+import("//build/config/linux/pkg_config.gni")
+
+if (use_gio) {
+ pkg_config("gio_config") {
+ packages = [ "gio-2.0" ]
+
+ defines = [ "USE_GIO" ]
+ }
+}
+
+# Looking for libspeechd? Use //third_party/speech-dispatcher
+
+if (use_system_freetype) {
+ assert(!is_chromecast)
+
+ # Only provided for distributions which prefer to keep linking to FreeType on
+  # the system. Use with caution; for details see build/config/freetype/BUILD.gn.
+ pkg_config("freetype_from_pkgconfig") {
+ visibility = [
+ "//third_party:freetype_harfbuzz",
+ "//third_party/harfbuzz-ng:harfbuzz_source",
+ ]
+ packages = [ "freetype2" ]
+ }
+}
diff --git a/deps/v8/build/linux/OWNERS b/deps/v8/build/linux/OWNERS
new file mode 100644
index 0000000000..8e1cb55729
--- /dev/null
+++ b/deps/v8/build/linux/OWNERS
@@ -0,0 +1,3 @@
+mmoss@chromium.org
+thestig@chromium.org
+thomasanderson@chromium.org
diff --git a/deps/v8/build/linux/chrome_linux.croc b/deps/v8/build/linux/chrome_linux.croc
new file mode 100644
index 0000000000..f4003060f6
--- /dev/null
+++ b/deps/v8/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+ # List of rules, applied in order
+ 'rules' : [
+ # Specify inclusions before exclusions, since rules are in order.
+
+ # Don't include non-Linux platform dirs
+ {
+ 'regexp' : '.*/(chromeos|views)/',
+ 'include' : 0,
+ },
+ # Don't include chromeos, windows, or mac specific files
+ {
+ 'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+ 'include' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '.*_test_linux\\.',
+ 'group' : 'test',
+ },
+ ],
+}
diff --git a/deps/v8/build/linux/dump_app_syms.py b/deps/v8/build/linux/dump_app_syms.py
new file mode 100644
index 0000000000..12e693ef18
--- /dev/null
+++ b/deps/v8/build/linux/dump_app_syms.py
@@ -0,0 +1,29 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
+
+import os
+import subprocess
+import sys
+
+if len(sys.argv) != 5:
+ print "dump_app_syms.py <dump_syms_exe> <strip_binary>"
+ print " <binary_with_symbols> <symbols_output>"
+ sys.exit(1)
+
+dumpsyms = sys.argv[1]
+strip_binary = sys.argv[2]
+infile = sys.argv[3]
+outfile = sys.argv[4]
+
+# Dump only when the output file is out-of-date.
+if not os.path.isfile(outfile) or \
+ os.stat(outfile).st_mtime < os.stat(infile).st_mtime:
+ with open(outfile, 'w') as outfileobj:
+ subprocess.check_call([dumpsyms, infile], stdout=outfileobj)
+
+if strip_binary != '0':
+ subprocess.check_call(['strip', infile])
diff --git a/deps/v8/build/linux/extract_symbols.gni b/deps/v8/build/linux/extract_symbols.gni
new file mode 100644
index 0000000000..1cc315849a
--- /dev/null
+++ b/deps/v8/build/linux/extract_symbols.gni
@@ -0,0 +1,42 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# Extracts symbols from a binary into a symbol file using dump_app_syms.py.
+#
+# Args:
+# binary: Path to the binary containing symbols to extract, e.g.:
+# "$root_out_dir/chrome"
+# symbol_file: Desired output file for symbols, e.g.:
+# "$root_out_dir/chrome.breakpad.$current_cpu"
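+#
+# A hypothetical usage sketch (the target name and paths are illustrative,
+# not part of this change):
+#
+#   extract_symbols("chrome_symbols") {
+#     binary = "$root_out_dir/chrome"
+#     symbol_file = "$root_out_dir/chrome.breakpad.$current_cpu"
+#     deps = [ ":chrome" ]
+#   }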
+template("extract_symbols") {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "testonly",
+ ])
+ action("${target_name}") {
+ dump_syms_label = "//third_party/breakpad:dump_syms($host_toolchain)"
+ dump_syms_binary =
+ get_label_info(dump_syms_label, "root_out_dir") + "/" + "dump_syms"
+
+ script = "//build/linux/dump_app_syms.py"
+ inputs = [
+ invoker.binary,
+ dump_syms_binary,
+ ]
+ outputs = [
+ invoker.symbol_file,
+ ]
+ args = [
+ "./" + rebase_path(dump_syms_binary, root_build_dir),
+ "0", # strip_binary = false
+ rebase_path(invoker.binary, root_build_dir),
+ rebase_path(invoker.symbol_file, root_build_dir),
+ ]
+
+ deps += [ dump_syms_label ]
+ }
+}
diff --git a/deps/v8/build/linux/install-chromeos-fonts.py b/deps/v8/build/linux/install-chromeos-fonts.py
new file mode 100755
index 0000000000..54d0ede235
--- /dev/null
+++ b/deps/v8/build/linux/install-chromeos-fonts.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part of
+# install-build-deps.sh.
+
+import os
+import shutil
+import subprocess
+import sys
+
+URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
+ 'distfiles/%(name)s-%(version)s.tar.bz2')
+
+# Taken from the media-fonts/<name> ebuilds in chromiumos-overlay.
+# noto-cjk used to be here, but is removed because fc-cache takes too long
+# regenerating the fontconfig cache (see crbug.com/697954).
+# TODO(jshin): Add it back when the above issue can be avoided.
+SOURCES = [
+ {
+ 'name': 'notofonts',
+ 'version': '20161129'
+ }, {
+ 'name': 'robotofonts',
+ 'version': '2.132'
+ }
+]
+
+URLS = sorted([URL_TEMPLATE % d for d in SOURCES])
+FONTS_DIR = '/usr/local/share/fonts'
+
+def main(args):
+ if not sys.platform.startswith('linux'):
+ print "Error: %s must be run on Linux." % __file__
+ return 1
+
+ if os.getuid() != 0:
+ print "Error: %s must be run as root." % __file__
+ return 1
+
+ if not os.path.isdir(FONTS_DIR):
+ print "Error: Destination directory does not exist: %s" % FONTS_DIR
+ return 1
+
+ dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+ stamp = os.path.join(dest_dir, ".stamp02")
+ if os.path.exists(stamp):
+ with open(stamp) as s:
+ if s.read() == '\n'.join(URLS):
+ print "Chrome OS fonts already up to date in %s." % dest_dir
+ return 0
+
+ if os.path.isdir(dest_dir):
+ shutil.rmtree(dest_dir)
+ os.mkdir(dest_dir)
+ os.chmod(dest_dir, 0755)
+
+ print "Installing Chrome OS fonts to %s." % dest_dir
+ for url in URLS:
+ tarball = os.path.join(dest_dir, os.path.basename(url))
+ subprocess.check_call(['curl', '-L', url, '-o', tarball])
+ subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+ '-xf', tarball, '-C', dest_dir])
+ os.remove(tarball)
+
+ readme = os.path.join(dest_dir, "README")
+ with open(readme, 'w') as s:
+ s.write("This directory and its contents are auto-generated.\n")
+ s.write("It may be deleted and recreated. Do not modify.\n")
+ s.write("Script: %s\n" % __file__)
+
+ with open(stamp, 'w') as s:
+ s.write('\n'.join(URLS))
+
+ for base, dirs, files in os.walk(dest_dir):
+ for dir in dirs:
+ os.chmod(os.path.join(base, dir), 0755)
+ for file in files:
+ os.chmod(os.path.join(base, file), 0644)
+
+ print """\
+
+Chrome OS font rendering settings are specified using Fontconfig. If your
+system's configuration doesn't match Chrome OS's (which varies for different
+devices), fonts may be rendered with different subpixel rendering, subpixel
+positioning, or hinting settings. This may affect font metrics.
+
+Chrome OS's settings are stored in the media-libs/fontconfig package, which is
+at src/third_party/chromiumos-overlay/media-libs/fontconfig in a Chrome OS
+checkout. You can configure your system to match Chrome OS's defaults by
+creating or editing a ~/.fonts.conf file:
+
+<?xml version="1.0"?>
+<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
+<fontconfig>
+ <match target="font">
+ <edit name="antialias" mode="assign"><bool>true</bool></edit>
+ <edit name="autohint" mode="assign"><bool>true</bool></edit>
+ <edit name="hinting" mode="assign"><bool>true</bool></edit>
+ <edit name="hintstyle" mode="assign"><const>hintslight</const></edit>
+ <edit name="rgba" mode="assign"><const>rgb</const></edit>
+ </match>
+</fontconfig>
+
+To load additional per-font configs (and assuming you have Chrome OS checked
+out), add the following immediately before the "</fontconfig>" line:
+
+ <include ignore_missing="yes">/path/to/src/third_party/chromiumos-overlay/media-libs/fontconfig/files/local.conf</include>
+"""
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/linux/libbrlapi/BUILD.gn b/deps/v8/build/linux/libbrlapi/BUILD.gn
new file mode 100644
index 0000000000..4ee3950455
--- /dev/null
+++ b/deps/v8/build/linux/libbrlapi/BUILD.gn
@@ -0,0 +1,26 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+generate_library_loader("libbrlapi") {
+ name = "LibBrlapiLoader"
+ output_h = "libbrlapi.h"
+ output_cc = "libbrlapi_loader.cc"
+ header = "<brlapi.h>"
+
+ functions = [
+ "brlapi_getHandleSize",
+ "brlapi_error_location",
+ "brlapi_strerror",
+ "brlapi__acceptKeys",
+ "brlapi__openConnection",
+ "brlapi__closeConnection",
+ "brlapi__getDisplaySize",
+ "brlapi__enterTtyModeWithPath",
+ "brlapi__leaveTtyMode",
+ "brlapi__writeDots",
+ "brlapi__readKey",
+ ]
+}
diff --git a/deps/v8/build/linux/libpci/BUILD.gn b/deps/v8/build/linux/libpci/BUILD.gn
new file mode 100644
index 0000000000..2d1e267e12
--- /dev/null
+++ b/deps/v8/build/linux/libpci/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+# This generates a target named "libpci".
+generate_library_loader("libpci") {
+ name = "LibPciLoader"
+ output_h = "libpci.h"
+ output_cc = "libpci_loader.cc"
+ header = "<pci/pci.h>"
+
+ functions = [
+ "pci_alloc",
+ "pci_init",
+ "pci_cleanup",
+ "pci_scan_bus",
+ "pci_fill_info",
+ "pci_lookup_name",
+ ]
+}
diff --git a/deps/v8/build/linux/libudev/BUILD.gn b/deps/v8/build/linux/libudev/BUILD.gn
new file mode 100644
index 0000000000..9486a03292
--- /dev/null
+++ b/deps/v8/build/linux/libudev/BUILD.gn
@@ -0,0 +1,65 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+libudev_functions = [
+ "udev_device_get_action",
+ "udev_device_get_devnode",
+ "udev_device_get_parent",
+ "udev_device_get_parent_with_subsystem_devtype",
+ "udev_device_get_property_value",
+ "udev_device_get_subsystem",
+ "udev_device_get_sysattr_value",
+ "udev_device_get_sysname",
+ "udev_device_get_syspath",
+ "udev_device_new_from_devnum",
+ "udev_device_new_from_subsystem_sysname",
+ "udev_device_new_from_syspath",
+ "udev_device_unref",
+ "udev_enumerate_add_match_subsystem",
+ "udev_enumerate_get_list_entry",
+ "udev_enumerate_new",
+ "udev_enumerate_scan_devices",
+ "udev_enumerate_unref",
+ "udev_list_entry_get_next",
+ "udev_list_entry_get_name",
+ "udev_monitor_enable_receiving",
+ "udev_monitor_filter_add_match_subsystem_devtype",
+ "udev_monitor_get_fd",
+ "udev_monitor_new_from_netlink",
+ "udev_monitor_receive_device",
+ "udev_monitor_unref",
+ "udev_new",
+ "udev_set_log_fn",
+ "udev_set_log_priority",
+ "udev_unref",
+]
+
+# This generates a target named "udev0_loader".
+generate_library_loader("udev0_loader") {
+ name = "LibUdev0Loader"
+ output_h = "libudev0.h"
+ output_cc = "libudev0_loader.cc"
+ header = "\"third_party/libudev/libudev0.h\""
+
+ functions = libudev_functions
+}
+
+# This generates a target named "udev1_loader".
+generate_library_loader("udev1_loader") {
+ name = "LibUdev1Loader"
+ output_h = "libudev1.h"
+ output_cc = "libudev1_loader.cc"
+ header = "\"third_party/libudev/libudev1.h\""
+
+ functions = libudev_functions
+}
+
+group("libudev") {
+ public_deps = [
+ ":udev0_loader",
+ ":udev1_loader",
+ ]
+}
diff --git a/deps/v8/build/linux/pkg-config-wrapper b/deps/v8/build/linux/pkg-config-wrapper
new file mode 100755
index 0000000000..c4935d7b59
--- /dev/null
+++ b/deps/v8/build/linux/pkg-config-wrapper
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: an
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+
+set -o nounset
+set -o errexit
+
+root="$1"
+shift
+target_arch="$1"
+shift
+libpath="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+ echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2
+ exit 1
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+libdir=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
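+#
+# Illustrative example (paths are hypothetical): if pkg-config reports
+# prefix=/build/x86-generic/usr, the strip-prefix below becomes
+# /build/x86-generic, and rewrite_dirs.py then maps
+# -I/build/x86-generic/usr/include to -I$root/usr/include.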
+prefix=`PKG_CONFIG_LIBDIR=$libdir pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_LIBDIR=$libdir pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/deps/v8/build/linux/rewrite_dirs.py b/deps/v8/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000000..30f22f0cd6
--- /dev/null
+++ b/deps/v8/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+ '-idirafter',
+ '-imacros',
+ '-imultilib',
+ '-include',
+ '-iprefix',
+ '-iquote',
+ '-isystem',
+ '-L']
+
+def RewritePath(path, opts):
+ """Rewrites a path by stripping the prefix and prepending the sysroot."""
+ sysroot = opts.sysroot
+ prefix = opts.strip_prefix
+ if os.path.isabs(path) and not path.startswith(sysroot):
+ if path.startswith(prefix):
+ path = path[len(prefix):]
+ path = path.lstrip('/')
+ return os.path.join(sysroot, path)
+ else:
+ return path
+
+
+def RewriteLine(line, opts):
+ """Rewrites all the paths in recognized options."""
+ args = line.split()
+ count = len(args)
+ i = 0
+ while i < count:
+ for prefix in REWRITE_PREFIX:
+ # The option can be either in the form "-I /path/to/dir" or
+ # "-I/path/to/dir" so handle both.
+ if args[i] == prefix:
+ i += 1
+ try:
+ args[i] = RewritePath(args[i], opts)
+ except IndexError:
+ sys.stderr.write('Missing argument following %s\n' % prefix)
+ break
+ elif args[i].startswith(prefix):
+ args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+ i += 1
+
+ return ' '.join(args)
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+ parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+ opts, args = parser.parse_args(argv[1:])
+
+ for line in sys.stdin.readlines():
+ line = RewriteLine(line.strip(), opts)
+ print line
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/deps/v8/build/linux/sysroot_ld_path.sh b/deps/v8/build/linux/sysroot_ld_path.sh
new file mode 100755
index 0000000000..623d47b861
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_ld_path.sh
@@ -0,0 +1,99 @@
+#!/bin/sh
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+# sysroot_ld_path.sh /abspath/to/sysroot
+#
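+#
+# Illustrative example (entry is hypothetical): if the sysroot's ld.so.conf
+# lists /usr/local/lib, the output will include
+# /abspath/to/sysroot/usr/local/lib.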
+
+log_error_and_exit() {
+ echo $0: $@
+ exit 1
+}
+
+process_entry() {
+ if [ -z "$1" ] || [ -z "$2" ]; then
+ log_error_and_exit "bad arguments to process_entry()"
+ fi
+ local root="$1"
+ local localpath="$2"
+
+ echo $localpath | grep -qs '^/'
+ if [ $? -ne 0 ]; then
+ log_error_and_exit $localpath does not start with /
+ fi
+ local entry="$root$localpath"
+ echo $entry
+}
+
+process_ld_so_conf() {
+ if [ -z "$1" ] || [ -z "$2" ]; then
+ log_error_and_exit "bad arguments to process_ld_so_conf()"
+ fi
+ local root="$1"
+ local ld_so_conf="$2"
+
+ # ld.so.conf may include relative include paths. pushd is a bashism.
+ local saved_pwd=$(pwd)
+ cd $(dirname "$ld_so_conf")
+
+ cat "$ld_so_conf" | \
+ while read ENTRY; do
+ echo "$ENTRY" | grep -qs ^include
+ if [ $? -eq 0 ]; then
+ local included_files=$(echo "$ENTRY" | sed 's/^include //')
+ echo "$included_files" | grep -qs ^/
+ if [ $? -eq 0 ]; then
+ if ls $root$included_files >/dev/null 2>&1 ; then
+ for inc_file in $root$included_files; do
+ process_ld_so_conf "$root" "$inc_file"
+ done
+ fi
+ else
+ if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+ for inc_file in $(pwd)/$included_files; do
+ process_ld_so_conf "$root" "$inc_file"
+ done
+ fi
+ fi
+ continue
+ fi
+
+ echo "$ENTRY" | grep -qs ^/
+ if [ $? -eq 0 ]; then
+ process_entry "$root" "$ENTRY"
+ fi
+ done
+
+ # popd is a bashism
+ cd "$saved_pwd"
+}
+
+# Main
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /abspath/to/sysroot"
+ exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+ log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+ process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+ find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+ if [ $? -eq 0 ]; then
+ for entry in $LD_SO_CONF_D/*.conf; do
+ process_ld_so_conf "$1" "$entry"
+ done | xargs echo
+ fi
+fi
diff --git a/deps/v8/build/linux/sysroot_scripts/build_and_upload.py b/deps/v8/build/linux/sysroot_scripts/build_and_upload.py
new file mode 100755
index 0000000000..6a90fdcf2f
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/build_and_upload.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Automates running BuildPackageLists, BuildSysroot, and
+UploadSysroot for each supported arch of each sysroot creator.
+"""
+
+import glob
+import hashlib
+import json
+import multiprocessing
+import os
+import re
+import string
+import subprocess
+import sys
+
+
+def run_script(args):
+ fnull = open(os.devnull, 'w')
+ subprocess.check_call(args, stdout=fnull, stderr=fnull)
+
+
+def sha1sumfile(filename):
+ sha1 = hashlib.sha1()
+ with open(filename, 'rb') as f:
+ while True:
+ data = f.read(65536)
+ if not data:
+ break
+ sha1.update(data)
+ return sha1.hexdigest()
+
+
+def get_proc_output(args):
+ return subprocess.check_output(args).strip()
+
+
+def build_and_upload(script_path, distro, release, arch, lock):
+ script_dir = os.path.dirname(os.path.realpath(__file__))
+
+ run_script([script_path, 'BuildSysroot' + arch])
+ run_script([script_path, 'UploadSysroot' + arch])
+
+ tarball = '%s_%s_%s_sysroot.tar.xz' % (distro, release, arch.lower())
+ tarxz_path = os.path.join(script_dir, "..", "..", "..", "out",
+ "sysroot-build", release, tarball)
+ sha1sum = sha1sumfile(tarxz_path)
+ sysroot_dir = '%s_%s_%s-sysroot' % (distro, release, arch.lower())
+
+ sysroot_metadata = {
+ 'Tarball': tarball,
+ 'Sha1Sum': sha1sum,
+ 'SysrootDir': sysroot_dir,
+ }
+ with lock:
+    with open(os.path.join(script_dir, 'sysroots.json'), 'r+') as f:
+ sysroots = json.load(f)
+ sysroots["%s_%s" % (release, arch.lower())] = sysroot_metadata
+ f.seek(0)
+ f.truncate()
+ f.write(
+ json.dumps(
+ sysroots, sort_keys=True, indent=4, separators=(',', ': ')))
+ f.write('\n')
+
+
+def main():
+ script_dir = os.path.dirname(os.path.realpath(__file__))
+ subprocess.check_call(
+ [os.path.join(script_dir, 'update-archive-timestamp.sh')])
+ procs = []
+ lock = multiprocessing.Lock()
+ for filename in glob.glob(os.path.join(script_dir, 'sysroot-creator-*.sh')):
+ script_path = os.path.join(script_dir, filename)
+ distro = get_proc_output([script_path, 'PrintDistro'])
+ release = get_proc_output([script_path, 'PrintRelease'])
+ architectures = get_proc_output([script_path, 'PrintArchitectures'])
+ for arch in architectures.split('\n'):
+ proc = multiprocessing.Process(
+ target=build_and_upload,
+ args=(script_path, distro, release, arch, lock))
+ procs.append(("%s %s (%s)" % (distro, release, arch), proc))
+ proc.start()
+ for _, proc in procs:
+ proc.join()
+
+ print "SYSROOT CREATION SUMMARY"
+ failures = 0
+ for name, proc in procs:
+ if proc.exitcode:
+ failures += 1
+ status = "FAILURE" if proc.exitcode else "SUCCESS"
+ print "%s sysroot creation\t%s" % (name, status)
+ return failures
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg b/deps/v8/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg
new file mode 100644
index 0000000000..a282485bc6
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/debian-archive-sid-stable.gpg
Binary files differ
diff --git a/deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py b/deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py
new file mode 100755
index 0000000000..426e17ff5d
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Find incompatible symbols in glibc and output a list of replacements.
+"""
+
+import re
+import sys
+
+# This constant comes from https://crbug.com/580892
+MAX_ALLOWED_GLIBC_VERSION = [2, 17]
+
+
+def get_replacements(nm_file, max_allowed_glibc_version):
+  symbol_format = re.compile(r'\S+ \S+ ([^@]+)@@?(\S+)\n')
+  version_format = re.compile(r'GLIBC_[0-9.]+')
+ symbols = {}
+ for line in nm_file:
+ m = re.match(symbol_format, line)
+ symbol = m.group(1)
+ version = m.group(2)
+ if not re.match(version_format, version):
+ continue
+ if symbol in symbols:
+ symbols[symbol].add(version)
+ else:
+ symbols[symbol] = set([version])
+
+ replacements = []
+ for symbol, versions in symbols.iteritems():
+ if len(versions) <= 1:
+ continue
+ versions_parsed = [[
+        int(part) for part in version[len('GLIBC_'):].split('.')
+ ] for version in versions]
+ if (max(versions_parsed) > max_allowed_glibc_version and
+ min(versions_parsed) <= max_allowed_glibc_version):
+ # Use the newest allowed version of the symbol.
+ replacement_version_parsed = max([
+ version for version in versions_parsed
+ if version <= max_allowed_glibc_version
+ ])
+ replacement_version = 'GLIBC_' + '.'.join(
+ [str(part) for part in replacement_version_parsed])
+ replacements.append('__asm__(".symver %s, %s@%s");' %
+ (symbol, symbol, replacement_version))
+ return sorted(replacements)
+
+
+if __name__ == '__main__':
+ replacements = get_replacements(sys.stdin, MAX_ALLOWED_GLIBC_VERSION)
+ if replacements:
+ print('// Chromium-specific hack.')
+ print('// See explanation in sysroot-creator.sh.')
+ for replacement in replacements:
+ print replacement
diff --git a/deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py b/deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py
new file mode 100755
index 0000000000..5af3eb2c25
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import cStringIO
+import find_incompatible_glibc_symbols
+
+NM_DATA = """\
+0000000000000001 W expf@GLIBC_2.2.5
+0000000000000002 W expf@@GLIBC_2.27
+0000000000000003 W foo@@GLIBC_2.2.5
+0000000000000004 W bar@GLIBC_2.2.5
+0000000000000005 W baz@GLIBC_2.2.5
+0000000000000006 T foo2@GLIBC_2.2
+0000000000000007 T foo2@GLIBC_2.3
+0000000000000008 T foo2@GLIBC_2.30
+0000000000000009 T foo2@@GLIBC_2.31
+000000000000000a T bar2@GLIBC_2.30
+000000000000000b T bar2@@GLIBC_2.31
+000000000000000c T baz2@GLIBC_2.2
+000000000000000d T baz2@@GLIBC_2.3
+"""
+
+EXPECTED_REPLACEMENTS = [
+ '__asm__(".symver expf, expf@GLIBC_2.2.5");',
+ '__asm__(".symver foo2, foo2@GLIBC_2.3");',
+]
+
+nm_file = cStringIO.StringIO()
+nm_file.write(NM_DATA)
+nm_file.seek(0)
+
+assert (
+ EXPECTED_REPLACEMENTS == find_incompatible_glibc_symbols.get_replacements(
+ nm_file, [2, 17]))
diff --git a/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.amd64 b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
new file mode 100644
index 0000000000..5b11066eea
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.amd64
@@ -0,0 +1,329 @@
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.30.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.30.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.30.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.30.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.30.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.30.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.4-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.50-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.50-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.4.3-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.8-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.8-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.8-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.8-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.4-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.4-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.13.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.13.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_1.0.5-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi-dev_1.0.5-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-22_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-22_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-22_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libcilkrts5_7.3.0-29_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-29_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libitm1_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/liblsan0_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libmpx2_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libquadmath0_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libtsan0_8.2.0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.38.0+dfsg-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.38.0+dfsg-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.58.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.58.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.19-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.19-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.19-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.19-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.19-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-0_1.8.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-dev_1.8.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.24.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-0_3.91.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-dev_3.91.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.9.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.9.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.9.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.9.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git161113-5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu60_60.2-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.9.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.12-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr490+repack1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr490+repack1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr490+repack1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr490+repack1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.94-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.94-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.94-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-intel1_2.4.94-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.94-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.94-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.5.2-0.3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.5.2-0.3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi7_3.3~rc0-7_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles1_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.1.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.32-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.32-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.5-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpsl/libpsl5_0.20.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.8-1+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.64.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.64.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.28-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.3.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.3.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.3.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.3.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.3.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.3.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.6-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0-dev_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.4-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-7+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-7+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.32-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.32-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.18.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_1.8.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.10-0.1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_18.1.8-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.20-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.20-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.39-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.39-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.1-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.3~beta+20180518-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.3~beta+20180518-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.14-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.8_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.8_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.4-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-11_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-11_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-11_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-11_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-11_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-1_0.2.3-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-dev_0.2.3-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libspa-lib-0.1-dev_0.2.3-3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_12.2-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_12.2-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_12.2-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180901+dfsg-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180901+dfsg-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/shared-mime-info/shared-mime-info_1.10-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.25.2-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_239-10_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_239-10_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_239-10_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-6_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound8_1.8.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.32.1-0.1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.32.1-0.1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.32.1-0.1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.32.1-0.1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/v/vulkan/libvulkan1_1.1.73+dfsg-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.16.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.16.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.16.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-egl1_1.16.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.16.0-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.16-1_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-2_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_amd64.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-1_amd64.deb
diff --git a/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm
new file mode 100644
index 0000000000..c6c86be2f2
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm
@@ -0,0 +1,327 @@
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.30.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.30.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.30.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.30.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.30.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.30.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.4-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.50-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.50-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.4.3-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.8-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.8-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.8-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.8-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.10-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.10-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.4-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.4-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.13.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.13.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_1.0.5-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi-dev_1.0.5-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-22_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-22_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-22_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-29_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8.2.0-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8.2.0-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8.2.0-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8.2.0-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.38.0+dfsg-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.38.0+dfsg-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.58.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.58.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.19-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.19-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.19-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.19-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.19-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-0_1.8.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-dev_1.8.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.24.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-0_3.91.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-dev_3.91.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.9.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.9.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.9.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.9.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git161113-5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu60_60.2-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.9.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.12-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr490+repack1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr490+repack1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr490+repack1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr490+repack1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-exynos1_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-freedreno1_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-omap1_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-tegra0_2.4.94-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.5.2-0.3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.5.2-0.3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi7_3.3~rc0-7_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles1_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.1.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.32-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.32-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.5-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpsl/libpsl5_0.20.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.8-1+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.64.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.64.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.28-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.10-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.3.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.3.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.3.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.3.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.3.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.3.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0-dev_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.4-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-7+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-7+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.32-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.32-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.18.6-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_1.8.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.10-0.1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_18.1.8-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.20-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.20-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.39-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.39-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.1-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.3~beta+20180518-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.3~beta+20180518-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.14-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.8_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.8_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.4-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-11_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-11_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-11_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-11_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-11_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-1_0.2.3-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-dev_0.2.3-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libspa-lib-0.1-dev_0.2.3-3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_12.2-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_12.2-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_12.2-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180901+dfsg-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180901+dfsg-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/shared-mime-info/shared-mime-info_1.10-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.25.2-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_239-10_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_239-10_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_239-10_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-6_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound8_1.8.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.32.1-0.1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.32.1-0.1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.32.1-0.1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.32.1-0.1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/v/vulkan/libvulkan1_1.1.73+dfsg-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.16.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.16.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.16.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-egl1_1.16.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.16.0-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.16-1_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-2_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_armhf.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-1_armhf.deb
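
[Note: the generated lists in this commit are consumed by the sysroot scripts, which download each listed .deb and unpack it into a per-architecture sysroot. As a rough illustration only — not the in-tree implementation, which also verifies checksums — a minimal Python sketch of that fetch-and-unpack loop could look like the following; install_list and the paths are hypothetical, and dpkg-deb must be available on the host.]

    #!/usr/bin/env python3
    # Sketch: fetch every .deb URL named in a generated package list
    # (such as sid.arm64 below) and extract it into a sysroot directory.
    import os
    import subprocess
    import urllib.request

    def install_list(list_path, sysroot_dir):
        os.makedirs(sysroot_dir, exist_ok=True)
        with open(list_path) as f:
            urls = [line.strip() for line in f if line.strip()]
        for url in urls:
            deb = os.path.join(sysroot_dir, os.path.basename(url))
            urllib.request.urlretrieve(url, deb)  # download the package
            # Extract the package contents (dpkg-deb -x) into the sysroot.
            subprocess.check_call(['dpkg-deb', '-x', deb, sysroot_dir])

    install_list('sid.arm64', 'out/sysroot-arm64')  # hypothetical paths
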
diff --git a/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm64 b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
new file mode 100644
index 0000000000..2ad21f52ea
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.arm64
@@ -0,0 +1,328 @@
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.30.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.30.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.30.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.30.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.30.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.30.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.4-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.50-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.50-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.4.3-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.8-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.8-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.8-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.8-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.10-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.10-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.4-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.4-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.13.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.13.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_1.0.5-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi-dev_1.0.5-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-22_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-22_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-22_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-29_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8.2.0-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8.2.0-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8.2.0-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libitm1_8.2.0-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8.2.0-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.38.0+dfsg-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.38.0+dfsg-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.58.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.58.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.19-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.19-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.19-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.19-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.19-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-0_1.8.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-dev_1.8.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.24.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-0_3.91.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-dev_3.91.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.9.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.9.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.9.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.9.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git161113-5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu60_60.2-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.9.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.12-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr490+repack1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr490+repack1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr490+repack1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr490+repack1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-freedreno1_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-tegra0_2.4.94-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.5.2-0.3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.5.2-0.3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi7_3.3~rc0-7_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles1_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.1.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.32-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.32-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.5-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpsl/libpsl5_0.20.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.8-1+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.64.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.64.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.28-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.10-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.3.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.3.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.3.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.3.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.3.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.3.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0-dev_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.4-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-7+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-7+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.32-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.32-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.18.6-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_1.8.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.10-0.1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_18.1.8-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.20-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.20-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.39-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.39-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.1-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.3~beta+20180518-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.3~beta+20180518-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.14-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.8_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.8_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.4-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-11_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-11_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-11_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-11_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-11_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-1_0.2.3-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-dev_0.2.3-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libspa-lib-0.1-dev_0.2.3-3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_12.2-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_12.2-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_12.2-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180901+dfsg-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180901+dfsg-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/shared-mime-info/shared-mime-info_1.10-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.25.2-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_239-10_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_239-10_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_239-10_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-6_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound8_1.8.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.32.1-0.1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.32.1-0.1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.32.1-0.1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.32.1-0.1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/v/vulkan/libvulkan1_1.1.73+dfsg-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.16.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.16.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.16.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-egl1_1.16.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.16.0-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.16-1_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-2_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_arm64.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-1_arm64.deb
diff --git a/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.i386 b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.i386
new file mode 100644
index 0000000000..dee9a8415b
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.i386
@@ -0,0 +1,327 @@
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.30.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.30.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.30.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.30.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.30.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.30.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.4-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.50-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.50-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.4.3-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.8-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.8-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.8-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.8-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.4-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.4-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.13.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.13.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_1.0.5-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi-dev_1.0.5-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libasan3_6.4.0-22_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-22_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-22_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libcilkrts5_7.3.0-29_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-7/libubsan0_7.3.0-29_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libitm1_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libmpx2_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libquadmath0_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8.2.0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.38.0+dfsg-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.38.0+dfsg-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.58.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.58.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.19-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.19-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.19-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.19-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.19-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-0_1.8.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-dev_1.8.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.24.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-0_3.91.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-dev_3.91.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.9.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.9.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.9.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.9.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git161113-5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_i386.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu60_60.2-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.9.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.12-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr490+repack1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr490+repack1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr490+repack1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr490+repack1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.94-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.94-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.94-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-intel1_2.4.94-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.94-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.94-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.5.2-0.3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.5.2-0.3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi7_3.3~rc0-7_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles1_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.1.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.32-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.32-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.5-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpsl/libpsl5_0.20.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.8-1+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.64.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.64.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.28-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.3.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.3.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.3.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.3.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.3.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.3.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.6-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0-dev_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.4-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-7+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-7+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.32-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.32-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.18.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_1.8.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.10-0.1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_18.1.8-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.20-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.20-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.39-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.39-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.1-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.3~beta+20180518-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.3~beta+20180518-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.14-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.8_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.8_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.4-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-11_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-11_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-11_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-11_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-11_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-1_0.2.3-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-dev_0.2.3-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libspa-lib-0.1-dev_0.2.3-3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_12.2-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_12.2-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_12.2-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180901+dfsg-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180901+dfsg-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/shared-mime-info/shared-mime-info_1.10-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.25.2-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_239-10_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_239-10_i386.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_239-10_i386.deb
+http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_i386.deb
+http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-6_i386.deb
+http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound8_1.8.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.32.1-0.1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.32.1-0.1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.32.1-0.1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.32.1-0.1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/v/vulkan/libvulkan1_1.1.73+dfsg-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.16.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.16.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.16.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-egl1_1.16.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.16.0-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.16-1_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-2_i386.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_i386.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_i386.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-1_i386.deb
diff --git a/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
new file mode 100644
index 0000000000..825d387336
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mips64el
@@ -0,0 +1,320 @@
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.30.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.30.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.30.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.30.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.30.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.30.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.4-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.50-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.50-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.4.3-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.8-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.8-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.8-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.8-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.10-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.10-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.4-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.4-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.13.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.13.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_1.0.5-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi-dev_1.0.5-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-22_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-22_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8.2.0-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8.2.0-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8.2.0-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8.2.0-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.38.0+dfsg-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.38.0+dfsg-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.58.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.58.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.19-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.19-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.19-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.19-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.19-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-0_1.8.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-dev_1.8.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.24.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-0_3.91.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-dev_3.91.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.9.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.9.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.9.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.9.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git161113-5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu60_60.2-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.9.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.12-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr490+repack1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr490+repack1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr490+repack1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr490+repack1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.94-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.94-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.94-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.94-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.94-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.5.2-0.3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.5.2-0.3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi7_3.3~rc0-7_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles1_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.1.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.32-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.32-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.5-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpsl/libpsl5_0.20.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.8-1+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.64.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.64.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.28-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.10-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.3.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.3.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.3.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.3.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.3.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.3.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.6-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0-dev_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.4-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-7+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-7+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.32-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.32-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.18.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_1.8.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.10-0.1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_18.1.8-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.20-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.20-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.39-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.39-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.1-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.3~beta+20180518-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.3~beta+20180518-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.14-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.8_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.8_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.4-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-11_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-11_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-11_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-11_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-11_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-1_0.2.3-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-dev_0.2.3-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libspa-lib-0.1-dev_0.2.3-3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_12.2-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_12.2-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_12.2-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180901+dfsg-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180901+dfsg-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/shared-mime-info/shared-mime-info_1.10-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.25.2-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_239-10_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_239-10_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_239-10_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-6_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound8_1.8.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.32.1-0.1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.32.1-0.1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.32.1-0.1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.32.1-0.1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/v/vulkan/libvulkan1_1.1.73+dfsg-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.16.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.16.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.16.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-egl1_1.16.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.16.0-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.16-1_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-2_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_mips64el.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-1_mips64el.deb
diff --git a/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel
new file mode 100644
index 0000000000..8317793d76
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/generated_package_lists/sid.mipsel
@@ -0,0 +1,320 @@
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2_1.1.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/alsa-lib/libasound2-dev_1.1.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-0_2.30.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/atk1.0/libatk1.0-dev_2.30.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.30.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.30.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-0_2.30.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/at-spi2-core/libatspi2.0-dev_2.30.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/attr/libattr1_2.4.47-2+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/audit/libaudit1_2.8.4-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-client3_0.7-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/a/avahi/libavahi-common3_0.7-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth3_5.50-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/b/bluez/libbluetooth-dev_5.50-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi0.6_5.6-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/b/brltty/libbrlapi-dev_5.6-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2_1.15.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo2-dev_1.15.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-gobject2_1.15.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cairo/libcairo-script-interpreter2_1.15.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/colord/libcolord2_1.4.3-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2_2.2.8-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcups2-dev_2.2.8-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2_2.2.8-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/c/cups/libcupsimage2-dev_2.2.8-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-3_1.12.10-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/d/dbus/libdbus-1-dev_1.12.10-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/comerr-dev_2.1-1.44.4-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/e/e2fsprogs/libcom-err2_1.44.4-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf1_0.170-0.5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/e/elfutils/libelf-dev_0.170-0.5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1_2.2.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/e/expat/libexpat1-dev_2.2.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac8_1.3.2-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/flac/libflac-dev_1.3.2-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1_2.13.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/fontconfig/libfontconfig1-dev_2.13.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6_2.8.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/freetype/libfreetype6-dev_2.8.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi0_1.0.5-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/f/fribidi/libfribidi-dev_1.0.5-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libgcc-6-dev_6.4.0-22_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-6/libstdc++-6-dev_6.4.0-22_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libatomic1_8.2.0-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgcc1_8.2.0-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libgomp1_8.2.0-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gcc-8/libstdc++6_8.2.0-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.38.0+dfsg-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.38.0+dfsg-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-0_2.58.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/glib2.0/libglib2.0-dev_2.58.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6_2.27-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/glibc/libc6-dev_2.27-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gmp/libgmp10_6.1.2+dfsg-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls28-dev_3.5.19-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls30_3.5.19-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-dane0_3.5.19-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutls-openssl27_3.5.19-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gnutls28/libgnutlsxx28_3.5.19-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-0_1.8.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphene/libgraphene-1.0-dev_1.8.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-3_1.3.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/graphite2/libgraphite2-dev_1.3.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.32-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-0_3.24.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-0_3.91.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/g/gtk+4.0/libgtk-4-dev_3.91.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz0b_1.9.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-dev_1.9.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-gobject0_1.9.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/h/harfbuzz/libharfbuzz-icu0_1.9.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git161113-5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-9_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/i/icu/libicu60_60.2-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/j/json-glib/libjson-glib-1.0-0_1.4.2-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/keyutils/libkeyutils1_1.5.9-9.3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/krb5-multidev_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssapi-krb5-2_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libgssrpc4_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libk5crypto3_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5clnt-mit11_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkadm5srv-mit11_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkdb5-9_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-3_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5-dev_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/k/krb5/libkrb5support0_1.16.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator1_0.4.92-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-1_0.4.92-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator3-dev_0.4.92-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libappindicator/libappindicator-dev_0.4.92-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/liba/libasyncns/libasyncns0_0.8-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libb/libbsd/libbsd0_0.9.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap2_2.25-1.2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap2/libcap-dev_2.25-1.2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdatrie/libdatrie1_0.2.12-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr490+repack1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr490+repack1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr490+repack1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr490+repack1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm2_2.4.94-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.94-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-dev_2.4.94-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-nouveau2_2.4.94-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libd/libdrm/libdrm-radeon1_2.4.94-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy0_1.5.2-0.3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libepoxy/libepoxy-dev_1.5.2-0.3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libe/libevent/libevent-2.1-6_2.1.8-stable-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi6_3.2.1-8_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi7_3.3~rc0-7_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libf/libffi/libffi-dev_3.2.1-8_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libegl1_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgl1_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles1_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libgles2_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd0_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglvnd-dev_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libglx0_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libglvnd/libopengl0_1.1.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error0_1.32-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error-dev_1.32-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libice/libice6_1.0.9-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn2/libidn2-0_2.0.5-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libidn/libidn11_1.33-2.2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator3-7_0.5.0-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libi/libindicator/libindicator7_0.5.0-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_1.5.2-2+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_1.5.2-2+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp1_1.7.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.7.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b5_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg0_1.3.2-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libo/libogg/libogg-dev_1.3.2-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpciaccess/libpciaccess0_0.14-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng16-16_1.6.34-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpng1.6/libpng-dev_1.6.34-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpsl/libpsl5_0.20.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.3-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libr/librest/librest-0.7-0_0.8.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libselinux/libselinux1_2.8-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsm/libsm6_1.2.2-1+b3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsndfile/libsndfile1_1.0.28-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup2.4-1_2.64.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.64.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libthai/libthai0_0.1.28-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libt/libtool/libltdl7_2.4.6-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libu/libunistring/libunistring2_0.9.10-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva2_2.3.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-dev_2.3.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-drm2_2.3.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-glx2_2.3.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-wayland2_2.3.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libva/libva-x11-2_2.3.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbis0a_1.3.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvorbis/libvorbisenc2_1.3.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx5_1.7.0-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libv/libvpx/libvpx-dev_1.7.0-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp6_0.6.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebp-dev_0.6.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libw/libwebp/libwebpmux3_0.6.1-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-6_1.6.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-dev_1.6.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb1_1.6.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libx11/libx11-xcb-dev_1.6.6-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau6_1.0.8-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxau/libxau-dev_1.0.8-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb1-dev_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-dri3-0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-glx0-dev_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-present0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-render0-dev_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-shm0-dev_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-sync1_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcb/libxcb-xfixes0_1.13-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite1_0.4.4-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.4-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor1_1.1.15-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxcursor/libxcursor-dev_1.1.15-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage1_1.1.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdamage/libxdamage-dev_1.1.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext6_1.3.3-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxext/libxext-dev_1.3.3-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes3_5.0.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi6_1.7.9-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxi/libxi-dev_1.7.9-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama1_1.1.4-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon0_0.8.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxkbcommon/libxkbcommon-dev_0.8.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4+dfsg1-7+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxml2/libxml2-dev_2.9.4+dfsg1-7+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender1_0.9.10-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1.1_1.1.32-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxslt/libxslt1-dev_1.1.32-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss1_1.2.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxss/libxss-dev_1.2.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt6_1.1.5-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxt/libxt-dev_1.1.5-1+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst6_1.2.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/l/lcms2/liblcms2-2_2.9-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/l/linux/linux-libc-dev_4.18.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/l/lz4/liblz4-1_1.8.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/l/lzo2/liblzo2-2_2.10-0.1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libegl1-mesa-dev_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm1_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgbm-dev_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-dev_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libgl1-mesa-glx_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libglapi-mesa_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/libwayland-egl1-mesa_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/mesa/mesa-common-dev_18.1.8-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libhogweed4_3.4-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/n/nettle/libnettle6_3.4-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4_4.20-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/n/nspr/libnspr4-dev_4.20-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3_3.39-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/n/nss/libnss3-dev_3.39-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl1.0/libssl1.0.2_1.0.2o-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl1.1_1.1.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/openssl/libssl-dev_1.1.1-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus0_1.3~beta+20180518-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/opus/libopus-dev_1.3~beta+20180518-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit-2-0_2.14.19-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/o/orbit2/liborbit2_2.14.19-4_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/p11-kit/libp11-kit0_0.23.14-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g_1.1.8-3.8_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pam/libpam0g-dev_1.1.8-3.8_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango-1.0-0_1.42.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpango1.0-dev_1.42.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangocairo-1.0-0_1.42.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoft2-1.0-0_1.42.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pango1.0/libpangoxft-1.0-0_1.42.4-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5+b2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci3_3.5.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pciutils/libpci-dev_3.5.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre16-3_8.39-11_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre32-3_8.39-11_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3_8.39-11_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcre3-dev_8.39-11_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pcre3/libpcrecpp0v5_8.39-11_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-1_0.2.3-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libpipewire-0.2-dev_0.2.3-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pipewire/libspa-lib-0.1-dev_0.2.3-3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-0_0.34.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pixman/libpixman-1-dev_0.34.0-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse0_12.2-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-dev_12.2-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/p/pulseaudio/libpulse-mainloop-glib0_12.2-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-4_20180901+dfsg-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/r/re2/libre2-dev_20180901+dfsg-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/shared-mime-info/shared-mime-info_1.10-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy1v5_1.1.7-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/snappy/libsnappy-dev_1.1.7-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd2_0.8.8-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/libspeechd-dev_0.8.8-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/speech-dispatcher/speech-dispatcher_0.8.8-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/sqlite3/libsqlite3-0_3.25.2-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libsystemd0_239-10_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev1_239-10_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/s/systemd/libudev-dev_239-10_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/t/tcp-wrappers/libwrap0_7.6.q-27_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/t/tiff/libtiff5_4.0.9-6_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/u/unbound/libunbound8_1.8.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libblkid1_2.32.1-0.1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libmount1_2.32.1-0.1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/libuuid1_2.32.1-0.1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/u/util-linux/uuid-dev_2.32.1-0.1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/v/vulkan/libvulkan1_1.1.73+dfsg-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-client0_1.16.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-cursor0_1.16.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-dev_1.16.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-egl1_1.16.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland/libwayland-server0_1.16.0-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/w/wayland-protocols/wayland-protocols_1.16-1_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xft/libxft2_2.3.2-2_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-composite-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-damage-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-fixes-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-input-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-kb-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-randr-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-record-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-render-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-scrnsaver-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xext-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xorgproto/x11proto-xinerama-dev_2018.4-4_all.deb
+http://ftp.us.debian.org/debian/pool/main/x/xz-utils/liblzma5_5.2.2-1.3_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g_1.2.11.dfsg-1_mipsel.deb
+http://ftp.us.debian.org/debian/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-1_mipsel.deb
diff --git a/deps/v8/build/linux/sysroot_scripts/install-sysroot.py b/deps/v8/build/linux/sysroot_scripts/install-sysroot.py
new file mode 100755
index 0000000000..115dce4e23
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/install-sysroot.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install Debian sysroots for building chromium.
+"""
+
+# The sysroot is needed to ensure that binaries that get built will run on
+# the oldest stable version of Debian that we currently support.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks this script is a no-op on non-linux platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of the Debian archive, but for consistency we use a pre-built root
+# image (we don't want upstream changes to Debian to affect the chromium build
+# until we choose to pull them in). The images will normally need to be
+# rebuilt every time chrome's build dependencies are changed, but should also
+# be updated periodically to include upstream security fixes from Debian.
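+#
+# Example invocations (a sketch; the --arch value is normalized through
+# ARCH_TRANSLATIONS below, so both Chrome-style and Debian-style names work):
+#   install-sysroot.py --arch=x64        # install the amd64 sysroot
+#   install-sysroot.py --print-hash=arm  # print the arm sysroot's SHA1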
+
+import hashlib
+import json
+import optparse
+import os
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import urllib2
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+URL_PREFIX = 'https://commondatastorage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+
+VALID_ARCHS = ('arm', 'arm64', 'i386', 'amd64', 'mips', 'mips64el')
+
+ARCH_TRANSLATIONS = {
+ 'x64': 'amd64',
+ 'x86': 'i386',
+ 'mipsel': 'mips',
+ 'mips64': 'mips64el',
+}
+
+DEFAULT_TARGET_PLATFORM = 'sid'
+
+class Error(Exception):
+ pass
+
+
+def GetSha1(filename):
+ sha1 = hashlib.sha1()
+ with open(filename, 'rb') as f:
+ while True:
+ # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
+ chunk = f.read(1024*1024)
+ if not chunk:
+ break
+ sha1.update(chunk)
+ return sha1.hexdigest()
+
+
+def main(args):
+ parser = optparse.OptionParser('usage: %prog [OPTIONS]', description=__doc__)
+ parser.add_option('--arch',
+ help='Sysroot architecture: %s' % ', '.join(VALID_ARCHS))
+ parser.add_option('--all', action='store_true',
+ help='Install all sysroot images (useful when updating the'
+ ' images)')
+ parser.add_option('--print-hash',
+ help='Print the hash of the sysroot for the given arch.')
+ options, _ = parser.parse_args(args)
+ if not sys.platform.startswith('linux'):
+ return 0
+
+ if options.print_hash:
+ arch = options.print_hash
+ print GetSysrootDict(DEFAULT_TARGET_PLATFORM,
+ ARCH_TRANSLATIONS.get(arch, arch))['Sha1Sum']
+ return 0
+ if options.arch:
+ InstallSysroot(DEFAULT_TARGET_PLATFORM,
+ ARCH_TRANSLATIONS.get(options.arch, options.arch))
+ elif options.all:
+ for arch in VALID_ARCHS:
+ InstallSysroot(DEFAULT_TARGET_PLATFORM, arch)
+ else:
+    print 'You must specify one of the options.'
+ return 1
+
+ return 0
+
+
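+# An entry in sysroots.json looks roughly like the following (a sketch
+# inferred from the keys consumed below; the actual values live in the
+# checked-in sysroots.json):
+#   "sid_amd64": {
+#     "Tarball": "debian_sid_amd64_sysroot.tar.xz",
+#     "Sha1Sum": "<sha1 of the tarball>",
+#     "SysrootDir": "debian_sid_amd64-sysroot"
+#   }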
+def GetSysrootDict(target_platform, target_arch):
+ if target_arch not in VALID_ARCHS:
+ raise Error('Unknown architecture: %s' % target_arch)
+
+ sysroots_file = os.path.join(SCRIPT_DIR, 'sysroots.json')
+ sysroots = json.load(open(sysroots_file))
+ sysroot_key = '%s_%s' % (target_platform, target_arch)
+ if sysroot_key not in sysroots:
+ raise Error('No sysroot for: %s %s' % (target_platform, target_arch))
+ return sysroots[sysroot_key]
+
+
+def InstallSysroot(target_platform, target_arch):
+ sysroot_dict = GetSysrootDict(target_platform, target_arch)
+ tarball_filename = sysroot_dict['Tarball']
+ tarball_sha1sum = sysroot_dict['Sha1Sum']
+ # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
+ # it on every build.
+ linux_dir = os.path.dirname(SCRIPT_DIR)
+ sysroot = os.path.join(linux_dir, sysroot_dict['SysrootDir'])
+
+ url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, tarball_sha1sum,
+ tarball_filename)
+
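+  # The .stamp file records the URL of the sysroot last unpacked here; if it
+  # matches, the sysroot is already up to date and we can return early.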
+ stamp = os.path.join(sysroot, '.stamp')
+ if os.path.exists(stamp):
+ with open(stamp) as s:
+ if s.read() == url:
+ return
+
+ print 'Installing Debian %s %s root image: %s' % \
+ (target_platform, target_arch, sysroot)
+ if os.path.isdir(sysroot):
+ shutil.rmtree(sysroot)
+ os.mkdir(sysroot)
+ tarball = os.path.join(sysroot, tarball_filename)
+ print 'Downloading %s' % url
+ sys.stdout.flush()
+ sys.stderr.flush()
+ for _ in range(3):
+ try:
+ response = urllib2.urlopen(url)
+ with open(tarball, "wb") as f:
+ f.write(response.read())
+ break
+    except Exception:
+      # Transient download failures happen; retry (up to three attempts).
+      pass
+ else:
+ raise Error('Failed to download %s' % url)
+ sha1sum = GetSha1(tarball)
+ if sha1sum != tarball_sha1sum:
+    raise Error('Tarball sha1sum is wrong. '
+                'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum))
+ subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+ os.remove(tarball)
+
+ with open(stamp, 'w') as s:
+ s.write(url)
+
+
+if __name__ == '__main__':
+ try:
+ sys.exit(main(sys.argv[1:]))
+ except Error as e:
+ sys.stderr.write(str(e) + '\n')
+ sys.exit(1)
diff --git a/deps/v8/build/linux/sysroot_scripts/libdbus-1-3-symbols b/deps/v8/build/linux/sysroot_scripts/libdbus-1-3-symbols
new file mode 100644
index 0000000000..28050aa570
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/libdbus-1-3-symbols
@@ -0,0 +1,235 @@
+libdbus-1.so.3 libdbus-1-3 #MINVER#
+ dbus_address_entries_free@Base 1.0.2
+ dbus_address_entry_get_method@Base 1.0.2
+ dbus_address_entry_get_value@Base 1.0.2
+ dbus_address_escape_value@Base 1.0.2
+ dbus_address_unescape_value@Base 1.0.2
+ dbus_bus_add_match@Base 1.0.2
+ dbus_bus_get@Base 1.0.2
+ dbus_bus_get_id@Base 1.1.1
+ dbus_bus_get_private@Base 1.0.2
+ dbus_bus_get_unique_name@Base 1.0.2
+ dbus_bus_get_unix_user@Base 1.0.2
+ dbus_bus_name_has_owner@Base 1.0.2
+ dbus_bus_register@Base 1.0.2
+ dbus_bus_release_name@Base 1.0.2
+ dbus_bus_remove_match@Base 1.0.2
+ dbus_bus_request_name@Base 1.0.2
+ dbus_bus_set_unique_name@Base 1.0.2
+ dbus_bus_start_service_by_name@Base 1.0.2
+ dbus_connection_add_filter@Base 1.0.2
+ dbus_connection_allocate_data_slot@Base 1.0.2
+ dbus_connection_borrow_message@Base 1.0.2
+ dbus_connection_can_send_type@Base 1.3.1
+ dbus_connection_close@Base 1.0.2
+ dbus_connection_dispatch@Base 1.0.2
+ dbus_connection_flush@Base 1.0.2
+ dbus_connection_free_data_slot@Base 1.0.2
+ dbus_connection_free_preallocated_send@Base 1.0.2
+ dbus_connection_get_adt_audit_session_data@Base 1.2.4
+ dbus_connection_get_data@Base 1.0.2
+ dbus_connection_get_dispatch_status@Base 1.0.2
+ dbus_connection_get_is_anonymous@Base 1.1.1
+ dbus_connection_get_is_authenticated@Base 1.0.2
+ dbus_connection_get_is_connected@Base 1.0.2
+ dbus_connection_get_max_message_size@Base 1.0.2
+ dbus_connection_get_max_message_unix_fds@Base 1.3.1
+ dbus_connection_get_max_received_size@Base 1.0.2
+ dbus_connection_get_max_received_unix_fds@Base 1.3.1
+ dbus_connection_get_object_path_data@Base 1.0.2
+ dbus_connection_get_outgoing_size@Base 1.0.2
+ dbus_connection_get_outgoing_unix_fds@Base 1.3.1
+ dbus_connection_get_server_id@Base 1.1.1
+ dbus_connection_get_socket@Base 1.0.2
+ dbus_connection_get_unix_fd@Base 1.0.2
+ dbus_connection_get_unix_process_id@Base 1.0.2
+ dbus_connection_get_unix_user@Base 1.0.2
+ dbus_connection_get_windows_user@Base 1.1.1
+ dbus_connection_has_messages_to_send@Base 1.0.2
+ dbus_connection_list_registered@Base 1.0.2
+ dbus_connection_open@Base 1.0.2
+ dbus_connection_open_private@Base 1.0.2
+ dbus_connection_pop_message@Base 1.0.2
+ dbus_connection_preallocate_send@Base 1.0.2
+ dbus_connection_read_write@Base 1.0.2
+ dbus_connection_read_write_dispatch@Base 1.0.2
+ dbus_connection_ref@Base 1.0.2
+ dbus_connection_register_fallback@Base 1.0.2
+ dbus_connection_register_object_path@Base 1.0.2
+ dbus_connection_remove_filter@Base 1.0.2
+ dbus_connection_return_message@Base 1.0.2
+ dbus_connection_send@Base 1.0.2
+ dbus_connection_send_preallocated@Base 1.0.2
+ dbus_connection_send_with_reply@Base 1.0.2
+ dbus_connection_send_with_reply_and_block@Base 1.0.2
+ dbus_connection_set_allow_anonymous@Base 1.1.1
+ dbus_connection_set_change_sigpipe@Base 1.0.2
+ dbus_connection_set_data@Base 1.0.2
+ dbus_connection_set_dispatch_status_function@Base 1.0.2
+ dbus_connection_set_exit_on_disconnect@Base 1.0.2
+ dbus_connection_set_max_message_size@Base 1.0.2
+ dbus_connection_set_max_message_unix_fds@Base 1.3.1
+ dbus_connection_set_max_received_size@Base 1.0.2
+ dbus_connection_set_max_received_unix_fds@Base 1.3.1
+ dbus_connection_set_route_peer_messages@Base 1.0.2
+ dbus_connection_set_timeout_functions@Base 1.0.2
+ dbus_connection_set_unix_user_function@Base 1.0.2
+ dbus_connection_set_wakeup_main_function@Base 1.0.2
+ dbus_connection_set_watch_functions@Base 1.0.2
+ dbus_connection_set_windows_user_function@Base 1.1.1
+ dbus_connection_steal_borrowed_message@Base 1.0.2
+ dbus_connection_try_register_fallback@Base 1.1.4
+ dbus_connection_try_register_object_path@Base 1.1.4
+ dbus_connection_unref@Base 1.0.2
+ dbus_connection_unregister_object_path@Base 1.0.2
+ dbus_error_free@Base 1.0.2
+ dbus_error_has_name@Base 1.0.2
+ dbus_error_init@Base 1.0.2
+ dbus_error_is_set@Base 1.0.2
+ dbus_free@Base 1.0.2
+ dbus_free_string_array@Base 1.0.2
+ dbus_get_local_machine_id@Base 1.0.2
+ dbus_get_version@Base 1.1.4
+ dbus_internal_do_not_use_create_uuid@Base 1.0.2
+ dbus_internal_do_not_use_get_uuid@Base 1.0.2
+ dbus_malloc0@Base 1.0.2
+ dbus_malloc@Base 1.0.2
+ dbus_message_allocate_data_slot@Base 1.0.2
+ dbus_message_append_args@Base 1.0.2
+ dbus_message_append_args_valist@Base 1.0.2
+ dbus_message_contains_unix_fds@Base 1.3.1
+ dbus_message_copy@Base 1.0.2
+ dbus_message_demarshal@Base 1.1.1
+ dbus_message_demarshal_bytes_needed@Base 1.2.14
+ dbus_message_free_data_slot@Base 1.0.2
+ dbus_message_get_args@Base 1.0.2
+ dbus_message_get_args_valist@Base 1.0.2
+ dbus_message_get_auto_start@Base 1.0.2
+ dbus_message_get_data@Base 1.0.2
+ dbus_message_get_destination@Base 1.0.2
+ dbus_message_get_error_name@Base 1.0.2
+ dbus_message_get_interface@Base 1.0.2
+ dbus_message_get_member@Base 1.0.2
+ dbus_message_get_no_reply@Base 1.0.2
+ dbus_message_get_path@Base 1.0.2
+ dbus_message_get_path_decomposed@Base 1.0.2
+ dbus_message_get_reply_serial@Base 1.0.2
+ dbus_message_get_sender@Base 1.0.2
+ dbus_message_get_serial@Base 1.0.2
+ dbus_message_get_signature@Base 1.0.2
+ dbus_message_get_type@Base 1.0.2
+ dbus_message_has_destination@Base 1.0.2
+ dbus_message_has_interface@Base 1.0.2
+ dbus_message_has_member@Base 1.0.2
+ dbus_message_has_path@Base 1.0.2
+ dbus_message_has_sender@Base 1.0.2
+ dbus_message_has_signature@Base 1.0.2
+ dbus_message_is_error@Base 1.0.2
+ dbus_message_is_method_call@Base 1.0.2
+ dbus_message_is_signal@Base 1.0.2
+ dbus_message_iter_abandon_container@Base 1.2.16
+ dbus_message_iter_append_basic@Base 1.0.2
+ dbus_message_iter_append_fixed_array@Base 1.0.2
+ dbus_message_iter_close_container@Base 1.0.2
+ dbus_message_iter_get_arg_type@Base 1.0.2
+ dbus_message_iter_get_array_len@Base 1.0.2
+ dbus_message_iter_get_basic@Base 1.0.2
+ dbus_message_iter_get_element_type@Base 1.0.2
+ dbus_message_iter_get_fixed_array@Base 1.0.2
+ dbus_message_iter_get_signature@Base 1.0.2
+ dbus_message_iter_has_next@Base 1.0.2
+ dbus_message_iter_init@Base 1.0.2
+ dbus_message_iter_init_append@Base 1.0.2
+ dbus_message_iter_next@Base 1.0.2
+ dbus_message_iter_open_container@Base 1.0.2
+ dbus_message_iter_recurse@Base 1.0.2
+ dbus_message_lock@Base 1.2.14
+ dbus_message_marshal@Base 1.1.1
+ dbus_message_new@Base 1.0.2
+ dbus_message_new_error@Base 1.0.2
+ dbus_message_new_error_printf@Base 1.0.2
+ dbus_message_new_method_call@Base 1.0.2
+ dbus_message_new_method_return@Base 1.0.2
+ dbus_message_new_signal@Base 1.0.2
+ dbus_message_ref@Base 1.0.2
+ dbus_message_set_auto_start@Base 1.0.2
+ dbus_message_set_data@Base 1.0.2
+ dbus_message_set_destination@Base 1.0.2
+ dbus_message_set_error_name@Base 1.0.2
+ dbus_message_set_interface@Base 1.0.2
+ dbus_message_set_member@Base 1.0.2
+ dbus_message_set_no_reply@Base 1.0.2
+ dbus_message_set_path@Base 1.0.2
+ dbus_message_set_reply_serial@Base 1.0.2
+ dbus_message_set_sender@Base 1.0.2
+ dbus_message_set_serial@Base 1.2.14
+ dbus_message_type_from_string@Base 1.0.2
+ dbus_message_type_to_string@Base 1.0.2
+ dbus_message_unref@Base 1.0.2
+ dbus_move_error@Base 1.0.2
+ dbus_parse_address@Base 1.0.2
+ dbus_pending_call_allocate_data_slot@Base 1.0.2
+ dbus_pending_call_block@Base 1.0.2
+ dbus_pending_call_cancel@Base 1.0.2
+ dbus_pending_call_free_data_slot@Base 1.0.2
+ dbus_pending_call_get_completed@Base 1.0.2
+ dbus_pending_call_get_data@Base 1.0.2
+ dbus_pending_call_ref@Base 1.0.2
+ dbus_pending_call_set_data@Base 1.0.2
+ dbus_pending_call_set_notify@Base 1.0.2
+ dbus_pending_call_steal_reply@Base 1.0.2
+ dbus_pending_call_unref@Base 1.0.2
+ dbus_realloc@Base 1.0.2
+ dbus_server_allocate_data_slot@Base 1.0.2
+ dbus_server_disconnect@Base 1.0.2
+ dbus_server_free_data_slot@Base 1.0.2
+ dbus_server_get_address@Base 1.0.2
+ dbus_server_get_data@Base 1.0.2
+ dbus_server_get_id@Base 1.1.1
+ dbus_server_get_is_connected@Base 1.0.2
+ dbus_server_listen@Base 1.0.2
+ dbus_server_ref@Base 1.0.2
+ dbus_server_set_auth_mechanisms@Base 1.0.2
+ dbus_server_set_data@Base 1.0.2
+ dbus_server_set_new_connection_function@Base 1.0.2
+ dbus_server_set_timeout_functions@Base 1.0.2
+ dbus_server_set_watch_functions@Base 1.0.2
+ dbus_server_unref@Base 1.0.2
+ dbus_set_error@Base 1.0.2
+ dbus_set_error_const@Base 1.0.2
+ dbus_set_error_from_message@Base 1.0.2
+ dbus_setenv@Base 1.7.6
+ dbus_shutdown@Base 1.0.2
+ dbus_signature_iter_get_current_type@Base 1.0.2
+ dbus_signature_iter_get_element_type@Base 1.0.2
+ dbus_signature_iter_get_signature@Base 1.0.2
+ dbus_signature_iter_init@Base 1.0.2
+ dbus_signature_iter_next@Base 1.0.2
+ dbus_signature_iter_recurse@Base 1.0.2
+ dbus_signature_validate@Base 1.0.2
+ dbus_signature_validate_single@Base 1.0.2
+ dbus_threads_init@Base 1.0.2
+ dbus_threads_init_default@Base 1.0.2
+ dbus_timeout_get_data@Base 1.0.2
+ dbus_timeout_get_enabled@Base 1.0.2
+ dbus_timeout_get_interval@Base 1.0.2
+ dbus_timeout_handle@Base 1.0.2
+ dbus_timeout_set_data@Base 1.0.2
+ dbus_type_is_basic@Base 1.0.2
+ dbus_type_is_container@Base 1.0.2
+ dbus_type_is_fixed@Base 1.0.2
+ dbus_type_is_valid@Base 1.5.0
+ dbus_validate_bus_name@Base 1.5.12
+ dbus_validate_error_name@Base 1.5.12
+ dbus_validate_interface@Base 1.5.12
+ dbus_validate_member@Base 1.5.12
+ dbus_validate_path@Base 1.5.12
+ dbus_validate_utf8@Base 1.5.12
+ dbus_watch_get_data@Base 1.0.2
+ dbus_watch_get_enabled@Base 1.0.2
+ dbus_watch_get_fd@Base 1.0.2
+ dbus_watch_get_flags@Base 1.0.2
+ dbus_watch_get_socket@Base 1.1.1
+ dbus_watch_get_unix_fd@Base 1.1.1
+ dbus_watch_handle@Base 1.0.2
+ dbus_watch_set_data@Base 1.0.2
diff --git a/deps/v8/build/linux/sysroot_scripts/merge-package-lists.py b/deps/v8/build/linux/sysroot_scripts/merge-package-lists.py
new file mode 100755
index 0000000000..58bd163024
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/merge-package-lists.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merge package entries from different package lists.
+"""
+
+# This is used for replacing packages in, e.g., sid with those in
+# experimental. The updated packages are ABI-compatible but include security
+# patches, so we should use those instead in our sysroots.
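+#
+# Typical invocation (illustrative file names; this mirrors how
+# sysroot-creator.sh pipes each newly fetched list into the accumulated one):
+#   cat Packages.experimental_main_amd64 | ./merge-package-lists.py Packages.sid_amd64
+# The file named on the command line is rewritten in place with the merge.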
+
+import sys
+
+if len(sys.argv) != 2:
+ exit(1)
+
+packages = {}
+
+def AddPackagesFromFile(file):
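+  # Each entry is three lines (Package:, Filename:, SHA256:); entries read
+  # later override earlier ones, keyed on the Package: line.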
+ global packages
+ lines = file.readlines()
+ if len(lines) % 3 != 0:
+ exit(1)
+ for i in xrange(0, len(lines), 3):
+ packages[lines[i]] = (lines[i + 1], lines[i + 2])
+
+AddPackagesFromFile(open(sys.argv[1], 'r'))
+AddPackagesFromFile(sys.stdin)
+
+output_file = open(sys.argv[1], 'w')
+
+for (package, (filename, sha256)) in packages.iteritems():
+ output_file.write(package + filename + sha256)
diff --git a/deps/v8/build/linux/sysroot_scripts/sysroot-creator-sid.sh b/deps/v8/build/linux/sysroot_scripts/sysroot-creator-sid.sh
new file mode 100755
index 0000000000..6e21e977fd
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/sysroot-creator-sid.sh
@@ -0,0 +1,397 @@
+#!/bin/bash
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+DISTRO=debian
+DIST=sid
+
+# Keep the "experimental" repo listed before the "sid" repo: later entries win
+# when the package lists are merged, so sid takes precedence. Some packages,
+# like libgtk-4-0, are currently only available in experimental, but if sid
+# were listed first, experimental (unreleased) versions of other packages like
+# libc6 would take precedence over the sid (released) versions. While this may
+# be useful for certain kinds of development, the standard sysroots should
+# continue to be shipped only with released packages.
+ARCHIVE_URL="https://snapshot.debian.org/archive/debian"
+ARCHIVE_TIMESTAMP=20181214T150526Z
+APT_SOURCES_LIST="${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ experimental main
+${ARCHIVE_URL}/${ARCHIVE_TIMESTAMP}/ sid main"
+
+# gpg keyring file generated using:
+# export KEYS="518E17E1 46925553 2B90D010 C857C906 F66AEC98 8AE22BA9 1A7B6500"
+# gpg --recv-keys $KEYS
+# gpg --output ./debian-archive-sid-stable.gpg --export $KEYS
+KEYRING_FILE="${SCRIPT_DIR}/debian-archive-sid-stable.gpg"
+
+HAS_ARCH_AMD64=1
+HAS_ARCH_I386=1
+HAS_ARCH_ARM=1
+HAS_ARCH_ARM64=1
+HAS_ARCH_MIPS=1
+HAS_ARCH_MIPS64EL=1
+
+# Sysroot packages: these are the packages needed to build chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in GeneratePackageList mode.
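+# A sketch, assuming modes dispatch to the GeneratePackageList* functions in
+# sysroot-creator.sh:
+#   ./sysroot-creator-sid.sh GeneratePackageListAmd64 \
+#       generated_package_lists/sid.amd64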
+DEBIAN_PACKAGES="\
+ comerr-dev
+ krb5-multidev
+ libappindicator-dev
+ libappindicator1
+ libappindicator3-1
+ libappindicator3-dev
+ libasound2
+ libasound2-dev
+ libasyncns0
+ libatk-bridge2.0-0
+ libatk-bridge2.0-dev
+ libatk1.0-0
+ libatk1.0-dev
+ libatomic1
+ libatspi2.0-0
+ libatspi2.0-dev
+ libattr1
+ libaudit1
+ libavahi-client3
+ libavahi-common3
+ libblkid1
+ libbluetooth-dev
+ libbluetooth3
+ libbrlapi-dev
+ libbrlapi0.6
+ libbsd0
+ libc6
+ libc6-dev
+ libcairo-gobject2
+ libcairo-script-interpreter2
+ libcairo2
+ libcairo2-dev
+ libcap-dev
+ libcap-ng0
+ libcap2
+ libcolord2
+ libcom-err2
+ libcups2
+ libcups2-dev
+ libcupsimage2
+ libcupsimage2-dev
+ libdatrie1
+ libdb5.3
+ libdbus-1-3
+ libdbus-1-dev
+ libdbus-glib-1-2
+ libdbusmenu-glib-dev
+ libdbusmenu-glib4
+ libdbusmenu-gtk3-4
+ libdbusmenu-gtk4
+ libdrm-amdgpu1
+ libdrm-dev
+ libdrm-nouveau2
+ libdrm-radeon1
+ libdrm2
+ libegl1
+ libegl1-mesa
+ libegl1-mesa-dev
+ libelf-dev
+ libelf1
+ libepoxy-dev
+ libepoxy0
+ libevent-2.1-6
+ libexpat1
+ libexpat1-dev
+ libffi-dev
+ libffi6
+ libffi7
+ libflac-dev
+ libflac8
+ libfontconfig1
+ libfontconfig1-dev
+ libfreetype6
+ libfreetype6-dev
+ libfribidi-dev
+ libfribidi0
+ libgbm-dev
+ libgbm1
+ libgcc-6-dev
+ libgcc1
+ libgcrypt20
+ libgcrypt20-dev
+ libgdk-pixbuf2.0-0
+ libgdk-pixbuf2.0-dev
+ libgl1
+ libgl1-mesa-dev
+ libgl1-mesa-glx
+ libglapi-mesa
+ libgles1
+ libgles2
+ libglib2.0-0
+ libglib2.0-dev
+ libglvnd-dev
+ libglvnd0
+ libglx0
+ libgmp10
+ libgnome-keyring-dev
+ libgnome-keyring0
+ libgnutls-dane0
+ libgnutls-openssl27
+ libgnutls28-dev
+ libgnutls30
+ libgnutlsxx28
+ libgomp1
+ libgpg-error-dev
+ libgpg-error0
+ libgraphene-1.0-0
+ libgraphene-1.0-dev
+ libgraphite2-3
+ libgraphite2-dev
+ libgssapi-krb5-2
+ libgssrpc4
+ libgtk-3-0
+ libgtk-3-dev
+ libgtk-4-0
+ libgtk-4-dev
+ libgtk2.0-0
+ libharfbuzz-dev
+ libharfbuzz-gobject0
+ libharfbuzz-icu0
+ libharfbuzz0b
+ libhogweed4
+ libice6
+ libicu-le-hb0
+ libicu57
+ libicu60
+ libidl-2-0
+ libidn11
+ libidn2-0
+ libindicator3-7
+ libindicator7
+ libjbig0
+ libjpeg62-turbo
+ libjpeg62-turbo-dev
+ libjson-glib-1.0-0
+ libjsoncpp-dev
+ libjsoncpp1
+ libk5crypto3
+ libkadm5clnt-mit11
+ libkadm5srv-mit11
+ libkdb5-9
+ libkeyutils1
+ libkrb5-3
+ libkrb5-dev
+ libkrb5support0
+ liblcms2-2
+ libltdl7
+ liblz4-1
+ liblzma5
+ liblzo2-2
+ libminizip-dev
+ libminizip1
+ libmount1
+ libnettle6
+ libnspr4
+ libnspr4-dev
+ libnss-db
+ libnss3
+ libnss3-dev
+ libogg-dev
+ libogg0
+ libopengl0
+ libopus-dev
+ libopus0
+ liborbit-2-0
+ liborbit2
+ libp11-kit0
+ libpam0g
+ libpam0g-dev
+ libpango-1.0-0
+ libpango1.0-dev
+ libpangocairo-1.0-0
+ libpangoft2-1.0-0
+ libpangox-1.0-0
+ libpangoxft-1.0-0
+ libpci-dev
+ libpci3
+ libpciaccess0
+ libpcre16-3
+ libpcre3
+ libpcre3-dev
+ libpcre32-3
+ libpcrecpp0v5
+ libpipewire-0.2-1
+ libpipewire-0.2-dev
+ libpixman-1-0
+ libpixman-1-dev
+ libpng-dev
+ libpng16-16
+ libpsl5
+ libpthread-stubs0-dev
+ libpulse-dev
+ libpulse-mainloop-glib0
+ libpulse0
+ libre2-4
+ libre2-dev
+ librest-0.7-0
+ libselinux1
+ libsm6
+ libsnappy-dev
+ libsnappy1v5
+ libsndfile1
+ libsoup-gnome2.4-1
+ libsoup2.4-1
+ libspa-lib-0.1-dev
+ libspeechd-dev
+ libspeechd2
+ libsqlite3-0
+ libssl-dev
+ libssl1.0.2
+ libssl1.1
+ libstdc++-6-dev
+ libstdc++6
+ libsystemd0
+ libtasn1-6
+ libthai0
+ libtiff5
+ libudev-dev
+ libudev1
+ libunbound8
+ libunistring2
+ libuuid1
+ libva-dev
+ libva-drm2
+ libva-glx2
+ libva-wayland2
+ libva-x11-2
+ libva2
+ libvorbis0a
+ libvorbisenc2
+ libvpx-dev
+ libvpx5
+ libvulkan1
+ libwayland-client0
+ libwayland-cursor0
+ libwayland-dev
+ libwayland-egl1
+ libwayland-egl1-mesa
+ libwayland-server0
+ libwebp-dev
+ libwebp6
+ libwebpdemux2
+ libwebpmux3
+ libwrap0
+ libx11-6
+ libx11-dev
+ libx11-xcb-dev
+ libx11-xcb1
+ libxau-dev
+ libxau6
+ libxcb-dri2-0
+ libxcb-dri2-0-dev
+ libxcb-dri3-0
+ libxcb-glx0
+ libxcb-glx0-dev
+ libxcb-present0
+ libxcb-render0
+ libxcb-render0-dev
+ libxcb-shm0
+ libxcb-shm0-dev
+ libxcb-sync1
+ libxcb-xfixes0
+ libxcb1
+ libxcb1-dev
+ libxcomposite-dev
+ libxcomposite1
+ libxcursor-dev
+ libxcursor1
+ libxdamage-dev
+ libxdamage1
+ libxdmcp-dev
+ libxdmcp6
+ libxext-dev
+ libxext6
+ libxfixes-dev
+ libxfixes3
+ libxft2
+ libxi-dev
+ libxi6
+ libxinerama-dev
+ libxinerama1
+ libxkbcommon-dev
+ libxkbcommon0
+ libxml2
+ libxml2-dev
+ libxrandr-dev
+ libxrandr2
+ libxrender-dev
+ libxrender1
+ libxshmfence1
+ libxslt1-dev
+ libxslt1.1
+ libxss-dev
+ libxss1
+ libxt-dev
+ libxt6
+ libxtst-dev
+ libxtst6
+ libxxf86vm-dev
+ libxxf86vm1
+ linux-libc-dev
+ mesa-common-dev
+ shared-mime-info
+ speech-dispatcher
+ uuid-dev
+ wayland-protocols
+ x11proto-composite-dev
+ x11proto-damage-dev
+ x11proto-dev
+ x11proto-fixes-dev
+ x11proto-input-dev
+ x11proto-kb-dev
+ x11proto-randr-dev
+ x11proto-record-dev
+ x11proto-render-dev
+ x11proto-scrnsaver-dev
+ x11proto-xext-dev
+ x11proto-xinerama-dev
+ zlib1g
+ zlib1g-dev
+"
+
+DEBIAN_PACKAGES_AMD64="
+ liblsan0
+ libtsan0
+"
+
+DEBIAN_PACKAGES_X86="
+ libasan3
+ libcilkrts5
+ libdrm-intel1
+ libitm1
+ libmpx2
+ libquadmath0
+ libubsan0
+"
+
+DEBIAN_PACKAGES_ARM="
+ libasan3
+ libdrm-etnaviv1
+ libdrm-exynos1
+ libdrm-freedreno1
+ libdrm-omap1
+ libdrm-tegra0
+ libubsan0
+"
+
+DEBIAN_PACKAGES_ARM64="
+ libasan3
+ libdrm-etnaviv1
+ libdrm-freedreno1
+ libdrm-tegra0
+ libgmp10
+ libitm1
+ libthai0
+ libubsan0
+"
+
+. "${SCRIPT_DIR}/sysroot-creator.sh"
diff --git a/deps/v8/build/linux/sysroot_scripts/sysroot-creator.sh b/deps/v8/build/linux/sysroot_scripts/sysroot-creator.sh
new file mode 100644
index 0000000000..36f440b513
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/sysroot-creator.sh
@@ -0,0 +1,846 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This script should not be run directly but sourced by the other
+# scripts (e.g. sysroot-creator-sid.sh). It's up to the parent scripts
+# to define certain environment variables, e.g.:
+# DISTRO=debian
+# DIST=sid
+# # Similar in syntax to /etc/apt/sources.list
+# APT_SOURCES_LIST="http://ftp.us.debian.org/debian/ sid main"
+# KEYRING_FILE=debian-archive-sid-stable.gpg
+# DEBIAN_PACKAGES="gcc libz libssl"
+
+#@ This script builds Debian/Ubuntu sysroot images for building Google Chrome.
+#@
+#@ Generally this script is invoked as:
+#@ sysroot-creator-<flavour>.sh <mode> <args>*
+#@ Available modes are shown below.
+#@
+#@ List of modes:
+
+######################################################################
+# Config
+######################################################################
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+if [ -z "${DIST:-}" ]; then
+ echo "error: DIST not defined"
+ exit 1
+fi
+
+if [ -z "${KEYRING_FILE:-}" ]; then
+ echo "error: KEYRING_FILE not defined"
+ exit 1
+fi
+
+if [ -z "${DEBIAN_PACKAGES:-}" ]; then
+ echo "error: DEBIAN_PACKAGES not defined"
+ exit 1
+fi
+
+readonly HAS_ARCH_AMD64=${HAS_ARCH_AMD64:=0}
+readonly HAS_ARCH_I386=${HAS_ARCH_I386:=0}
+readonly HAS_ARCH_ARM=${HAS_ARCH_ARM:=0}
+readonly HAS_ARCH_ARM64=${HAS_ARCH_ARM64:=0}
+readonly HAS_ARCH_MIPS=${HAS_ARCH_MIPS:=0}
+readonly HAS_ARCH_MIPS64EL=${HAS_ARCH_MIPS64EL:=0}
+
+readonly REQUIRED_TOOLS="curl xzcat"
+
+######################################################################
+# Package Config
+######################################################################
+
+readonly PACKAGES_EXT=xz
+readonly RELEASE_FILE="Release"
+readonly RELEASE_FILE_GPG="Release.gpg"
+
+readonly DEBIAN_DEP_LIST_AMD64="generated_package_lists/${DIST}.amd64"
+readonly DEBIAN_DEP_LIST_I386="generated_package_lists/${DIST}.i386"
+readonly DEBIAN_DEP_LIST_ARM="generated_package_lists/${DIST}.arm"
+readonly DEBIAN_DEP_LIST_ARM64="generated_package_lists/${DIST}.arm64"
+readonly DEBIAN_DEP_LIST_MIPS="generated_package_lists/${DIST}.mipsel"
+readonly DEBIAN_DEP_LIST_MIPS64EL="generated_package_lists/${DIST}.mips64el"
+
+
+######################################################################
+# Helper
+######################################################################
+
+Banner() {
+ echo "######################################################################"
+ echo $*
+ echo "######################################################################"
+}
+
+
+SubBanner() {
+ echo "----------------------------------------------------------------------"
+ echo $*
+ echo "----------------------------------------------------------------------"
+}
+
+
+Usage() {
+ egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
+}
+
+
+DownloadOrCopy() {
+ if [ -f "$2" ] ; then
+ echo "$2 already in place"
+ return
+ fi
+
+ HTTP=0
+ echo "$1" | grep -Eqs '^https?://' && HTTP=1
+ if [ "$HTTP" = "1" ]; then
+ SubBanner "downloading from $1 -> $2"
+ # Appending the "$$" shell pid is necessary here to prevent concurrent
+ # instances of sysroot-creator.sh from trying to write to the same file.
+ # --create-dirs is added in case there are slashes in the filename, as can
+ # happen with the "debian/security" release class.
+ curl -L "$1" --create-dirs -o "${2}.partial.$$"
+    mv "${2}.partial.$$" "$2"
+ else
+ SubBanner "copying from $1"
+ cp "$1" "$2"
+ fi
+}
+
+
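+# Derives ARCH and ARCH_LOWER from the suffix of "$1" (typically the mode
+# name; e.g. a hypothetical BuildSysrootAmd64 selects AMD64).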
+SetEnvironmentVariables() {
+ ARCH=""
+ echo $1 | grep -qs Amd64$ && ARCH=AMD64
+ if [ -z "$ARCH" ]; then
+ echo $1 | grep -qs I386$ && ARCH=I386
+ fi
+ if [ -z "$ARCH" ]; then
+ echo $1 | grep -qs Mips64el$ && ARCH=MIPS64EL
+ fi
+ if [ -z "$ARCH" ]; then
+ echo $1 | grep -qs Mips$ && ARCH=MIPS
+ fi
+ if [ -z "$ARCH" ]; then
+ echo $1 | grep -qs ARM$ && ARCH=ARM
+ fi
+ if [ -z "$ARCH" ]; then
+ echo $1 | grep -qs ARM64$ && ARCH=ARM64
+ fi
+ if [ -z "${ARCH}" ]; then
+ echo "ERROR: Unable to determine architecture based on: $1"
+ exit 1
+ fi
+ ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
+}
+
+
+# some sanity checks to make sure this script is run from the right place
+# with the right tools
+SanityCheck() {
+ Banner "Sanity Checks"
+
+ local chrome_dir=$(cd "${SCRIPT_DIR}/../../.." && pwd)
+ BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
+ mkdir -p ${BUILD_DIR}
+ echo "Using build directory: ${BUILD_DIR}"
+
+ for tool in ${REQUIRED_TOOLS} ; do
+ if ! which ${tool} > /dev/null ; then
+ echo "Required binary $tool not found."
+ echo "Exiting."
+ exit 1
+ fi
+ done
+
+ # This is where the staging sysroot is.
+ INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
+ TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tar.xz"
+
+ if ! mkdir -p "${INSTALL_ROOT}" ; then
+ echo "ERROR: ${INSTALL_ROOT} can't be created."
+ exit 1
+ fi
+}
+
+
+ChangeDirectory() {
+ # Change directory to where this script is.
+ cd ${SCRIPT_DIR}
+}
+
+
+ClearInstallDir() {
+ Banner "Clearing dirs in ${INSTALL_ROOT}"
+ rm -rf ${INSTALL_ROOT}/*
+}
+
+
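+# Pack the staging sysroot into a .tar.xz. "xz -9 -T0" compresses at the
+# maximum level using all available cores.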
+CreateTarBall() {
+ Banner "Creating tarball ${TARBALL}"
+ tar -I "xz -9 -T0" -cf ${TARBALL} -C ${INSTALL_ROOT} .
+}
+
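+# Extract the Package:, Filename:, and SHA256: fields from a compressed apt
+# Packages index, producing the three-lines-per-entry format that
+# merge-package-lists.py expects, with Filename: rewritten to a full repo URL.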
+ExtractPackageXz() {
+ local src_file="$1"
+ local dst_file="$2"
+ local repo="$3"
+ xzcat "${src_file}" | egrep '^(Package:|Filename:|SHA256:) ' |
+ sed "s|Filename: |Filename: ${repo}|" > "${dst_file}"
+}
+
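+# Download and verify the apt Packages index for one "repo dist component"
+# line of APT_SOURCES_LIST, leaving the extracted result in TMP_PACKAGE_LIST.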
+GeneratePackageListDist() {
+ local arch="$1"
+ set -- $2
+ local repo="$1"
+ local dist="$2"
+ local repo_name="$3"
+
+ TMP_PACKAGE_LIST="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}"
+ local repo_basedir="${repo}/dists/${dist}"
+ local package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}.${PACKAGES_EXT}"
+ local package_file_arch="${repo_name}/binary-${arch}/Packages.${PACKAGES_EXT}"
+ local package_list_arch="${repo_basedir}/${package_file_arch}"
+
+ DownloadOrCopy "${package_list_arch}" "${package_list}"
+ VerifyPackageListing "${package_file_arch}" "${package_list}" ${repo} ${dist}
+ ExtractPackageXz "${package_list}" "${TMP_PACKAGE_LIST}" ${repo}
+}
+
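+# Build a merged package list for one architecture by fetching the Packages
+# index for each source in APT_SOURCES_LIST (later sources override earlier
+# ones) and filtering it down to the packages named in "$3".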
+GeneratePackageListCommon() {
+ local output_file="$1"
+ local arch="$2"
+ local packages="$3"
+
+ local dists="${DIST} ${DIST_UPDATES:-}"
+ local repos="main ${REPO_EXTRA:-}"
+
+ local list_base="${BUILD_DIR}/Packages.${DIST}_${arch}"
+ > "${list_base}" # Create (or truncate) a zero-length file.
+ echo "${APT_SOURCES_LIST}" | while read source; do
+ GeneratePackageListDist "${arch}" "${source}"
+ cat "${TMP_PACKAGE_LIST}" | ./merge-package-lists.py "${list_base}"
+ done
+
+ GeneratePackageList "${list_base}" "${output_file}" "${packages}"
+}
+
+GeneratePackageListAmd64() {
+ GeneratePackageListCommon "$1" amd64 "${DEBIAN_PACKAGES}
+ ${DEBIAN_PACKAGES_X86:=} ${DEBIAN_PACKAGES_AMD64:=}"
+}
+
+GeneratePackageListI386() {
+ GeneratePackageListCommon "$1" i386 "${DEBIAN_PACKAGES}
+ ${DEBIAN_PACKAGES_X86:=}"
+}
+
+GeneratePackageListARM() {
+ GeneratePackageListCommon "$1" armhf "${DEBIAN_PACKAGES}
+ ${DEBIAN_PACKAGES_ARM:=}"
+}
+
+GeneratePackageListARM64() {
+ GeneratePackageListCommon "$1" arm64 "${DEBIAN_PACKAGES}
+ ${DEBIAN_PACKAGES_ARM64:=}"
+}
+
+GeneratePackageListMips() {
+ GeneratePackageListCommon "$1" mipsel "${DEBIAN_PACKAGES}"
+}
+
+GeneratePackageListMips64el() {
+ GeneratePackageListCommon "$1" mips64el "${DEBIAN_PACKAGES}
+ ${DEBIAN_PACKAGES_MIPS64EL:=}"
+}
+
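+# Strip a trailing 64-hex-digit SHA256 checksum, if present, from each line of
+# a package list, leaving just the package URLs.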
+StripChecksumsFromPackageList() {
+ local package_file="$1"
+ sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
+}
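+
+# For example (illustrative line), this turns
+#   http://<repo>/pool/main/g/glibc/libc6-dev_2.28-10_amd64.deb <64-hex-sha256>
+# into just the package URL.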
+
+######################################################################
+#
+######################################################################
+
+HacksAndPatchesCommon() {
+ local arch=$1
+ local os=$2
+ local strip=$3
+ Banner "Misc Hacks & Patches"
+ # these are linker scripts with absolute pathnames in them
+ # which we rewrite here
+ lscripts="${INSTALL_ROOT}/usr/lib/${arch}-${os}/libpthread.so \
+ ${INSTALL_ROOT}/usr/lib/${arch}-${os}/libc.so"
+
+ # Rewrite linker scripts
+ sed -i -e "s|/usr/lib/${arch}-${os}/||g" ${lscripts}
+ sed -i -e "s|/lib/${arch}-${os}/||g" ${lscripts}
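+ # (These .so files are GNU ld scripts rather than ELF objects; they
+ # contain GROUP(...) entries referencing absolute paths such as
+ # /usr/lib/x86_64-linux-gnu/libc.so.6, and dropping the directory prefix
+ # lets the linker resolve them relative to the sysroot.)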
+
+ # Unversion libdbus symbols. This is required because libdbus-1-3
+ # switched from unversioned symbols to versioned ones, and we must
+ # still support distros using the unversioned library. This hack
+ # can be removed once support for Ubuntu Trusty and Debian Jessie
+ # is dropped.
+ ${strip} -R .gnu.version_d -R .gnu.version \
+ "${INSTALL_ROOT}/lib/${arch}-${os}/libdbus-1.so.3"
+ cp "${SCRIPT_DIR}/libdbus-1-3-symbols" \
+ "${INSTALL_ROOT}/debian/libdbus-1-3/DEBIAN/symbols"
+
+ # Glibc 2.27 introduced some new optimizations to several math functions, but
+ # it will be a while before it makes it into all supported distros. Luckily,
+ # glibc maintains ABI compatibility with previous versions, so the old symbols
+ # are still there.
+ # TODO(thomasanderson): Remove this once glibc 2.27 is available on all
+ # supported distros.
+ local math_h="${INSTALL_ROOT}/usr/include/math.h"
+ local libm_so="${INSTALL_ROOT}/lib/${arch}-${os}/libm.so.6"
+ nm -D --defined-only --with-symbol-versions "${libm_so}" | \
+ "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${math_h}"
+
+ # glob64() was also optimized in glibc 2.27. Make sure to choose the older
+ # version.
+ local glob_h="${INSTALL_ROOT}/usr/include/glob.h"
+ local libc_so="${INSTALL_ROOT}/lib/${arch}-${os}/libc.so.6"
+ nm -D --defined-only --with-symbol-versions "${libc_so}" | \
+ "${SCRIPT_DIR}/find_incompatible_glibc_symbols.py" >> "${glob_h}"
+
+ # This is for chrome's ./build/linux/pkg-config-wrapper
+ # which overwrites PKG_CONFIG_LIBDIR internally
+ SubBanner "Move pkgconfig scripts"
+ mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
+ mv ${INSTALL_ROOT}/usr/lib/${arch}-${os}/pkgconfig/* \
+ ${INSTALL_ROOT}/usr/lib/pkgconfig
+}
+
+
+HacksAndPatchesAmd64() {
+ HacksAndPatchesCommon x86_64 linux-gnu strip
+}
+
+
+HacksAndPatchesI386() {
+ HacksAndPatchesCommon i386 linux-gnu strip
+}
+
+
+HacksAndPatchesARM() {
+ HacksAndPatchesCommon arm linux-gnueabihf arm-linux-gnueabihf-strip
+}
+
+
+HacksAndPatchesARM64() {
+ # Use the unstripped libdbus for arm64 to prevent linker errors.
+ # https://bugs.chromium.org/p/webrtc/issues/detail?id=8535
+ HacksAndPatchesCommon aarch64 linux-gnu true
+}
+
+
+HacksAndPatchesMips() {
+ HacksAndPatchesCommon mipsel linux-gnu mipsel-linux-gnu-strip
+}
+
+
+HacksAndPatchesMips64el() {
+ HacksAndPatchesCommon mips64el linux-gnuabi64 mips64el-linux-gnuabi64-strip
+}
+
+
+InstallIntoSysroot() {
+ Banner "Install Libs And Headers Into Jail"
+
+ mkdir -p ${BUILD_DIR}/debian-packages
+ # The /debian directory is an implementation detail that's used to cd into
+ # when running dpkg-shlibdeps.
+ mkdir -p ${INSTALL_ROOT}/debian
+ # An empty control file is necessary to run dpkg-shlibdeps.
+ touch ${INSTALL_ROOT}/debian/control
+ while (( "$#" )); do
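+ # Arguments arrive as "<package-url> <sha256>" pairs produced by the
+ # GeneratePackageList* helpers; consume one pair per iteration.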
+ local file="$1"
+ local package="${BUILD_DIR}/debian-packages/${file##*/}"
+ shift
+ local sha256sum="$1"
+ shift
+ if [ "${#sha256sum}" -ne "64" ]; then
+ echo "Bad sha256sum from package list"
+ exit 1
+ fi
+
+ Banner "Installing $(basename ${file})"
+ DownloadOrCopy ${file} ${package}
+ if [ ! -s "${package}" ] ; then
+ echo
+ echo "ERROR: bad package ${package}"
+ exit 1
+ fi
+ echo "${sha256sum} ${package}" | sha256sum --quiet -c
+
+ SubBanner "Extracting to ${INSTALL_ROOT}"
+ dpkg-deb -x ${package} ${INSTALL_ROOT}
+
+ base_package=$(dpkg-deb --field ${package} Package)
+ mkdir -p ${INSTALL_ROOT}/debian/${base_package}/DEBIAN
+ dpkg-deb -e ${package} ${INSTALL_ROOT}/debian/${base_package}/DEBIAN
+ done
+
+ # Prune /usr/share, leaving only pkgconfig
+ for name in ${INSTALL_ROOT}/usr/share/*; do
+ if [ "${name}" != "${INSTALL_ROOT}/usr/share/pkgconfig" ]; then
+ rm -r ${name}
+ fi
+ done
+}
+
+
+CleanupJailSymlinks() {
+ Banner "Jail symlink cleanup"
+
+ SAVEDPWD=$(pwd)
+ cd ${INSTALL_ROOT}
+ local libdirs="lib usr/lib"
+ if [ "${ARCH}" != "MIPS" ]; then
+ libdirs="${libdirs} lib64"
+ fi
+ find $libdirs -type l -printf '%p %l\n' | while read link target; do
+ # skip links with non-absolute paths
+ echo "${target}" | grep -qs ^/ || continue
+ echo "${link}: ${target}"
+ # Relativize the symlink.
+ prefix=$(echo "${link}" | sed -e 's/[^/]//g' | sed -e 's|/|../|g')
+ ln -snfv "${prefix}${target}" "${link}"
+ done
+
+ find $libdirs -type l -printf '%p %l\n' | while read link target; do
+ # Make sure we catch new bad links.
+ if [ ! -r "${link}" ]; then
+ echo "ERROR: FOUND BAD LINK ${link}"
+ ls -l ${link}
+ exit 1
+ fi
+ done
+ cd "$SAVEDPWD"
+}
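+
+# Illustration of the relativization above (hypothetical link): for
+#   usr/lib/libfoo.so -> /lib/libfoo.so
+# the computed prefix is "../../" (one "../" per slash in the link path),
+# so the link is rewritten to ../..//lib/libfoo.so, which stays valid
+# inside the extracted sysroot.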
+
+
+VerifyLibraryDepsCommon() {
+ local arch=$1
+ local os=$2
+ local find_dirs=(
+ "${INSTALL_ROOT}/lib/${arch}-${os}/"
+ "${INSTALL_ROOT}/usr/lib/${arch}-${os}/"
+ )
+ local needed_libs="$(
+ find ${find_dirs[*]} -name "*\.so*" -type f -exec file {} \; | \
+ grep ': ELF' | sed 's/^\(.*\): .*$/\1/' | xargs readelf -d | \
+ grep NEEDED | sort | uniq | sed 's/^.*Shared library: \[\(.*\)\]$/\1/g')"
+ local all_libs="$(find ${find_dirs[*]} -printf '%f\n')"
+ local missing_libs="$(grep -vFxf <(echo "${all_libs}") \
+ <(echo "${needed_libs}"))"
+ if [ ! -z "${missing_libs}" ]; then
+ echo "Missing libraries:"
+ echo "${missing_libs}"
+ exit 1
+ fi
+}
+
+
+VerifyLibraryDepsAmd64() {
+ VerifyLibraryDepsCommon x86_64 linux-gnu
+}
+
+
+VerifyLibraryDepsI386() {
+ VerifyLibraryDepsCommon i386 linux-gnu
+}
+
+
+VerifyLibraryDepsARM() {
+ VerifyLibraryDepsCommon arm linux-gnueabihf
+}
+
+
+VerifyLibraryDepsARM64() {
+ VerifyLibraryDepsCommon aarch64 linux-gnu
+}
+
+
+VerifyLibraryDepsMips() {
+ VerifyLibraryDepsCommon mipsel linux-gnu
+}
+
+
+VerifyLibraryDepsMips64el() {
+ VerifyLibraryDepsCommon mips64el linux-gnuabi64
+}
+
+
+#@
+#@ BuildSysrootAmd64
+#@
+#@ Build everything and package it
+BuildSysrootAmd64() {
+ if [ "$HAS_ARCH_AMD64" = "0" ]; then
+ return
+ fi
+ ClearInstallDir
+ local package_file="${DEBIAN_DEP_LIST_AMD64}"
+ GeneratePackageListAmd64 "$package_file"
+ local files_and_sha256sums="$(cat ${package_file})"
+ StripChecksumsFromPackageList "$package_file"
+ InstallIntoSysroot ${files_and_sha256sums}
+ CleanupJailSymlinks
+ HacksAndPatchesAmd64
+ VerifyLibraryDepsAmd64
+ CreateTarBall
+}
+
+#@
+#@ BuildSysrootI386
+#@
+#@ Build everything and package it
+BuildSysrootI386() {
+ if [ "$HAS_ARCH_I386" = "0" ]; then
+ return
+ fi
+ ClearInstallDir
+ local package_file="${DEBIAN_DEP_LIST_I386}"
+ GeneratePackageListI386 "$package_file"
+ local files_and_sha256sums="$(cat ${package_file})"
+ StripChecksumsFromPackageList "$package_file"
+ InstallIntoSysroot ${files_and_sha256sums}
+ CleanupJailSymlinks
+ HacksAndPatchesI386
+ VerifyLibraryDepsI386
+ CreateTarBall
+}
+
+#@
+#@ BuildSysrootARM
+#@
+#@ Build everything and package it
+BuildSysrootARM() {
+ if [ "$HAS_ARCH_ARM" = "0" ]; then
+ return
+ fi
+ ClearInstallDir
+ local package_file="${DEBIAN_DEP_LIST_ARM}"
+ GeneratePackageListARM "$package_file"
+ local files_and_sha256sums="$(cat ${package_file})"
+ StripChecksumsFromPackageList "$package_file"
+ InstallIntoSysroot ${files_and_sha256sums}
+ CleanupJailSymlinks
+ HacksAndPatchesARM
+ VerifyLibraryDepsARM
+ CreateTarBall
+}
+
+#@
+#@ BuildSysrootARM64
+#@
+#@ Build everything and package it
+BuildSysrootARM64() {
+ if [ "$HAS_ARCH_ARM64" = "0" ]; then
+ return
+ fi
+ ClearInstallDir
+ local package_file="${DEBIAN_DEP_LIST_ARM64}"
+ GeneratePackageListARM64 "$package_file"
+ local files_and_sha256sums="$(cat ${package_file})"
+ StripChecksumsFromPackageList "$package_file"
+ InstallIntoSysroot ${files_and_sha256sums}
+ CleanupJailSymlinks
+ HacksAndPatchesARM64
+ VerifyLibraryDepsARM64
+ CreateTarBall
+}
+
+#@
+#@ BuildSysrootMips
+#@
+#@ Build everything and package it
+BuildSysrootMips() {
+ if [ "$HAS_ARCH_MIPS" = "0" ]; then
+ return
+ fi
+ ClearInstallDir
+ local package_file="${DEBIAN_DEP_LIST_MIPS}"
+ GeneratePackageListMips "$package_file"
+ local files_and_sha256sums="$(cat ${package_file})"
+ StripChecksumsFromPackageList "$package_file"
+ InstallIntoSysroot ${files_and_sha256sums}
+ CleanupJailSymlinks
+ HacksAndPatchesMips
+ VerifyLibraryDepsMips
+ CreateTarBall
+}
+
+#@
+#@ BuildSysrootMips64el
+#@
+#@ Build everything and package it
+BuildSysrootMips64el() {
+ if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
+ return
+ fi
+ ClearInstallDir
+ local package_file="${DEBIAN_DEP_LIST_MIPS64EL}"
+ GeneratePackageListMips64el "$package_file"
+ local files_and_sha256sums="$(cat ${package_file})"
+ StripChecksumsFromPackageList "$package_file"
+ InstallIntoSysroot ${files_and_sha256sums}
+ CleanupJailSymlinks
+ HacksAndPatchesMips64el
+ VerifyLibraryDepsMips64el
+ CreateTarBall
+}
+
+#@
+#@ BuildSysrootAll
+#@
+#@ Build sysroot images for all architectures
+BuildSysrootAll() {
+ RunCommand BuildSysrootAmd64
+ RunCommand BuildSysrootI386
+ RunCommand BuildSysrootARM
+ RunCommand BuildSysrootARM64
+ RunCommand BuildSysrootMips
+ RunCommand BuildSysrootMips64el
+}
+
+UploadSysroot() {
+ local sha=$(sha1sum "${TARBALL}" | awk '{print $1;}')
+ set -x
+ gsutil cp -a public-read "${TARBALL}" \
+ "gs://chrome-linux-sysroot/toolchain/$sha/"
+ set +x
+}
+
+#@
+#@ UploadSysrootAmd64
+#@
+UploadSysrootAmd64() {
+ if [ "$HAS_ARCH_AMD64" = "0" ]; then
+ return
+ fi
+ UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootI386
+#@
+UploadSysrootI386() {
+ if [ "$HAS_ARCH_I386" = "0" ]; then
+ return
+ fi
+ UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM
+#@
+UploadSysrootARM() {
+ if [ "$HAS_ARCH_ARM" = "0" ]; then
+ return
+ fi
+ UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM64
+#@
+UploadSysrootARM64() {
+ if [ "$HAS_ARCH_ARM64" = "0" ]; then
+ return
+ fi
+ UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips
+#@
+UploadSysrootMips() {
+ if [ "$HAS_ARCH_MIPS" = "0" ]; then
+ return
+ fi
+ UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips64el
+#@
+UploadSysrootMips64el() {
+ if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then
+ return
+ fi
+ UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootAll
+#@
+#@ Upload sysroot image for all architectures
+UploadSysrootAll() {
+ RunCommand UploadSysrootAmd64 "$@"
+ RunCommand UploadSysrootI386 "$@"
+ RunCommand UploadSysrootARM "$@"
+ RunCommand UploadSysrootARM64 "$@"
+ RunCommand UploadSysrootMips "$@"
+ RunCommand UploadSysrootMips64el "$@"
+}
+
+#
+# CheckForDebianGPGKeyring
+#
+# Make sure the Debian GPG keys exist. Otherwise print a helpful message.
+#
+CheckForDebianGPGKeyring() {
+ if [ ! -e "$KEYRING_FILE" ]; then
+ echo "KEYRING_FILE not found: ${KEYRING_FILE}"
+ echo "Debian GPG keys missing. Install the debian-archive-keyring package."
+ exit 1
+ fi
+}
+
+#
+# VerifyPackageListing
+#
+# Verifies that the downloaded Packages.xz file has the right checksum.
+#
+VerifyPackageListing() {
+ local file_path="$1"
+ local output_file="$2"
+ local repo="$3"
+ local dist="$4"
+
+ local repo_basedir="${repo}/dists/${dist}"
+ local release_list="${repo_basedir}/${RELEASE_FILE}"
+ local release_list_gpg="${repo_basedir}/${RELEASE_FILE_GPG}"
+
+ local release_file="${BUILD_DIR}/${dist}-${RELEASE_FILE}"
+ local release_file_gpg="${BUILD_DIR}/${dist}-${RELEASE_FILE_GPG}"
+
+ CheckForDebianGPGKeyring
+
+ DownloadOrCopy ${release_list} ${release_file}
+ DownloadOrCopy ${release_list_gpg} ${release_file_gpg}
+ echo "Verifying: ${release_file} with ${release_file_gpg}"
+ set -x
+ gpgv --keyring "${KEYRING_FILE}" "${release_file_gpg}" "${release_file}"
+ set +x
+
+ echo "Verifying: ${output_file}"
+ local sha256sum=$(grep -E "${file_path}\$|:\$" "${release_file}" | \
+ grep "SHA256:" -A 1 | xargs echo | awk '{print $2;}')
+
+ if [ "${#sha256sum}" -ne "64" ]; then
+ echo "Bad sha256sum from ${release_list}"
+ exit 1
+ fi
+
+ echo "${sha256sum} ${output_file}" | sha256sum --quiet -c
+}
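+
+# For reference, the Release file lists per-file checksums in sections of
+# the form (abridged, illustrative):
+#   SHA256:
+#    <64-hex-digest>  1234567 main/binary-amd64/Packages.xz
+# which is the layout the grep/awk pipeline above relies on.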
+
+#
+# GeneratePackageList
+#
+# Looks up package names in ${BUILD_DIR}/Packages and writes a list of URLs
+# to the output file.
+#
+GeneratePackageList() {
+ local input_file="$1"
+ local output_file="$2"
+ echo "Updating: ${output_file} from ${input_file}"
+ /bin/rm -f "${output_file}"
+ shift
+ shift
+ for pkg in $@ ; do
+ local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
+ egrep "pool/.*" | sed 's/.*Filename: //')
+ if [ -z "${pkg_full}" ]; then
+ echo "ERROR: missing package: $pkg"
+ exit 1
+ fi
+ local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
+ grep ^SHA256: | sed 's/^SHA256: //')
+ if [ "${#sha256sum}" -ne "64" ]; then
+ echo "Bad sha256sum from Packages"
+ exit 1
+ fi
+ echo $pkg_full $sha256sum >> "$output_file"
+ done
+ # sort -o does an in-place sort of this file
+ sort "$output_file" -o "$output_file"
+}
+
+#@
+#@ PrintArchitectures
+#@
+#@ Prints supported architectures.
+PrintArchitectures() {
+ if [ "$HAS_ARCH_AMD64" = "1" ]; then
+ echo Amd64
+ fi
+ if [ "$HAS_ARCH_I386" = "1" ]; then
+ echo I386
+ fi
+ if [ "$HAS_ARCH_ARM" = "1" ]; then
+ echo ARM
+ fi
+ if [ "$HAS_ARCH_ARM64" = "1" ]; then
+ echo ARM64
+ fi
+ if [ "$HAS_ARCH_MIPS" = "1" ]; then
+ echo Mips
+ fi
+ if [ "$HAS_ARCH_MIPS64EL" = "1" ]; then
+ echo Mips64el
+ fi
+}
+
+#@
+#@ PrintDistro
+#@
+#@ Prints the distro, e.g. ubuntu.
+PrintDistro() {
+ echo ${DISTRO}
+}
+
+#@
+#@ PrintRelease
+#@
+#@ Prints the distro release, e.g. jessie.
+PrintRelease() {
+ echo ${DIST}
+}
+
+RunCommand() {
+ SetEnvironmentVariables "$1"
+ SanityCheck
+ "$@"
+}
+
+if [ $# -eq 0 ] ; then
+ echo "ERROR: you must specify a mode on the commandline"
+ echo
+ Usage
+ exit 1
+elif [ "$(type -t $1)" != "function" ]; then
+ echo "ERROR: unknown function '$1'." >&2
+ echo "For help, try:"
+ echo " $0 help"
+ exit 1
+else
+ ChangeDirectory
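+ # Print* helpers and the *All wrappers either need no per-architecture
+ # environment or set it up themselves (via RunCommand per architecture),
+ # so the dispatcher below skips RunCommand for them.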
+ if echo $1 | grep -qs --regexp='\(^Print\)\|\(All$\)'; then
+ "$@"
+ else
+ RunCommand "$@"
+ fi
+fi
diff --git a/deps/v8/build/linux/sysroot_scripts/sysroots.json b/deps/v8/build/linux/sysroot_scripts/sysroots.json
new file mode 100644
index 0000000000..d49621ce04
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/sysroots.json
@@ -0,0 +1,32 @@
+{
+ "sid_amd64": {
+ "Sha1Sum": "e7c53f04bd88d29d075bfd1f62b073aeb69cbe09",
+ "SysrootDir": "debian_sid_amd64-sysroot",
+ "Tarball": "debian_sid_amd64_sysroot.tar.xz"
+ },
+ "sid_arm": {
+ "Sha1Sum": "ef5c4f84bcafb7a3796d36bb1db7826317dde51c",
+ "SysrootDir": "debian_sid_arm-sysroot",
+ "Tarball": "debian_sid_arm_sysroot.tar.xz"
+ },
+ "sid_arm64": {
+ "Sha1Sum": "953c2471bc7e71a788309f6c2d2003e8b703305d",
+ "SysrootDir": "debian_sid_arm64-sysroot",
+ "Tarball": "debian_sid_arm64_sysroot.tar.xz"
+ },
+ "sid_i386": {
+ "Sha1Sum": "9e6279438ece6fb42b5333ca90d5e9d0c188a403",
+ "SysrootDir": "debian_sid_i386-sysroot",
+ "Tarball": "debian_sid_i386_sysroot.tar.xz"
+ },
+ "sid_mips": {
+ "Sha1Sum": "958731a68a169631c0450efb15410ccc4135ef2a",
+ "SysrootDir": "debian_sid_mips-sysroot",
+ "Tarball": "debian_sid_mips_sysroot.tar.xz"
+ },
+ "sid_mips64el": {
+ "Sha1Sum": "51ca1f4092ac76ad1a1da953f0f3ce1aea947a42",
+ "SysrootDir": "debian_sid_mips64el-sysroot",
+ "Tarball": "debian_sid_mips64el_sysroot.tar.xz"
+ }
+}
diff --git a/deps/v8/build/linux/sysroot_scripts/update-archive-timestamp.sh b/deps/v8/build/linux/sysroot_scripts/update-archive-timestamp.sh
new file mode 100755
index 0000000000..a61dd99571
--- /dev/null
+++ b/deps/v8/build/linux/sysroot_scripts/update-archive-timestamp.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script updates sysroot-creator-*.sh with the timestamp of the latest
+# snapshot from snapshot.debian.org.
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ARCHIVE_URL="http://snapshot.debian.org/archive/debian"
+
+# Use 9999-01-01 as the date so that we get a redirect to the page with the
+# latest timestamp.
+TIMESTAMP=$(curl -s "${ARCHIVE_URL}/99990101T000000Z/pool/" | \
+ sed -n "s|.*${ARCHIVE_URL}/\([[:digit:]TZ]\+\)/pool/.*|\1|p" | head -n 1)
+
+sed -i "s/ARCHIVE_TIMESTAMP=.*$/ARCHIVE_TIMESTAMP=${TIMESTAMP}/" \
+ "${SCRIPT_DIR}"/sysroot-creator-*.sh
diff --git a/deps/v8/build/linux/unbundle/README b/deps/v8/build/linux/unbundle/README
new file mode 100644
index 0000000000..b6b6321b38
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/README
@@ -0,0 +1,57 @@
+This directory contains files that make it possible for Linux
+distributions to build Chromium using system libraries and exclude the
+source code for Chromium's bundled copies of system libraries in a
+consistent manner. Nothing here is used in normal developer builds.
+
+
+For more info on the Linux distros' philosophy on bundling system
+libraries and why this exists, please read the following:
+
+ - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
+ - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
+ - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
+
+For more Chromium-specific context please read
+http://spot.livejournal.com/312320.html .
+
+Additional resources which might provide even more context:
+
+ - http://events.linuxfoundation.org/sites/events/files/slides/LinuxCon%202014%20Slides_0.pdf
+ - https://lwn.net/Articles/619158/
+
+
+This directory is provided in the source tree so one can follow the
+above guidelines without having to download additional tools or worry
+about having the right version of the tool. It is a compromise solution
+which takes into account Chromium developers who want to avoid the
+perceived burden of more conditional code in build files, and the
+expectations of the open-source community, where using system-provided
+libraries is the norm.
+
+Usage:
+
+1. remove_bundled_libraries.py <preserved-directories>
+
+ For example: remove_bundled_libraries.py third_party/zlib
+
+ The script scans sources looking for third_party directories.
+ Everything that is not explicitly preserved is removed (except for
+ GYP/GN build files), and the script fails if any directory passed on
+ the command line does not exist (to ensure the list is kept up to date).
+
+ This is intended to be used on source code extracted from a tarball,
+ not on a git repository.
+
+ NOTE: by default this will not remove anything (for safety). Pass
+ the --do-remove flag to actually remove files.
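+
+ For example: remove_bundled_libraries.py --do-remove third_party/zlib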
+
+2. replace_gn_files.py --system-libraries lib...
+
+ This swaps out a library's normal GN build file, which is intended
+ for use with the bundled copy, for a build file that is set up to use
+ the system library. While some build files have use_system_libfoo
+ build flags, using unbundled build files has the advantage that Linux
+ distros can build Chromium without having to specify many additional
+ build flags.
+
+ For example: replace_gn_files.py --system-libraries libxml
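+
+ The script keeps a .orig backup of each replaced file, so the change can
+ be reverted later: replace_gn_files.py --undo --system-libraries libxml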
diff --git a/deps/v8/build/linux/unbundle/ffmpeg.gn b/deps/v8/build/linux/unbundle/ffmpeg.gn
new file mode 100644
index 0000000000..978298a7f2
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/ffmpeg.gn
@@ -0,0 +1,37 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/buildflag_header.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_ffmpeg") {
+ packages = [
+ "libavcodec",
+ "libavformat",
+ "libavutil",
+ ]
+}
+
+buildflag_header("ffmpeg_features") {
+ header = "ffmpeg_features.h"
+ flags = [ "USE_SYSTEM_FFMPEG=true" ]
+}
+
+shim_headers("ffmpeg_shim") {
+ root_path = "."
+ headers = [
+ "libavcodec/avcodec.h",
+ "libavformat/avformat.h",
+ "libavutil/imgutils.h",
+ ]
+}
+
+source_set("ffmpeg") {
+ deps = [
+ ":ffmpeg_features",
+ ":ffmpeg_shim",
+ ]
+ public_configs = [ ":system_ffmpeg" ]
+}
diff --git a/deps/v8/build/linux/unbundle/flac.gn b/deps/v8/build/linux/unbundle/flac.gn
new file mode 100644
index 0000000000..5f2fdebac9
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/flac.gn
@@ -0,0 +1,32 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_flac") {
+ packages = [ "flac" ]
+}
+
+shim_headers("flac_shim") {
+ root_path = "include"
+ headers = [
+ "FLAC/all.h",
+ "FLAC/assert.h",
+ "FLAC/callback.h",
+ "FLAC/export.h",
+ "FLAC/format.h",
+ "FLAC/metadata.h",
+ "FLAC/ordinals.h",
+ "FLAC/stream_decoder.h",
+ "FLAC/stream_encoder.h",
+ ]
+}
+
+source_set("flac") {
+ deps = [
+ ":flac_shim",
+ ]
+ public_configs = [ ":system_flac" ]
+}
diff --git a/deps/v8/build/linux/unbundle/fontconfig.gn b/deps/v8/build/linux/unbundle/fontconfig.gn
new file mode 100644
index 0000000000..c1e229854b
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/fontconfig.gn
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_linux)
+
+config("fontconfig_config") {
+ libs = [ "fontconfig" ]
+}
+
+group("fontconfig") {
+ public_configs = [ ":fontconfig_config" ]
+}
diff --git a/deps/v8/build/linux/unbundle/freetype.gn b/deps/v8/build/linux/unbundle/freetype.gn
new file mode 100644
index 0000000000..cafa9db6b7
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/freetype.gn
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Blink needs a recent, properly build-configured FreeType version to
+ # support OpenType variations and color emoji, and to avoid security bugs.
+ # By default we ship and link such a version as part of Chrome. For
+ # distributions that prefer to keep linking to the system version, FreeType
+ # must be newer than version 2.7.1 and have color bitmap support compiled
+ # in. WARNING: System FreeType configurations other than as described WILL
+ # INTRODUCE TEXT RENDERING AND SECURITY REGRESSIONS.
+ use_system_freetype = true
+}
diff --git a/deps/v8/build/linux/unbundle/harfbuzz-ng.gn b/deps/v8/build/linux/unbundle/harfbuzz-ng.gn
new file mode 100644
index 0000000000..b4ba17a9b8
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/harfbuzz-ng.gn
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Blink uses a cutting-edge version of Harfbuzz (version listed in
+ # third_party/harfbuzz-ng/README.chromium); most Linux distros do not contain
+ # a new enough version of the code to work correctly. However, ChromeOS
+ # chroots (i.e. real ChromeOS builds for devices) do contain a new enough
+ # version of the library, and so this variable exists so that ChromeOS can
+ # build against the system lib and keep binary sizes smaller.
+ use_system_harfbuzz = true
+}
diff --git a/deps/v8/build/linux/unbundle/icu.gn b/deps/v8/build/linux/unbundle/icu.gn
new file mode 100644
index 0000000000..4450e409db
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/icu.gn
@@ -0,0 +1,258 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+group("icu") {
+ public_deps = [
+ ":icui18n",
+ ":icuuc",
+ ]
+}
+
+config("icu_config") {
+ defines = [
+ "USING_SYSTEM_ICU=1",
+ "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
+ "UCHAR_TYPE=uint16_t",
+
+ # U_EXPORT (defined in unicode/platform.h) is used to set public visibility
+ # on classes through the U_COMMON_API and U_I18N_API macros (among others).
+ # When linking against the system ICU library, we want its symbols to have
+ # public LTO visibility. This disables CFI checks for the ICU classes and
+ # allows whole-program optimization to be applied to the rest of Chromium.
+ #
+ # Both U_COMMON_API and U_I18N_API macros would be defined to U_EXPORT only
+ # when U_COMBINED_IMPLEMENTATION is defined (see unicode/utypes.h). Because
+ # we override the default system UCHAR_TYPE (char16_t), it is not possible
+ # to use U_COMBINED_IMPLEMENTATION at this moment, meaning the U_COMMON_API
+ # and U_I18N_API macros are set to U_IMPORT which is an empty definition.
+ #
+ # Until building with UCHAR_TYPE=char16_t is supported, one way to apply
+ # public visibility (and thus public LTO visibility) to all ICU classes is
+ # to define U_IMPORT to have the same value as U_EXPORT. For more details,
+ # please see: https://crbug.com/822820
+ "U_IMPORT=U_EXPORT",
+ ]
+}
+
+pkg_config("system_icui18n") {
+ packages = [ "icu-i18n" ]
+}
+
+pkg_config("system_icuuc") {
+ packages = [ "icu-uc" ]
+}
+
+source_set("icui18n") {
+ deps = [
+ ":icui18n_shim",
+ ]
+ public_configs = [
+ ":icu_config",
+ ":system_icui18n",
+ ]
+}
+
+source_set("icuuc") {
+ deps = [
+ ":icuuc_shim",
+ ]
+ public_configs = [
+ ":icu_config",
+ ":system_icuuc",
+ ]
+}
+
+shim_headers("icui18n_shim") {
+ root_path = "source/i18n"
+ headers = [
+ # This list can easily be updated using the commands below:
+ # cd third_party/icu/source/i18n
+ # find unicode -iname '*.h' -printf ' "%p",\n' | LC_ALL=C sort -u
+ "unicode/alphaindex.h",
+ "unicode/basictz.h",
+ "unicode/calendar.h",
+ "unicode/choicfmt.h",
+ "unicode/coleitr.h",
+ "unicode/coll.h",
+ "unicode/compactdecimalformat.h",
+ "unicode/curramt.h",
+ "unicode/currpinf.h",
+ "unicode/currunit.h",
+ "unicode/datefmt.h",
+ "unicode/dcfmtsym.h",
+ "unicode/decimfmt.h",
+ "unicode/dtfmtsym.h",
+ "unicode/dtitvfmt.h",
+ "unicode/dtitvinf.h",
+ "unicode/dtptngen.h",
+ "unicode/dtrule.h",
+ "unicode/fieldpos.h",
+ "unicode/fmtable.h",
+ "unicode/format.h",
+ "unicode/fpositer.h",
+ "unicode/gender.h",
+ "unicode/gregocal.h",
+ "unicode/measfmt.h",
+ "unicode/measunit.h",
+ "unicode/measure.h",
+ "unicode/msgfmt.h",
+ "unicode/numfmt.h",
+ "unicode/numsys.h",
+ "unicode/plurfmt.h",
+ "unicode/plurrule.h",
+ "unicode/rbnf.h",
+ "unicode/rbtz.h",
+ "unicode/regex.h",
+ "unicode/region.h",
+ "unicode/reldatefmt.h",
+ "unicode/scientificnumberformatter.h",
+ "unicode/search.h",
+ "unicode/selfmt.h",
+ "unicode/simpletz.h",
+ "unicode/smpdtfmt.h",
+ "unicode/sortkey.h",
+ "unicode/stsearch.h",
+ "unicode/tblcoll.h",
+ "unicode/timezone.h",
+ "unicode/tmunit.h",
+ "unicode/tmutamt.h",
+ "unicode/tmutfmt.h",
+ "unicode/translit.h",
+ "unicode/tzfmt.h",
+ "unicode/tznames.h",
+ "unicode/tzrule.h",
+ "unicode/tztrans.h",
+ "unicode/ucal.h",
+ "unicode/ucol.h",
+ "unicode/ucoleitr.h",
+ "unicode/ucsdet.h",
+ "unicode/udat.h",
+ "unicode/udateintervalformat.h",
+ "unicode/udatpg.h",
+ "unicode/ufieldpositer.h",
+ "unicode/uformattable.h",
+ "unicode/ugender.h",
+ "unicode/ulocdata.h",
+ "unicode/umsg.h",
+ "unicode/unirepl.h",
+ "unicode/unum.h",
+ "unicode/unumsys.h",
+ "unicode/upluralrules.h",
+ "unicode/uregex.h",
+ "unicode/uregion.h",
+ "unicode/ureldatefmt.h",
+ "unicode/usearch.h",
+ "unicode/uspoof.h",
+ "unicode/utmscale.h",
+ "unicode/utrans.h",
+ "unicode/vtzone.h",
+ ]
+}
+
+shim_headers("icuuc_shim") {
+ root_path = "source/common"
+ headers = [
+ # This list can easily be updated using the commands below:
+ # cd third_party/icu/source/common
+ # find unicode -iname '*.h' -printf ' "%p",\n' | LC_ALL=C sort -u
+ "unicode/appendable.h",
+ "unicode/brkiter.h",
+ "unicode/bytestream.h",
+ "unicode/bytestrie.h",
+ "unicode/bytestriebuilder.h",
+ "unicode/caniter.h",
+ "unicode/casemap.h",
+ "unicode/char16ptr.h",
+ "unicode/chariter.h",
+ "unicode/dbbi.h",
+ "unicode/docmain.h",
+ "unicode/dtintrv.h",
+ "unicode/edits.h",
+ "unicode/enumset.h",
+ "unicode/errorcode.h",
+ "unicode/filteredbrk.h",
+ "unicode/icudataver.h",
+ "unicode/icuplug.h",
+ "unicode/idna.h",
+ "unicode/listformatter.h",
+ "unicode/localpointer.h",
+ "unicode/locdspnm.h",
+ "unicode/locid.h",
+ "unicode/messagepattern.h",
+ "unicode/normalizer2.h",
+ "unicode/normlzr.h",
+ "unicode/parseerr.h",
+ "unicode/parsepos.h",
+ "unicode/platform.h",
+ "unicode/ptypes.h",
+ "unicode/putil.h",
+ "unicode/rbbi.h",
+ "unicode/rep.h",
+ "unicode/resbund.h",
+ "unicode/schriter.h",
+ "unicode/simpleformatter.h",
+ "unicode/std_string.h",
+ "unicode/strenum.h",
+ "unicode/stringpiece.h",
+ "unicode/stringtriebuilder.h",
+ "unicode/symtable.h",
+ "unicode/ubidi.h",
+ "unicode/ubiditransform.h",
+ "unicode/ubrk.h",
+ "unicode/ucasemap.h",
+ "unicode/ucat.h",
+ "unicode/uchar.h",
+ "unicode/ucharstrie.h",
+ "unicode/ucharstriebuilder.h",
+ "unicode/uchriter.h",
+ "unicode/uclean.h",
+ "unicode/ucnv.h",
+ "unicode/ucnv_cb.h",
+ "unicode/ucnv_err.h",
+ "unicode/ucnvsel.h",
+ "unicode/uconfig.h",
+ "unicode/ucurr.h",
+ "unicode/udata.h",
+ "unicode/udisplaycontext.h",
+ "unicode/uenum.h",
+ "unicode/uidna.h",
+ "unicode/uiter.h",
+ "unicode/uldnames.h",
+ "unicode/ulistformatter.h",
+ "unicode/uloc.h",
+ "unicode/umachine.h",
+ "unicode/umisc.h",
+ "unicode/unifilt.h",
+ "unicode/unifunct.h",
+ "unicode/unimatch.h",
+ "unicode/uniset.h",
+ "unicode/unistr.h",
+ "unicode/unorm.h",
+ "unicode/unorm2.h",
+ "unicode/uobject.h",
+ "unicode/urename.h",
+ "unicode/urep.h",
+ "unicode/ures.h",
+ "unicode/uscript.h",
+ "unicode/uset.h",
+ "unicode/usetiter.h",
+ "unicode/ushape.h",
+ "unicode/usprep.h",
+ "unicode/ustring.h",
+ "unicode/ustringtrie.h",
+ "unicode/utext.h",
+ "unicode/utf.h",
+ "unicode/utf16.h",
+ "unicode/utf32.h",
+ "unicode/utf8.h",
+ "unicode/utf_old.h",
+ "unicode/utrace.h",
+ "unicode/utypes.h",
+ "unicode/uvernum.h",
+ "unicode/uversion.h",
+ ]
+}
diff --git a/deps/v8/build/linux/unbundle/libdrm.gn b/deps/v8/build/linux/unbundle/libdrm.gn
new file mode 100644
index 0000000000..22df98ae71
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libdrm.gn
@@ -0,0 +1,22 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libdrm") {
+ packages = [ "libdrm" ]
+}
+
+shim_headers("libdrm_shim") {
+ root_path = "src/include"
+ headers = [ "drm.h" ]
+}
+
+source_set("libdrm") {
+ deps = [
+ ":libdrm_shim",
+ ]
+ public_configs = [ ":system_libdrm" ]
+}
diff --git a/deps/v8/build/linux/unbundle/libevent.gn b/deps/v8/build/linux/unbundle/libevent.gn
new file mode 100644
index 0000000000..47e48e9b99
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libevent.gn
@@ -0,0 +1,17 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("libevent_shim") {
+ root_path = "."
+ headers = [ "event.h" ]
+}
+
+source_set("libevent") {
+ deps = [
+ ":libevent_shim",
+ ]
+ libs = [ "event" ]
+}
diff --git a/deps/v8/build/linux/unbundle/libjpeg.gn b/deps/v8/build/linux/unbundle/libjpeg.gn
new file mode 100644
index 0000000000..17398ea60b
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libjpeg.gn
@@ -0,0 +1,12 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # Uses system libjpeg. If true, overrides use_libjpeg_turbo.
+ use_system_libjpeg = true
+
+ # Uses libjpeg_turbo as the jpeg implementation. Has no effect if
+ # use_system_libjpeg is set.
+ use_libjpeg_turbo = true
+}
diff --git a/deps/v8/build/linux/unbundle/libpng.gn b/deps/v8/build/linux/unbundle/libpng.gn
new file mode 100644
index 0000000000..60f837bc97
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libpng.gn
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("libpng_config") {
+ packages = [ "libpng" ]
+}
+
+shim_headers("libpng_shim") {
+ root_path = "."
+ headers = [
+ "png.h",
+ "pngconf.h",
+ ]
+}
+
+source_set("libpng") {
+ deps = [
+ ":libpng_shim",
+ ]
+ public_configs = [ ":libpng_config" ]
+}
diff --git a/deps/v8/build/linux/unbundle/libvpx.gn b/deps/v8/build/linux/unbundle/libvpx.gn
new file mode 100644
index 0000000000..eb49e757ca
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libvpx.gn
@@ -0,0 +1,34 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libvpx") {
+ packages = [ "vpx" ]
+}
+
+shim_headers("libvpx_shim") {
+ root_path = "source/libvpx"
+ headers = [
+ "vpx/vp8.h",
+ "vpx/vp8cx.h",
+ "vpx/vp8dx.h",
+ "vpx/vpx_codec.h",
+ "vpx/vpx_codec_impl_bottom.h",
+ "vpx/vpx_codec_impl_top.h",
+ "vpx/vpx_decoder.h",
+ "vpx/vpx_encoder.h",
+ "vpx/vpx_frame_buffer.h",
+ "vpx/vpx_image.h",
+ "vpx/vpx_integer.h",
+ ]
+}
+
+source_set("libvpx") {
+ deps = [
+ ":libvpx_shim",
+ ]
+ public_configs = [ ":system_libvpx" ]
+}
diff --git a/deps/v8/build/linux/unbundle/libwebp.gn b/deps/v8/build/linux/unbundle/libwebp.gn
new file mode 100644
index 0000000000..12574d87be
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libwebp.gn
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_libwebp") {
+ packages = [
+ "libwebp",
+ "libwebpdemux",
+ "libwebpmux",
+ ]
+}
+
+shim_headers("libwebp_shim") {
+ root_path = "src"
+ headers = [
+ "webp/decode.h",
+ "webp/demux.h",
+ "webp/encode.h",
+ "webp/mux.h",
+ "webp/mux_types.h",
+ "webp/types.h",
+ ]
+}
+
+source_set("libwebp_webp") {
+ deps = [
+ ":libwebp_shim",
+ ]
+ public_configs = [ ":system_libwebp" ]
+}
+
+group("libwebp") {
+ deps = [
+ ":libwebp_webp",
+ ]
+}
diff --git a/deps/v8/build/linux/unbundle/libxml.gn b/deps/v8/build/linux/unbundle/libxml.gn
new file mode 100644
index 0000000000..c481bd3547
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libxml.gn
@@ -0,0 +1,18 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("system_libxml") {
+ packages = [ "libxml-2.0" ]
+}
+
+static_library("libxml") {
+ sources = [
+ "chromium/libxml_utils.cc",
+ "chromium/libxml_utils.h",
+ ]
+
+ public_configs = [ ":system_libxml" ]
+}
diff --git a/deps/v8/build/linux/unbundle/libxslt.gn b/deps/v8/build/linux/unbundle/libxslt.gn
new file mode 100644
index 0000000000..885574ef89
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/libxslt.gn
@@ -0,0 +1,13 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("system_libxslt") {
+ packages = [ "libxslt" ]
+}
+
+source_set("libxslt") {
+ public_configs = [ ":system_libxslt" ]
+}
diff --git a/deps/v8/build/linux/unbundle/openh264.gn b/deps/v8/build/linux/unbundle/openh264.gn
new file mode 100644
index 0000000000..68ca48784b
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/openh264.gn
@@ -0,0 +1,42 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("config") {
+ packages = [ "openh264" ]
+}
+
+shim_headers("openh264_shim") {
+ prefix = "wels/"
+ root_path = "src/codec/api/svc"
+ headers = [
+ "codec_api.h",
+ "codec_app_def.h",
+ "codec_def.h",
+ "codec_ver.h",
+ ]
+}
+
+source_set("common") {
+ deps = [
+ ":openh264_shim",
+ ]
+ public_configs = [ ":config" ]
+}
+
+source_set("processing") {
+ deps = [
+ ":openh264_shim",
+ ]
+ public_configs = [ ":config" ]
+}
+
+source_set("encoder") {
+ deps = [
+ ":openh264_shim",
+ ]
+ public_configs = [ ":config" ]
+}
diff --git a/deps/v8/build/linux/unbundle/opus.gn b/deps/v8/build/linux/unbundle/opus.gn
new file mode 100644
index 0000000000..e998e3e7a1
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/opus.gn
@@ -0,0 +1,45 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("opus_config") {
+ packages = [ "opus" ]
+}
+
+shim_headers("opus_shim") {
+ root_path = "src/include"
+ headers = [
+ "opus.h",
+ "opus_defines.h",
+ "opus_multistream.h",
+ "opus_types.h",
+ ]
+}
+
+source_set("opus") {
+ deps = [
+ ":opus_shim",
+ ]
+ public_configs = [ ":opus_config" ]
+}
+
+source_set("opus_compare") {
+}
+
+source_set("opus_demo") {
+}
+
+source_set("test_opus_api") {
+}
+
+source_set("test_opus_decode") {
+}
+
+source_set("test_opus_encode") {
+}
+
+source_set("test_opus_padding") {
+}
diff --git a/deps/v8/build/linux/unbundle/re2.gn b/deps/v8/build/linux/unbundle/re2.gn
new file mode 100644
index 0000000000..94013e5375
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/re2.gn
@@ -0,0 +1,23 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("re2_shim") {
+ root_path = "src"
+ headers = [
+ "re2/filtered_re2.h",
+ "re2/re2.h",
+ "re2/set.h",
+ "re2/stringpiece.h",
+ "re2/variadic_function.h",
+ ]
+}
+
+source_set("re2") {
+ deps = [
+ ":re2_shim",
+ ]
+ libs = [ "re2" ]
+}
diff --git a/deps/v8/build/linux/unbundle/remove_bundled_libraries.py b/deps/v8/build/linux/unbundle/remove_bundled_libraries.py
new file mode 100755
index 0000000000..1cf2841bbd
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/remove_bundled_libraries.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Removes bundled libraries to make sure they are not used.
+
+See README for more details.
+"""
+
+
+import optparse
+import os.path
+import sys
+
+
+def DoMain(argv):
+ my_dirname = os.path.abspath(os.path.dirname(__file__))
+ source_tree_root = os.path.abspath(
+ os.path.join(my_dirname, '..', '..', '..'))
+
+ if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
+ print ('Sanity check failed: please run this script from ' +
+ 'the build/linux/unbundle directory.')
+ return 1
+
+ parser = optparse.OptionParser()
+ parser.add_option('--do-remove', action='store_true')
+
+ options, args = parser.parse_args(argv)
+
+ exclusion_used = {}
+ for exclusion in args:
+ exclusion_used[exclusion] = False
+
+ for root, dirs, files in os.walk(source_tree_root, topdown=False):
+ # Only look at paths which contain a "third_party" component
+ # (note that e.g. third_party.png doesn't count).
+ root_relpath = os.path.relpath(root, source_tree_root)
+ if 'third_party' not in root_relpath.split(os.sep):
+ continue
+
+ for f in files:
+ path = os.path.join(root, f)
+ relpath = os.path.relpath(path, source_tree_root)
+
+ excluded = False
+ for exclusion in args:
+ # Require precise exclusions. Find the right-most third_party
+ # in the relative path, and if there is more than one, ignore
+ # the exclusion if it's completely contained within the part
+ # before that right-most third_party path component.
+ split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
+ if len(split) > 1 and split[0].startswith(exclusion):
+ continue
+
+ if relpath.startswith(exclusion):
+ # Multiple exclusions can match the same path. Go through all of them
+ # and mark each one as used.
+ exclusion_used[exclusion] = True
+ excluded = True
+ if excluded:
+ continue
+
+ # Deleting gyp files almost always leads to gyp failures.
+ # These files come from the Chromium project, and can be replaced if needed.
+ if f.endswith('.gyp') or f.endswith('.gypi'):
+ continue
+
+ # Same about GN files.
+ if f.endswith('.gn') or f.endswith('.gni'):
+ continue
+
+ # Deleting .isolate files leads to gyp failures. They are usually
+ # not used by a distro build anyway.
+ # See http://www.chromium.org/developers/testing/isolated-testing
+ # for more info.
+ if f.endswith('.isolate'):
+ continue
+
+ if options.do_remove:
+ # Delete the file - best way to ensure it's not used during build.
+ os.remove(path)
+ else:
+ # By default just print paths that would be removed.
+ print path
+
+ exit_code = 0
+
+ # Fail if exclusion list contains stale entries - this helps keep it
+ # up to date.
+ for exclusion, used in exclusion_used.iteritems():
+ if not used:
+ print '%s does not exist' % exclusion
+ exit_code = 1
+
+ if not options.do_remove:
+ print ('To actually remove files printed above, please pass ' +
+ 'the --do-remove flag.')
+
+ return exit_code
+
+
+if __name__ == '__main__':
+ sys.exit(DoMain(sys.argv[1:]))
diff --git a/deps/v8/build/linux/unbundle/replace_gn_files.py b/deps/v8/build/linux/unbundle/replace_gn_files.py
new file mode 100755
index 0000000000..d4d07f23d9
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/replace_gn_files.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Replaces GN files in the tree with files from here that
+make the build use system libraries.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import shutil
+import sys
+
+
+REPLACEMENTS = {
+ 'ffmpeg': 'third_party/ffmpeg/BUILD.gn',
+ 'flac': 'third_party/flac/BUILD.gn',
+ 'fontconfig': 'third_party/fontconfig/BUILD.gn',
+ 'freetype': 'build/config/freetype/freetype.gni',
+ 'harfbuzz-ng': 'third_party/harfbuzz-ng/harfbuzz.gni',
+ 'icu': 'third_party/icu/BUILD.gn',
+ 'libdrm': 'third_party/libdrm/BUILD.gn',
+ 'libevent': 'base/third_party/libevent/BUILD.gn',
+ 'libjpeg': 'third_party/libjpeg.gni',
+ 'libpng': 'third_party/libpng/BUILD.gn',
+ 'libvpx': 'third_party/libvpx/BUILD.gn',
+ 'libwebp': 'third_party/libwebp/BUILD.gn',
+ 'libxml': 'third_party/libxml/BUILD.gn',
+ 'libxslt': 'third_party/libxslt/BUILD.gn',
+ 'openh264': 'third_party/openh264/BUILD.gn',
+ 'opus': 'third_party/opus/BUILD.gn',
+ 're2': 'third_party/re2/BUILD.gn',
+ 'snappy': 'third_party/snappy/BUILD.gn',
+ 'yasm': 'third_party/yasm/yasm_assemble.gni',
+ 'zlib': 'third_party/zlib/BUILD.gn',
+}
+
+
+def DoMain(argv):
+ my_dirname = os.path.dirname(__file__)
+ source_tree_root = os.path.abspath(
+ os.path.join(my_dirname, '..', '..', '..'))
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-libraries', nargs='*', default=[])
+ parser.add_argument('--undo', action='store_true')
+
+ args = parser.parse_args(argv)
+
+ handled_libraries = set()
+ for lib, path in REPLACEMENTS.items():
+ if lib not in args.system_libraries:
+ continue
+ handled_libraries.add(lib)
+
+ if args.undo:
+ # Restore original file, and also remove the backup.
+ # This is meant to restore the source tree to its original state.
+ os.rename(os.path.join(source_tree_root, path + '.orig'),
+ os.path.join(source_tree_root, path))
+ else:
+ # Create a backup copy for --undo.
+ shutil.copyfile(os.path.join(source_tree_root, path),
+ os.path.join(source_tree_root, path + '.orig'))
+
+ # Copy the GN file from directory of this script to target path.
+ shutil.copyfile(os.path.join(my_dirname, '%s.gn' % lib),
+ os.path.join(source_tree_root, path))
+
+ unhandled_libraries = set(args.system_libraries) - handled_libraries
+ if unhandled_libraries:
+ print('Unrecognized system libraries requested: %s' % ', '.join(
+ sorted(unhandled_libraries)), file=sys.stderr)
+ return 1
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(DoMain(sys.argv[1:]))
diff --git a/deps/v8/build/linux/unbundle/snappy.gn b/deps/v8/build/linux/unbundle/snappy.gn
new file mode 100644
index 0000000000..9956ef88bc
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/snappy.gn
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("snappy_shim") {
+ root_path = "src"
+ headers = [
+ "snappy-c.h",
+ "snappy-sinksource.h",
+ "snappy-stubs-public.h",
+ "snappy.h",
+ ]
+}
+
+source_set("snappy") {
+ deps = [
+ ":snappy_shim",
+ ]
+ libs = [ "snappy" ]
+}
diff --git a/deps/v8/build/linux/unbundle/yasm.gn b/deps/v8/build/linux/unbundle/yasm.gn
new file mode 100644
index 0000000000..b5b440e66f
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/yasm.gn
@@ -0,0 +1,102 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_cpu == "x86") {
+ _yasm_flags = [
+ "-felf32",
+ "-m",
+ "x86",
+ ]
+} else if (current_cpu == "x64") {
+ _yasm_flags = [
+ "-DPIC",
+ "-felf64",
+ "-m",
+ "amd64",
+ ]
+}
+
+template("yasm_assemble") {
+ action_name = "${target_name}_action"
+ source_set_name = target_name
+
+ action_foreach(action_name) {
+ # Only the source set can depend on this.
+ visibility = [ ":$source_set_name" ]
+
+ script = "//third_party/yasm/run_yasm.py"
+ sources = invoker.sources
+
+ if (defined(invoker.inputs)) {
+ inputs = invoker.inputs
+ }
+
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ args = [ "yasm" ] + _yasm_flags
+ if (defined(invoker.yasm_flags)) {
+ args += invoker.yasm_flags
+ }
+
+ # User defined include dirs go first.
+ if (defined(invoker.include_dirs)) {
+ foreach(include, invoker.include_dirs) {
+ args += [ "-I" + rebase_path(include, root_build_dir) ]
+ }
+ }
+
+ # Default yasm include dirs. Make it match the native build (source root and
+ # root generated code directory).
+ # This goes at the end of the include list.
+ args += [
+ "-I.",
+
+ # Using "//." will produce a relative path "../.." which looks better than
+ # "../../" which will result from using "//" as the base (although both
+ # work). This is because rebase_path will terminate the result in a
+ # slash if the input ends in a slash.
+ "-I" + rebase_path("//.", root_build_dir),
+ "-I" + rebase_path(root_gen_dir, root_build_dir),
+ ]
+
+ # Extra defines.
+ if (defined(invoker.defines)) {
+ foreach(def, invoker.defines) {
+ args += [ "-D$def" ]
+ }
+ }
+
+ # Output file.
+ outputs = [
+ "$target_out_dir/$source_set_name/{{source_name_part}}.o",
+ ]
+ args += [
+ "-o",
+ rebase_path(outputs[0], root_build_dir),
+ "{{source}}",
+ ]
+
+ # The wrapper script run_yasm will write the depfile to the same name as
+ # the output but with .d appended (like gcc will).
+ depfile = outputs[0] + ".d"
+ }
+
+ # Gather the .o files into a linkable thing. This doesn't actually link
+ # anything (a source set just compiles files to link later), but will pass
+ # the object files generated by the action up the dependency chain.
+ static_library(source_set_name) {
+ if (defined(invoker.visibility)) {
+ visibility = invoker.visibility
+ }
+
+ sources = get_target_outputs(":$action_name")
+
+ deps = [
+ ":$action_name",
+ ]
+ }
+}
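+
+# A minimal use of this template looks like the following (hypothetical
+# target and file names):
+#
+#   yasm_assemble("my_math_asm") {
+#     sources = [ "fast_math.asm" ]
+#     defines = [ "SOME_DEFINE" ]
+#   }
+#
+# The resulting static library target can then be listed in deps like any
+# other link unit.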
diff --git a/deps/v8/build/linux/unbundle/zlib.gn b/deps/v8/build/linux/unbundle/zlib.gn
new file mode 100644
index 0000000000..020fc7e935
--- /dev/null
+++ b/deps/v8/build/linux/unbundle/zlib.gn
@@ -0,0 +1,66 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/shim_headers.gni")
+
+shim_headers("zlib_shim") {
+ root_path = "."
+ headers = [ "zlib.h" ]
+}
+
+config("system_zlib") {
+ defines = [ "USE_SYSTEM_ZLIB=1" ]
+}
+
+source_set("zlib") {
+ deps = [
+ ":zlib_shim",
+ ]
+ libs = [ "z" ]
+ public_configs = [ ":system_zlib" ]
+}
+
+shim_headers("minizip_shim") {
+ root_path = "contrib"
+ headers = [
+ "minizip/crypt.h",
+ "minizip/ioapi.h",
+ "minizip/iowin32.h",
+ "minizip/mztools.h",
+ "minizip/unzip.h",
+ "minizip/zip.h",
+ ]
+}
+
+source_set("minizip") {
+ deps = [
+ ":minizip_shim",
+ ]
+ libs = [ "minizip" ]
+}
+
+static_library("zip") {
+ sources = [
+ "google/zip.cc",
+ "google/zip.h",
+ "google/zip_internal.cc",
+ "google/zip_internal.h",
+ "google/zip_reader.cc",
+ "google/zip_reader.h",
+ ]
+ deps = [
+ ":minizip",
+ "//base",
+ ]
+}
+
+static_library("compression_utils") {
+ sources = [
+ "google/compression_utils.cc",
+ "google/compression_utils.h",
+ ]
+ deps = [
+ ":zlib",
+ ]
+}
diff --git a/deps/v8/build/locale_tool.py b/deps/v8/build/locale_tool.py
new file mode 100755
index 0000000000..04d5cd3591
--- /dev/null
+++ b/deps/v8/build/locale_tool.py
@@ -0,0 +1,1483 @@
+#!/usr/bin/env vpython
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script used to manage locale-related files in Chromium.
+
+This script is used to check, and potentially fix, many locale-related files
+in your Chromium workspace, such as:
+
+ - GRIT input files (.grd) and the corresponding translations (.xtb).
+
+ - BUILD.gn files listing the Android localized string resource .xml files
+ generated by GRIT for all supported Chrome locales. These correspond to
+ <output> elements that use the type="android" attribute.
+
+The --scan-dir <dir> option can be used to check for all files under a specific
+directory, and the --fix-inplace option can be used to try fixing any file
+that doesn't pass the check.
+
+This can be very handy to avoid tedious and repetitive work when adding new
+translations / locales to the Chrome code base, since this script can update
+said input files for you.
+
+Important note: checks and fixes may fail on some input files. For example,
+remoting/resources/remoting_strings.grd contains an in-line comment element
+inside its <outputs> section that breaks the script. The check will fail, and
+so will any attempted fix, but at least the file will not be modified.
+"""
+import argparse
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import unittest
+
+# Assume this script is under build/
+_SCRIPT_DIR = os.path.dirname(__file__)
+_SCRIPT_NAME = os.path.join(_SCRIPT_DIR, os.path.basename(__file__))
+_TOP_SRC_DIR = os.path.join(_SCRIPT_DIR, '..')
+
+# Need to import android/gyp/util/resource_utils.py here.
+sys.path.insert(0, os.path.join(_SCRIPT_DIR, 'android/gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+
+# This locale is the default and doesn't have translations.
+_DEFAULT_LOCALE = 'en-US'
+
+# Misc terminal codes to provide human friendly progress output.
+_CONSOLE_CODE_MOVE_CURSOR_TO_COLUMN_0 = '\x1b[0G'
+_CONSOLE_CODE_ERASE_LINE = '\x1b[K'
+_CONSOLE_START_LINE = (
+ _CONSOLE_CODE_MOVE_CURSOR_TO_COLUMN_0 + _CONSOLE_CODE_ERASE_LINE)
+
+##########################################################################
+##########################################################################
+#####
+##### G E N E R I C H E L P E R F U N C T I O N S
+#####
+##########################################################################
+##########################################################################
+
+def _FixChromiumLangAttribute(lang):
+ """Map XML "lang" attribute values to Chromium locale names."""
+ _CHROMIUM_LANG_FIXES = {
+ 'en': 'en-US', # For now, Chromium doesn't have an 'en' locale.
+ 'iw': 'he', # 'iw' is the obsolete form of ISO 639-1 for Hebrew
+ 'no': 'nb', # 'no' is used by the Translation Console for Norwegian (nb).
+ }
+ return _CHROMIUM_LANG_FIXES.get(lang, lang)
+
+
+def _FixTranslationConsoleLocaleName(locale):
+ _FIXES = {
+ 'nb': 'no', # Norwegian.
+ 'he': 'iw', # Hebrew
+ }
+ return _FIXES.get(locale, locale)
+
+
+def _CompareLocaleLists(list_a, list_expected, list_name):
+ """Compare two lists of locale names. Print errors if they differ.
+
+ Args:
+ list_a: First list of locales.
+ list_expected: Second list of locales, as expected.
+ list_name: Name of list printed in error messages.
+ Returns:
+ On success, return False. On error, print error messages and return True.
+ """
+ errors = []
+ missing_locales = sorted(set(list_a) - set(list_expected))
+ if missing_locales:
+ errors.append('Missing locales: %s' % missing_locales)
+
+ extra_locales = sorted(set(list_expected) - set(list_a))
+ if extra_locales:
+ errors.append('Unexpected locales: %s' % extra_locales)
+
+ if errors:
+ print 'Errors in %s definition:' % list_name
+ for error in errors:
+ print ' %s\n' % error
+ return True
+
+ return False
+
+
+def _BuildIntervalList(input_list, predicate):
+ """Find ranges of contiguous list items that pass a given predicate.
+
+ Args:
+ input_list: An input list of items of any type.
+ predicate: A function that takes a list item and return True if it
+ passes a given test.
+ Returns:
+ A list of (start_pos, end_pos) tuples, where all items in
+ [start_pos, end_pos) pass the predicate.
+ """
+ result = []
+ size = len(input_list)
+ start = 0
+ while True:
+ # Find first item in list that passes the predicate.
+ while start < size and not predicate(input_list[start]):
+ start += 1
+
+ if start >= size:
+ return result
+
+ # Find first item in the rest of the list that does not pass the
+ # predicate.
+ end = start + 1
+ while end < size and predicate(input_list[end]):
+ end += 1
+
+ result.append((start, end))
+ start = end + 1
+
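+# Example (hypothetical input): _BuildIntervalList([1, 0, 2, 3], bool)
+# returns [(0, 1), (2, 4)] -- the index ranges of contiguous truthy items.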
+
+def _SortListSubRange(input_list, start, end, key_func):
+ """Sort an input list's sub-range according to a specific key function.
+
+ Args:
+ input_list: An input list.
+ start: Sub-range starting position in list.
+ end: Sub-range limit position in list.
+ key_func: A function that extracts a sort key from a line.
+ Returns:
+ A copy of |input_list|, with all items in [|start|, |end|) sorted
+ according to |key_func|.
+ """
+ result = input_list[:start]
+ inputs = []
+ for pos in xrange(start, end):
+ line = input_list[pos]
+ key = key_func(line)
+ inputs.append((key, line))
+
+ for _, line in sorted(inputs):
+ result.append(line)
+
+ result += input_list[end:]
+ return result
+
+
+def _SortElementsRanges(lines, element_predicate, element_key):
+ """Sort all elements of a given type in a list of lines by a given key.
+
+ Args:
+ lines: input lines.
+ element_predicate: predicate function to select elements to sort.
+ element_key: lambda returning a comparison key for each element that
+ passes the predicate.
+ Returns:
+ A new list of input lines, with lines [start..end) sorted.
+ """
+ intervals = _BuildIntervalList(lines, element_predicate)
+ for start, end in intervals:
+ lines = _SortListSubRange(lines, start, end, element_key)
+
+ return lines
+
+
+def _ProcessFile(input_file, locales, check_func, fix_func):
+ """Process a given input file, potentially fixing it.
+
+ Args:
+ input_file: Input file path.
+ locales: List of Chrome locales to consider / expect.
+ check_func: A lambda called to check the input file lines with
+ (input_lines, locales) argument. It must return a list of error
+ messages, or None on success.
+ fix_func: None, or a lambda called to fix the input file lines with
+ (input_lines, locales). It must return the new list of lines for
+ the input file, and may raise an Exception in case of error.
+ Returns:
+ True at the moment.
+ """
+ print '%sProcessing %s...' % (_CONSOLE_START_LINE, input_file),
+ sys.stdout.flush()
+ with open(input_file) as f:
+ input_lines = f.readlines()
+ errors = check_func(input_file, input_lines, locales)
+ if errors:
+ print '\n%s%s' % (_CONSOLE_START_LINE, '\n'.join(errors))
+ if fix_func:
+ try:
+ input_lines = fix_func(input_file, input_lines, locales)
+ output = ''.join(input_lines)
+ with open(input_file, 'wt') as f:
+ f.write(output)
+ print 'Fixed %s.' % input_file
+ except Exception as e: # pylint: disable=broad-except
+ print 'Skipped %s: %s' % (input_file, e)
+
+ return True
+
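+# Illustrative call shape (the file path is hypothetical; the check/fix
+# functions are defined later in this file):
+#   _ProcessFile('path/to/strings.grd', set(ChromeLocales()),
+#                _CheckGrdTranslations, _AddMissingLocalesInGrdTranslations)
+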
+
+def _ScanDirectoriesForFiles(scan_dirs, file_predicate):
+ """Scan a directory for files that match a given predicate.
+
+ Args:
+ scan_dir: A list of top-level directories to start scan in.
+ file_predicate: lambda function which is passed the file's base name
+ and returns True if its full path, relative to |scan_dir|, should be
+ passed in the result.
+ Returns:
+ A list of file full paths.
+ """
+ result = []
+ for src_dir in scan_dirs:
+ for root, _, files in os.walk(src_dir):
+ result.extend(os.path.join(root, f) for f in files if file_predicate(f))
+ return result
+
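+# Illustrative example (hypothetical directory): collect all .grd files
+# under a given source sub-tree.
+#   grd_files = _ScanDirectoriesForFiles(
+#       ['ui/android'], lambda name: name.endswith('.grd'))
+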
+
+def _WriteFile(file_path, file_data):
+ """Write |file_data| to |file_path|."""
+ with open(file_path, 'w') as f:
+ f.write(file_data)
+
+
+def _FindGnExecutable():
+ """Locate the real GN executable used by this Chromium checkout.
+
+ This is needed because the depot_tools 'gn' wrapper script will look
+ for .gclient and other things we really don't need here.
+
+ Returns:
+ Path of real host GN executable from current Chromium src/ checkout.
+ """
+ # Simply scan buildtools/*/gn and return the first one found so we don't
+ # have to guess the platform-specific sub-directory name (e.g. 'linux64'
+ # for 64-bit Linux machines).
+ buildtools_dir = os.path.join(_TOP_SRC_DIR, 'buildtools')
+ for subdir in os.listdir(buildtools_dir):
+ subdir_path = os.path.join(buildtools_dir, subdir)
+ if not os.path.isdir(subdir_path):
+ continue
+ gn_path = os.path.join(subdir_path, 'gn')
+ if os.path.exists(gn_path):
+ return gn_path
+ return None
+
+
+def _PrettyPrintListAsLines(input_list, available_width, trailing_comma=False):
+  """Pretty-print a list as comma-separated lines of |available_width| max."""
+ result = []
+ input_str = ', '.join(input_list)
+ while len(input_str) > available_width:
+ pos = input_str.rfind(',', 0, available_width)
+ result.append(input_str[:pos + 1])
+ input_str = input_str[pos + 1:].lstrip()
+ if trailing_comma and input_str:
+ input_str += ','
+ result.append(input_str)
+ return result
+
+
+class _PrettyPrintListAsLinesTest(unittest.TestCase):
+
+ def test_empty_list(self):
+ self.assertListEqual([''], _PrettyPrintListAsLines([], 10))
+
+ def test_wrapping(self):
+ input_list = ['foo', 'bar', 'zoo', 'tool']
+ self.assertListEqual(
+ _PrettyPrintListAsLines(input_list, 8),
+ ['foo,', 'bar,', 'zoo,', 'tool'])
+ self.assertListEqual(
+ _PrettyPrintListAsLines(input_list, 12), ['foo, bar,', 'zoo, tool'])
+ self.assertListEqual(
+ _PrettyPrintListAsLines(input_list, 79), ['foo, bar, zoo, tool'])
+
+ def test_trailing_comma(self):
+ input_list = ['foo', 'bar', 'zoo', 'tool']
+ self.assertListEqual(
+ _PrettyPrintListAsLines(input_list, 8, trailing_comma=True),
+ ['foo,', 'bar,', 'zoo,', 'tool,'])
+ self.assertListEqual(
+ _PrettyPrintListAsLines(input_list, 12, trailing_comma=True),
+ ['foo, bar,', 'zoo, tool,'])
+ self.assertListEqual(
+ _PrettyPrintListAsLines(input_list, 79, trailing_comma=True),
+ ['foo, bar, zoo, tool,'])
+
+
+##########################################################################
+##########################################################################
+#####
+##### L O C A L E S L I S T S
+#####
+##########################################################################
+##########################################################################
+
+# Various lists of locales that will be extracted from
+# build/config/locales.gni. Do not use these directly; use ChromeLocales(),
+# AndroidOmittedLocales() and IosUnsupportedLocales() instead.
+_INTERNAL_CHROME_LOCALES = []
+_INTERNAL_ANDROID_OMITTED_LOCALES = []
+_INTERNAL_IOS_UNSUPPORTED_LOCALES = []
+
+
+def ChromeLocales():
+ """Return the list of all locales supported by Chrome."""
+ if not _INTERNAL_CHROME_LOCALES:
+ _ExtractAllChromeLocalesLists()
+ return _INTERNAL_CHROME_LOCALES
+
+
+def AndroidOmittedLocales():
+ """Reutrn the list of locales omitted from Android APKs."""
+ if not _INTERNAL_ANDROID_OMITTED_LOCALES:
+ _ExtractAllChromeLocalesLists()
+ return _INTERNAL_ANDROID_OMITTED_LOCALES
+
+
+def IosUnsupportedLocales():
+ """Return the list of locales that are unsupported on iOS."""
+ if not _INTERNAL_IOS_UNSUPPORTED_LOCALES:
+ _ExtractAllChromeLocalesLists()
+ return _INTERNAL_IOS_UNSUPPORTED_LOCALES
+
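+# Illustrative usage: the three lists above are extracted lazily, on the
+# first call to any of these accessors:
+#   locales = ChromeLocales()
+#   omitted = AndroidOmittedLocales()
+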
+
+def _PrepareTinyGnWorkspace(work_dir, out_subdir_name='out'):
+ """Populate an empty directory with a tiny set of working GN config files.
+
+ This allows us to run 'gn gen <out> --root <work_dir>' as fast as possible
+ to generate files containing the locales list. This takes about 300ms on
+ a decent machine, instead of more than 5 seconds when running the equivalent
+ commands from a real Chromium workspace, which requires regenerating more
+ than 23k targets.
+
+ Args:
+ work_dir: target working directory.
+ out_subdir_name: Name of output sub-directory.
+ Returns:
+ Full path of output directory created inside |work_dir|.
+ """
+ # Create top-level .gn file that must point to the BUILDCONFIG.gn.
+ _WriteFile(os.path.join(work_dir, '.gn'),
+ 'buildconfig = "//BUILDCONFIG.gn"\n')
+ # Create BUILDCONFIG.gn which must set a default toolchain. Also add
+ # all variables that may be used in locales.gni in a declare_args() block.
+ _WriteFile(os.path.join(work_dir, 'BUILDCONFIG.gn'),
+ r'''set_default_toolchain("toolchain")
+declare_args() {
+ is_ios = false
+}
+''')
+
+ # Create fake toolchain required by BUILDCONFIG.gn.
+ os.mkdir(os.path.join(work_dir, 'toolchain'))
+ _WriteFile(os.path.join(work_dir, 'toolchain', 'BUILD.gn'),
+ r'''toolchain("toolchain") {
+ tool("stamp") {
+ command = "touch {{output}}" # Required by action()
+ }
+}
+''')
+
+  # Create the top-level BUILD.gn. GN requires at least one target to build,
+  # so declare a fake action which will never be invoked. Also write the
+  # locales to misc files in the output directory.
+ _WriteFile(os.path.join(work_dir, 'BUILD.gn'),
+ r'''import("//locales.gni")
+
+action("create_foo") { # fake action to avoid GN complaints.
+ script = "//build/create_foo.py"
+ inputs = []
+ outputs = [ "$target_out_dir/$target_name" ]
+}
+
+# Write the locales lists to files in the output directory.
+_filename = root_build_dir + "/foo"
+write_file(_filename + ".locales", locales, "json")
+write_file(_filename + ".android_omitted_locales",
+ android_chrome_omitted_locales,
+ "json")
+write_file(_filename + ".ios_unsupported_locales",
+ ios_unsupported_locales,
+ "json")
+''')
+
+ # Copy build/config/locales.gni to the workspace, as required by BUILD.gn.
+ shutil.copyfile(os.path.join(_TOP_SRC_DIR, 'build', 'config', 'locales.gni'),
+ os.path.join(work_dir, 'locales.gni'))
+
+ # Create output directory.
+ out_path = os.path.join(work_dir, out_subdir_name)
+ os.mkdir(out_path)
+
+ # And ... we're good.
+ return out_path
+
+
+# Set this global variable to the path of a given temporary directory
+# before calling _ExtractAllChromeLocalesLists() if you want to debug
+# the locales list extraction process.
+_DEBUG_LOCALES_WORK_DIR = None
+
+
+def _ReadJsonList(file_path):
+ """Read a JSON file that must contain a list, and return it."""
+ with open(file_path) as f:
+ data = json.load(f)
+ assert isinstance(data, list), "JSON file %s is not a list!" % file_path
+ return [item.encode('utf8') for item in data]
+
+
+def _ExtractAllChromeLocalesLists():
+ with build_utils.TempDir() as tmp_path:
+ if _DEBUG_LOCALES_WORK_DIR:
+ tmp_path = _DEBUG_LOCALES_WORK_DIR
+ build_utils.DeleteDirectory(tmp_path)
+ build_utils.MakeDirectory(tmp_path)
+
+ out_path = _PrepareTinyGnWorkspace(tmp_path, 'out')
+
+ # NOTE: The file suffixes used here should be kept in sync with
+ # build/config/locales.gni
+ gn_executable = _FindGnExecutable()
+ subprocess.check_output(
+ [gn_executable, 'gen', out_path, '--root=' + tmp_path])
+
+ global _INTERNAL_CHROME_LOCALES
+ _INTERNAL_CHROME_LOCALES = _ReadJsonList(
+ os.path.join(out_path, 'foo.locales'))
+
+ global _INTERNAL_ANDROID_OMITTED_LOCALES
+ _INTERNAL_ANDROID_OMITTED_LOCALES = _ReadJsonList(
+ os.path.join(out_path, 'foo.android_omitted_locales'))
+
+ global _INTERNAL_IOS_UNSUPPORTED_LOCALES
+ _INTERNAL_IOS_UNSUPPORTED_LOCALES = _ReadJsonList(
+ os.path.join(out_path, 'foo.ios_unsupported_locales'))
+
+
+##########################################################################
+##########################################################################
+#####
+##### G R D H E L P E R F U N C T I O N S
+#####
+##########################################################################
+##########################################################################
+
+# Technical note:
+#
+# Even though .grd files are XML, an xml parser library is not used in order
+# to preserve the original file's structure after modification. ElementTree
+# tends to re-order attributes in each element when re-writing an XML
+# document tree, which is undesirable here.
+#
+# Thus simple line-based regular expression matching is used instead.
+#
+
+# Misc regular expressions used to match elements and their attributes.
+_RE_OUTPUT_ELEMENT = re.compile(r'<output (.*)\s*/>')
+_RE_TRANSLATION_ELEMENT = re.compile(r'<file (.*\.xtb")\s*/>')
+_RE_FILENAME_ATTRIBUTE = re.compile(r'filename="([^"]*)"')
+_RE_LANG_ATTRIBUTE = re.compile(r'lang="([^"]*)"')
+_RE_PATH_ATTRIBUTE = re.compile(r'path="([^"]*)"')
+_RE_TYPE_ANDROID_ATTRIBUTE = re.compile(r'type="android"')
+
+assert _RE_TRANSLATION_ELEMENT.match('<file path="foo/bar.xtb" />')
+assert _RE_TRANSLATION_ELEMENT.match('<file path="foo/bar.xtb"/>')
+assert _RE_TRANSLATION_ELEMENT.match('<file path="foo/bar.xml" />') is None
+
+
+def _IsGritInputFile(input_file):
+ """Returns True iff this is a GRIT input file."""
+ return input_file.endswith('.grd')
+
+
+def _SortGrdElementsRanges(grd_lines, element_predicate):
+ """Sort all .grd elements of a given type by their lang attribute."""
+ return _SortElementsRanges(
+ grd_lines,
+ element_predicate,
+ lambda x: _RE_LANG_ATTRIBUTE.search(x).group(1))
+
+
+def _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales):
+ """Check the element 'lang' attributes in specific .grd lines range.
+
+ This really checks the following:
+ - Each item has a correct 'lang' attribute.
+ - There are no duplicated lines for the same 'lang' attribute.
+    - There are no extra locales that Chromium doesn't want.
+    - No wanted locale is missing.
+
+ Args:
+ grd_lines: Input .grd lines.
+ start: Sub-range start position in input line list.
+ end: Sub-range limit position in input line list.
+ wanted_locales: Set of wanted Chromium locale names.
+ Returns:
+ List of error message strings for this input. Empty on success.
+ """
+ errors = []
+ locales = set()
+ for pos in xrange(start, end):
+ line = grd_lines[pos]
+ m = _RE_LANG_ATTRIBUTE.search(line)
+ if not m:
+ errors.append('%d: Missing "lang" attribute in <output> element' % pos +
+ 1)
+ continue
+ lang = m.group(1)
+ cr_locale = _FixChromiumLangAttribute(lang)
+ if cr_locale in locales:
+ errors.append(
+ '%d: Redefinition of <output> for "%s" locale' % (pos + 1, lang))
+ locales.add(cr_locale)
+
+ extra_locales = locales.difference(wanted_locales)
+ if extra_locales:
+ errors.append('%d-%d: Extra locales found: %s' % (start + 1, end + 1,
+ sorted(extra_locales)))
+
+ missing_locales = wanted_locales.difference(locales)
+ if missing_locales:
+ errors.append('%d-%d: Missing locales: %s' % (start + 1, end + 1,
+ sorted(missing_locales)))
+
+ return errors
+
+
+##########################################################################
+##########################################################################
+#####
+##### G R D A N D R O I D O U T P U T S
+#####
+##########################################################################
+##########################################################################
+
+def _IsGrdAndroidOutputLine(line):
+ """Returns True iff this is an Android-specific <output> line."""
+ m = _RE_OUTPUT_ELEMENT.search(line)
+ if m:
+ return 'type="android"' in m.group(1)
+ return False
+
+assert _IsGrdAndroidOutputLine(' <output type="android"/>')
+
+# Many of the functions below have unused arguments due to genericity.
+# pylint: disable=unused-argument
+
+def _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end,
+ wanted_locales):
+ """Check all <output> elements in specific input .grd lines range.
+
+ This really checks the following:
+ - Filenames exist for each listed locale.
+ - Filenames are well-formed.
+
+ Args:
+ grd_lines: Input .grd lines.
+ start: Sub-range start position in input line list.
+ end: Sub-range limit position in input line list.
+ wanted_locales: Set of wanted Chromium locale names.
+ Returns:
+ List of error message strings for this input. Empty on success.
+ """
+ errors = []
+ for pos in xrange(start, end):
+ line = grd_lines[pos]
+ m = _RE_LANG_ATTRIBUTE.search(line)
+ if not m:
+ continue
+ lang = m.group(1)
+ cr_locale = _FixChromiumLangAttribute(lang)
+
+ m = _RE_FILENAME_ATTRIBUTE.search(line)
+ if not m:
+      errors.append('%d: Missing filename attribute in <output> element' %
+                    (pos + 1))
+ else:
+ filename = m.group(1)
+ if not filename.endswith('.xml'):
+ errors.append(
+ '%d: Filename should end with ".xml": %s' % (pos + 1, filename))
+
+ dirname = os.path.basename(os.path.dirname(filename))
+ prefix = ('values-%s' % resource_utils.ToAndroidLocaleName(cr_locale)
+ if cr_locale != _DEFAULT_LOCALE else 'values')
+ if dirname != prefix:
+ errors.append(
+            '%d: Directory name should be %s: %s' % (pos + 1, prefix, filename))
+
+ return errors
+
+
+def _CheckGrdAndroidOutputElements(grd_file, grd_lines, wanted_locales):
+ """Check all <output> elements related to Android.
+
+ Args:
+ grd_file: Input .grd file path.
+ grd_lines: List of input .grd lines.
+ wanted_locales: set of wanted Chromium locale names.
+ Returns:
+ List of error message strings. Empty on success.
+ """
+ intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine)
+ errors = []
+ for start, end in intervals:
+ errors += _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales)
+ errors += _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end,
+ wanted_locales)
+ return errors
+
+
+def _AddMissingLocalesInGrdAndroidOutputs(grd_file, grd_lines, wanted_locales):
+ """Fix an input .grd line by adding missing Android outputs.
+
+ Args:
+ grd_file: Input .grd file path.
+ grd_lines: Input .grd line list.
+ wanted_locales: set of Chromium locale names.
+ Returns:
+ A new list of .grd lines, containing new <output> elements when needed
+ for locales from |wanted_locales| that were not part of the input.
+ """
+ intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine)
+ for start, end in reversed(intervals):
+ locales = set()
+ for pos in xrange(start, end):
+ lang = _RE_LANG_ATTRIBUTE.search(grd_lines[pos]).group(1)
+ locale = _FixChromiumLangAttribute(lang)
+ locales.add(locale)
+
+ missing_locales = wanted_locales.difference(locales)
+ if not missing_locales:
+ continue
+
+ src_locale = 'bg'
+ src_lang_attribute = 'lang="%s"' % src_locale
+ src_line = None
+ for pos in xrange(start, end):
+ if src_lang_attribute in grd_lines[pos]:
+ src_line = grd_lines[pos]
+ break
+
+ if not src_line:
+ raise Exception(
+ 'Cannot find <output> element with "%s" lang attribute' % src_locale)
+
+ line_count = end - 1
+ for locale in missing_locales:
+ android_locale = resource_utils.ToAndroidLocaleName(locale)
+ dst_line = src_line.replace(
+ 'lang="%s"' % src_locale, 'lang="%s"' % locale).replace(
+ 'values-%s/' % src_locale, 'values-%s/' % android_locale)
+ grd_lines.insert(line_count, dst_line)
+ line_count += 1
+
+ # Sort the new <output> elements.
+ return _SortGrdElementsRanges(grd_lines, _IsGrdAndroidOutputLine)
+
+
+##########################################################################
+##########################################################################
+#####
+##### G R D T R A N S L A T I O N S
+#####
+##########################################################################
+##########################################################################
+
+
+def _IsTranslationGrdOutputLine(line):
+ """Returns True iff this is an output .xtb <file> element."""
+ m = _RE_TRANSLATION_ELEMENT.search(line)
+ return m is not None
+
+
+def _CheckGrdTranslationElementRange(grd_lines, start, end,
+ wanted_locales):
+ """Check all <translations> sub-elements in specific input .grd lines range.
+
+ This really checks the following:
+ - Each item has a 'path' attribute.
+    - Each such path value ends with '.xtb'.
+
+ Args:
+ grd_lines: Input .grd lines.
+ start: Sub-range start position in input line list.
+ end: Sub-range limit position in input line list.
+ wanted_locales: Set of wanted Chromium locale names.
+ Returns:
+ List of error message strings for this input. Empty on success.
+ """
+ errors = []
+ for pos in xrange(start, end):
+ line = grd_lines[pos]
+ m = _RE_LANG_ATTRIBUTE.search(line)
+ if not m:
+ continue
+ m = _RE_PATH_ATTRIBUTE.search(line)
+ if not m:
+      errors.append('%d: Missing path attribute in <file> element' %
+                    (pos + 1))
+ else:
+ filename = m.group(1)
+ if not filename.endswith('.xtb'):
+ errors.append(
+ '%d: Path should end with ".xtb": %s' % (pos + 1, filename))
+
+ return errors
+
+
+def _CheckGrdTranslations(grd_file, grd_lines, wanted_locales):
+ """Check all <file> elements that correspond to an .xtb output file.
+
+ Args:
+ grd_file: Input .grd file path.
+ grd_lines: List of input .grd lines.
+ wanted_locales: set of wanted Chromium locale names.
+ Returns:
+ List of error message strings. Empty on success.
+ """
+ wanted_locales = wanted_locales - set([_DEFAULT_LOCALE])
+ intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine)
+ errors = []
+ for start, end in intervals:
+ errors += _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales)
+ errors += _CheckGrdTranslationElementRange(grd_lines, start, end,
+ wanted_locales)
+ return errors
+
+
+# Regular expression used to replace the lang attribute inside .xtb files.
+_RE_TRANSLATIONBUNDLE = re.compile('<translationbundle lang="(.*)">')
+
+
+def _CreateFakeXtbFileFrom(src_xtb_path, dst_xtb_path, dst_locale):
+ """Create a fake .xtb file.
+
+ Args:
+ src_xtb_path: Path to source .xtb file to copy from.
+ dst_xtb_path: Path to destination .xtb file to write to.
+ dst_locale: Destination locale, the lang attribute in the source file
+ will be substituted with this value before its lines are written
+ to the destination file.
+ """
+ with open(src_xtb_path) as f:
+ src_xtb_lines = f.readlines()
+
+ def replace_xtb_lang_attribute(line):
+ m = _RE_TRANSLATIONBUNDLE.search(line)
+ if not m:
+ return line
+ return line[:m.start(1)] + dst_locale + line[m.end(1):]
+
+ dst_xtb_lines = [replace_xtb_lang_attribute(line) for line in src_xtb_lines]
+ with build_utils.AtomicOutput(dst_xtb_path) as tmp:
+ tmp.writelines(dst_xtb_lines)
+
+
+def _AddMissingLocalesInGrdTranslations(grd_file, grd_lines, wanted_locales):
+ """Fix an input .grd line by adding missing Android outputs.
+
+ This also creates fake .xtb files from the one provided for 'en-GB'.
+
+ Args:
+ grd_file: Input .grd file path.
+ grd_lines: Input .grd line list.
+ wanted_locales: set of Chromium locale names.
+ Returns:
+    A new list of .grd lines, containing new <file> elements when needed
+ for locales from |wanted_locales| that were not part of the input.
+ """
+ wanted_locales = wanted_locales - set([_DEFAULT_LOCALE])
+ intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine)
+ for start, end in reversed(intervals):
+ locales = set()
+ for pos in xrange(start, end):
+ lang = _RE_LANG_ATTRIBUTE.search(grd_lines[pos]).group(1)
+ locale = _FixChromiumLangAttribute(lang)
+ locales.add(locale)
+
+ missing_locales = wanted_locales.difference(locales)
+ if not missing_locales:
+ continue
+
+ src_locale = 'en-GB'
+ src_lang_attribute = 'lang="%s"' % src_locale
+ src_line = None
+ for pos in xrange(start, end):
+ if src_lang_attribute in grd_lines[pos]:
+ src_line = grd_lines[pos]
+ break
+
+ if not src_line:
+ raise Exception(
+ 'Cannot find <file> element with "%s" lang attribute' % src_locale)
+
+ src_path = os.path.join(
+ os.path.dirname(grd_file),
+ _RE_PATH_ATTRIBUTE.search(src_line).group(1))
+
+ line_count = end - 1
+ for locale in missing_locales:
+ dst_line = src_line.replace(
+ 'lang="%s"' % src_locale, 'lang="%s"' % locale).replace(
+ '_%s.xtb' % src_locale, '_%s.xtb' % locale)
+ grd_lines.insert(line_count, dst_line)
+ line_count += 1
+
+ dst_path = src_path.replace('_%s.xtb' % src_locale, '_%s.xtb' % locale)
+ _CreateFakeXtbFileFrom(src_path, dst_path, locale)
+
+  # Sort the new <file> elements.
+ return _SortGrdElementsRanges(grd_lines, _IsTranslationGrdOutputLine)
+
+
+##########################################################################
+##########################################################################
+#####
+##### G N A N D R O I D O U T P U T S
+#####
+##########################################################################
+##########################################################################
+
+_RE_GN_VALUES_LIST_LINE = re.compile(
+ r'^\s*".*values(\-([A-Za-z0-9-]+))?/.*\.xml",\s*$')
+
+def _IsBuildGnInputFile(input_file):
+ """Returns True iff this is a BUILD.gn file."""
+ return os.path.basename(input_file) == 'BUILD.gn'
+
+
+def _GetAndroidGnOutputLocale(line):
+ """Check a GN list, and return its Android locale if it is an output .xml"""
+ m = _RE_GN_VALUES_LIST_LINE.match(line)
+ if not m:
+ return None
+
+ if m.group(1): # First group is optional and contains group 2.
+ return m.group(2)
+
+ return resource_utils.ToAndroidLocaleName(_DEFAULT_LOCALE)
+
+
+def _IsAndroidGnOutputLine(line):
+ """Returns True iff this is an Android-specific localized .xml output."""
+  return _GetAndroidGnOutputLocale(line) is not None
+
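+# Illustrative self-checks, mirroring the regexp asserts used for the .grd
+# helpers above (the sample list entries are made up):
+assert _GetAndroidGnOutputLocale('  "res/values-fr/strings.xml",') == 'fr'
+assert not _IsAndroidGnOutputLine('  "res/layout/main.xml",')
+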
+
+def _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end):
+ """Check that a range of GN lines corresponds to localized strings.
+
+ Special case: Some BUILD.gn files list several non-localized .xml files
+ that should be ignored by this function, e.g. in
+ components/cronet/android/BUILD.gn, the following appears:
+
+ inputs = [
+ ...
+ "sample/res/layout/activity_main.xml",
+ "sample/res/layout/dialog_url.xml",
+ "sample/res/values/dimens.xml",
+ "sample/res/values/strings.xml",
+ ...
+ ]
+
+ These are non-localized strings, and should be ignored. This function is
+ used to detect them quickly.
+ """
+ for pos in xrange(start, end):
+    if 'values/' not in gn_lines[pos]:
+ return True
+ return False
+
+
+def _CheckGnOutputsRange(gn_lines, start, end, wanted_locales):
+  """Check one range of Android output lines against |wanted_locales|."""
+ if not _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end):
+ return []
+
+ errors = []
+ locales = set()
+ for pos in xrange(start, end):
+ line = gn_lines[pos]
+ android_locale = _GetAndroidGnOutputLocale(line)
+    assert android_locale is not None
+ cr_locale = resource_utils.ToChromiumLocaleName(android_locale)
+ if cr_locale in locales:
+      errors.append('%d: Redefinition of output for "%s" locale' %
+ (pos + 1, android_locale))
+ locales.add(cr_locale)
+
+ extra_locales = locales.difference(wanted_locales)
+ if extra_locales:
+ errors.append('%d-%d: Extra locales: %s' % (start + 1, end + 1,
+ sorted(extra_locales)))
+
+ missing_locales = wanted_locales.difference(locales)
+ if missing_locales:
+ errors.append('%d-%d: Missing locales: %s' % (start + 1, end + 1,
+ sorted(missing_locales)))
+
+ return errors
+
+
+def _CheckGnAndroidOutputs(gn_file, gn_lines, wanted_locales):
+  """Check all Android .xml output lists in a BUILD.gn file."""
+ intervals = _BuildIntervalList(gn_lines, _IsAndroidGnOutputLine)
+ errors = []
+ for start, end in intervals:
+ errors += _CheckGnOutputsRange(gn_lines, start, end, wanted_locales)
+ return errors
+
+
+def _AddMissingLocalesInGnAndroidOutputs(gn_file, gn_lines, wanted_locales):
+  """Fix a BUILD.gn file's lines by adding missing Android .xml outputs."""
+ intervals = _BuildIntervalList(gn_lines, _IsAndroidGnOutputLine)
+ # NOTE: Since this may insert new lines to each interval, process the
+ # list in reverse order to maintain valid (start,end) positions during
+ # the iteration.
+ for start, end in reversed(intervals):
+ if not _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end):
+ continue
+
+ locales = set()
+ for pos in xrange(start, end):
+ lang = _GetAndroidGnOutputLocale(gn_lines[pos])
+ locale = resource_utils.ToChromiumLocaleName(lang)
+ locales.add(locale)
+
+ missing_locales = wanted_locales.difference(locales)
+ if not missing_locales:
+ continue
+
+ src_locale = 'bg'
+ src_values = 'values-%s/' % resource_utils.ToAndroidLocaleName(src_locale)
+ src_line = None
+ for pos in xrange(start, end):
+ if src_values in gn_lines[pos]:
+ src_line = gn_lines[pos]
+ break
+
+ if not src_line:
+ raise Exception(
+ 'Cannot find output list item with "%s" locale' % src_locale)
+
+ line_count = end - 1
+ for locale in missing_locales:
+ if locale == _DEFAULT_LOCALE:
+ dst_line = src_line.replace('values-%s/' % src_locale, 'values/')
+ else:
+ dst_line = src_line.replace(
+ 'values-%s/' % src_locale,
+ 'values-%s/' % resource_utils.ToAndroidLocaleName(locale))
+ gn_lines.insert(line_count, dst_line)
+ line_count += 1
+
+ gn_lines = _SortListSubRange(
+ gn_lines, start, line_count,
+ lambda line: _RE_GN_VALUES_LIST_LINE.match(line).group(1))
+
+ return gn_lines
+
+
+##########################################################################
+##########################################################################
+#####
+##### T R A N S L A T I O N E X P E C T A T I O N S
+#####
+##########################################################################
+##########################################################################
+
+_EXPECTATIONS_FILENAME = 'translation_expectations.pyl'
+
+# Technical note: the format of translation_expectations.pyl
+# is a 'Python literal', which defines a Python dictionary, so it should
+# be easy to parse. However, when modifying it, care should be taken
+# to respect the line comments and the order of keys within the text
+# file.
+
+
+def _ReadPythonLiteralFile(pyl_path):
+ """Read a .pyl file into a Python data structure."""
+ with open(pyl_path) as f:
+ pyl_content = f.read()
+ # Evaluate as a Python data structure, use an empty global
+ # and local dictionary.
+ return eval(pyl_content, dict(), dict())
+
+
+def _UpdateLocalesInExpectationLines(pyl_lines,
+ wanted_locales,
+ available_width=79):
+ """Update the locales list(s) found in an expectations file.
+
+ Args:
+ pyl_lines: Iterable of input lines from the file.
+ wanted_locales: Set or list of new locale names.
+    available_width: Optional number of character columns used
+ to word-wrap the new list items.
+ Returns:
+ New list of updated lines.
+ """
+ locales_list = ['"%s"' % loc for loc in sorted(wanted_locales)]
+ result = []
+ line_count = len(pyl_lines)
+ line_num = 0
+ DICT_START = '"languages": ['
+ while line_num < line_count:
+ line = pyl_lines[line_num]
+ line_num += 1
+ result.append(line)
+ # Look for start of "languages" dictionary.
+ pos = line.find(DICT_START)
+ if pos < 0:
+ continue
+
+ start_margin = pos
+ start_line = line_num
+ # Skip over all lines from the list.
+ while (line_num < line_count and
+ not pyl_lines[line_num].rstrip().endswith('],')):
+ line_num += 1
+ continue
+
+ if line_num == line_count:
+ raise Exception('%d: Missing list termination!' % start_line)
+
+ # Format the new list according to the new margin.
+ locale_width = available_width - (start_margin + 2)
+ locale_lines = _PrettyPrintListAsLines(
+ locales_list, locale_width, trailing_comma=True)
+ for locale_line in locale_lines:
+ result.append(' ' * (start_margin + 2) + locale_line)
+ result.append(' ' * start_margin + '],')
+ line_num += 1
+
+ return result
+
+
+class _UpdateLocalesInExpectationLinesTest(unittest.TestCase):
+
+ def test_simple(self):
+ self.maxDiff = 1000
+ input_text = r'''
+# This comment should be preserved
+# 23456789012345678901234567890123456789
+{
+ "android_grd": {
+ "languages": [
+ "aa", "bb", "cc", "dd", "ee",
+ "ff", "gg", "hh", "ii", "jj",
+ "kk"],
+ },
+ # Example with bad indentation in input.
+ "another_grd": {
+ "languages": [
+ "aa", "bb", "cc", "dd", "ee", "ff", "gg", "hh", "ii", "jj", "kk",
+ ],
+ },
+}
+'''
+ expected_text = r'''
+# This comment should be preserved
+# 23456789012345678901234567890123456789
+{
+ "android_grd": {
+ "languages": [
+ "A2", "AA", "BB", "CC", "DD",
+ "E2", "EE", "FF", "GG", "HH",
+ "I2", "II", "JJ", "KK",
+ ],
+ },
+ # Example with bad indentation in input.
+ "another_grd": {
+ "languages": [
+ "A2", "AA", "BB", "CC", "DD",
+ "E2", "EE", "FF", "GG", "HH",
+ "I2", "II", "JJ", "KK",
+ ],
+ },
+}
+'''
+ input_lines = input_text.splitlines()
+ test_locales = ([
+ 'AA', 'BB', 'CC', 'DD', 'EE', 'FF', 'GG', 'HH', 'II', 'JJ', 'KK', 'A2',
+ 'E2', 'I2'
+ ])
+ expected_lines = expected_text.splitlines()
+ self.assertListEqual(
+ _UpdateLocalesInExpectationLines(input_lines, test_locales, 40),
+ expected_lines)
+
+ def test_missing_list_termination(self):
+ input_lines = r'''
+ "languages": ['
+ "aa", "bb", "cc", "dd"
+'''.splitlines()
+ with self.assertRaises(Exception) as cm:
+ _UpdateLocalesInExpectationLines(input_lines, ['a', 'b'], 40)
+
+ self.assertEqual(str(cm.exception), '2: Missing list termination!')
+
+
+def _UpdateLocalesInExpectationFile(pyl_path, wanted_locales):
+ """Update all locales listed in a given expectations file.
+
+ Args:
+ pyl_path: Path to .pyl file to update.
+ wanted_locales: List of locales that need to be written to
+ the file.
+ """
+ tc_locales = {
+ _FixTranslationConsoleLocaleName(locale)
+ for locale in set(wanted_locales) - set([_DEFAULT_LOCALE])
+ }
+
+ with open(pyl_path) as f:
+ input_lines = [l.rstrip() for l in f.readlines()]
+
+ updated_lines = _UpdateLocalesInExpectationLines(input_lines, tc_locales)
+ with build_utils.AtomicOutput(pyl_path) as f:
+ f.writelines('\n'.join(updated_lines) + '\n')
+
+
+##########################################################################
+##########################################################################
+#####
+##### C H E C K E V E R Y T H I N G
+#####
+##########################################################################
+##########################################################################
+
+# pylint: enable=unused-argument
+
+
+def _IsAllInputFile(input_file):
+  """Returns True iff |input_file| is a supported input file type."""
+ return _IsGritInputFile(input_file) or _IsBuildGnInputFile(input_file)
+
+
+def _CheckAllFiles(input_file, input_lines, wanted_locales):
+  """Check a single input file, dispatching on its file type."""
+ errors = []
+ if _IsGritInputFile(input_file):
+ errors += _CheckGrdTranslations(input_file, input_lines, wanted_locales)
+ errors += _CheckGrdAndroidOutputElements(
+ input_file, input_lines, wanted_locales)
+ elif _IsBuildGnInputFile(input_file):
+ errors += _CheckGnAndroidOutputs(input_file, input_lines, wanted_locales)
+ return errors
+
+
+def _AddMissingLocalesInAllFiles(input_file, input_lines, wanted_locales):
+  """Fix a single input file's lines, dispatching on its file type."""
+ if _IsGritInputFile(input_file):
+ lines = _AddMissingLocalesInGrdTranslations(
+ input_file, input_lines, wanted_locales)
+ lines = _AddMissingLocalesInGrdAndroidOutputs(
+ input_file, lines, wanted_locales)
+  elif _IsBuildGnInputFile(input_file):
+    lines = _AddMissingLocalesInGnAndroidOutputs(
+        input_file, input_lines, wanted_locales)
+  else:
+    lines = input_lines
+  return lines
+
+
+##########################################################################
+##########################################################################
+#####
+##### C O M M A N D H A N D L I N G
+#####
+##########################################################################
+##########################################################################
+
+class _Command(object):
+ """A base class for all commands recognized by this script.
+
+ Usage is the following:
+ 1) Derived classes must re-define the following class-based fields:
+ - name: Command name (e.g. 'list-locales')
+ - description: Command short description.
+ - long_description: Optional. Command long description.
+ NOTE: As a convenience, if the first character is a newline,
+ it will be omitted in the help output.
+
+ 2) Derived classes for commands that take arguments should override
+ RegisterExtraArgs(), which receives a corresponding argparse
+ sub-parser as argument.
+
+  3) Derived classes should implement a Run() method, which can read
+ the current arguments from self.args.
+ """
+ name = None
+ description = None
+ long_description = None
+
+ def __init__(self):
+ self._parser = None
+ self.args = None
+
+ def RegisterExtraArgs(self, subparser):
+ pass
+
+ def RegisterArgs(self, parser):
+ subp = parser.add_parser(
+ self.name, help=self.description,
+ description=self.long_description or self.description,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ self._parser = subp
+ subp.set_defaults(command=self)
+ group = subp.add_argument_group('%s arguments' % self.name)
+ self.RegisterExtraArgs(group)
+
+ def ProcessArgs(self, args):
+ self.args = args
+
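+# Minimal sketch of a derived command, following the three steps described in
+# the _Command docstring (all names below are hypothetical):
+#
+#   class _FooCommand(_Command):
+#     name = 'foo'
+#     description = 'Do foo things.'
+#
+#     def RegisterExtraArgs(self, group):
+#       group.add_argument('--bar', help='Optional bar value.')
+#
+#     def Run(self):
+#       print 'foo: --bar=%s' % self.args.bar
+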
+
+class _ListLocalesCommand(_Command):
+ """Implement the 'list-locales' command to list locale lists of interest."""
+ name = 'list-locales'
+ description = 'List supported Chrome locales'
+ long_description = r'''
+List locales of interest. By default this prints all locales supported by
+Chrome, but `--type=android_omitted` can be used to print the list of locales
+omitted from Android APKs (but not app bundles), and `--type=ios_unsupported`
+for the list of locales unsupported on iOS.
+
+These values are extracted directly from build/config/locales.gni.
+
+Additionally, use the --as-json argument to print the list as a JSON list,
+instead of the default format (which is a space-separated list of locale names).
+'''
+
+ # Maps type argument to a function returning the corresponding locales list.
+ TYPE_MAP = {
+ 'all': ChromeLocales,
+ 'android_omitted': AndroidOmittedLocales,
+ 'ios_unsupported': IosUnsupportedLocales,
+ }
+
+ def RegisterExtraArgs(self, group):
+ group.add_argument(
+ '--as-json',
+ action='store_true',
+ help='Output as JSON list.')
+ group.add_argument(
+ '--type',
+ choices=tuple(self.TYPE_MAP.viewkeys()),
+ default='all',
+ help='Select type of locale list to print.')
+
+ def Run(self):
+ locale_list = self.TYPE_MAP[self.args.type]()
+ if self.args.as_json:
+ print '[%s]' % ", ".join("'%s'" % loc for loc in locale_list)
+ else:
+ print ' '.join(locale_list)
+
+
+class _CheckInputFileBaseCommand(_Command):
+ """Used as a base for other _Command subclasses that check input files.
+
+ Subclasses should also define the following class-level variables:
+
+ - select_file_func:
+      A predicate that receives a file name (not path) and returns True if it
+ should be selected for inspection. Used when scanning directories with
+ '--scan-dir <dir>'.
+
+ - check_func:
+ - fix_func:
+ Two functions passed as parameters to _ProcessFile(), see relevant
+ documentation in this function's definition.
+ """
+ select_file_func = None
+ check_func = None
+ fix_func = None
+
+ def RegisterExtraArgs(self, group):
+ group.add_argument(
+ '--scan-dir',
+ action='append',
+ help='Optional directory to scan for input files recursively.')
+ group.add_argument(
+ 'input',
+ nargs='*',
+ help='Input file(s) to check.')
+ group.add_argument(
+ '--fix-inplace',
+ action='store_true',
+ help='Try to fix the files in-place too.')
+ group.add_argument(
+ '--add-locales',
+ help='Space-separated list of additional locales to use')
+
+ def Run(self):
+ args = self.args
+ input_files = []
+ if args.input:
+ input_files = args.input
+ if args.scan_dir:
+ input_files.extend(_ScanDirectoriesForFiles(
+ args.scan_dir, self.select_file_func.__func__))
+ locales = ChromeLocales()
+ if args.add_locales:
+ locales.extend(args.add_locales.split(' '))
+
+ locales = set(locales)
+
+ for input_file in input_files:
+ _ProcessFile(input_file,
+ locales,
+ self.check_func.__func__,
+ self.fix_func.__func__ if args.fix_inplace else None)
+ print '%sDone.' % (_CONSOLE_START_LINE)
+
+
+class _CheckGrdAndroidOutputsCommand(_CheckInputFileBaseCommand):
+ name = 'check-grd-android-outputs'
+ description = (
+      'Check the Android resource (.xml) file outputs in GRIT input files.')
+  long_description = r'''
+Check the Android .xml file outputs in one or more input GRIT (.grd) files
+for the following conditions:
+
+ - Each item has a correct 'lang' attribute.
+ - There are no duplicated lines for the same 'lang' attribute.
+  - There are no extra locales that Chromium doesn't want.
+  - No wanted locale is missing.
+ - Filenames exist for each listed locale.
+ - Filenames are well-formed.
+'''
+ select_file_func = _IsGritInputFile
+ check_func = _CheckGrdAndroidOutputElements
+ fix_func = _AddMissingLocalesInGrdAndroidOutputs
+
+
+class _CheckGrdTranslationsCommand(_CheckInputFileBaseCommand):
+ name = 'check-grd-translations'
+ description = (
+      'Check the translation (.xtb) files generated by .grd input files.')
+ long_description = r'''
+Check the translation (.xtb) file outputs in one or more input GRIT (.grd) files
+for the following conditions:
+
+ - Each item has a correct 'lang' attribute.
+ - There are no duplicated lines for the same 'lang' attribute.
+  - There are no extra locales that Chromium doesn't want.
+  - No wanted locale is missing.
+ - Each item has a 'path' attribute.
+  - Each such path value ends with '.xtb'.
+'''
+ select_file_func = _IsGritInputFile
+ check_func = _CheckGrdTranslations
+ fix_func = _AddMissingLocalesInGrdTranslations
+
+
+class _CheckGnAndroidOutputsCommand(_CheckInputFileBaseCommand):
+ name = 'check-gn-android-outputs'
+ description = 'Check the Android .xml file lists in GN build files.'
+ long_description = r'''
+Check one or more BUILD.gn files, looking for lists of Android resource .xml
+files, and checking that:
+
+ - There are no duplicated output files in the list.
+ - Each output file belongs to a wanted Chromium locale.
+ - There are no output files for unwanted Chromium locales.
+'''
+ select_file_func = _IsBuildGnInputFile
+ check_func = _CheckGnAndroidOutputs
+ fix_func = _AddMissingLocalesInGnAndroidOutputs
+
+
+class _CheckAllCommand(_CheckInputFileBaseCommand):
+ name = 'check-all'
+ description = 'Check everything.'
+ long_description = 'Equivalent to calling all other check-xxx commands.'
+ select_file_func = _IsAllInputFile
+ check_func = _CheckAllFiles
+ fix_func = _AddMissingLocalesInAllFiles
+
+
+class _UpdateExpectationsCommand(_Command):
+ name = 'update-expectations'
+ description = 'Update translation expectations file.'
+ long_description = r'''
+Update %s files to match the current list of locales supported by Chromium.
+This is especially useful to add new locales before updating any GRIT or GN
+input file with the --add-locales option.
+''' % _EXPECTATIONS_FILENAME
+
+ def RegisterExtraArgs(self, group):
+ group.add_argument(
+ '--add-locales',
+ help='Space-separated list of additional locales to use.')
+
+ def Run(self):
+ locales = ChromeLocales()
+ add_locales = self.args.add_locales
+ if add_locales:
+ locales.extend(add_locales.split(' '))
+
+ expectation_paths = [
+ 'tools/gritsettings/translation_expectations.pyl',
+ 'clank/tools/translation_expectations.pyl',
+ ]
+ missing_expectation_files = []
+    for path in expectation_paths:
+ file_path = os.path.join(_TOP_SRC_DIR, path)
+ if not os.path.exists(file_path):
+ missing_expectation_files.append(file_path)
+ continue
+ _UpdateLocalesInExpectationFile(file_path, locales)
+
+ if missing_expectation_files:
+ sys.stderr.write('WARNING: Missing file(s): %s\n' %
+ (', '.join(missing_expectation_files)))
+
+
+class _UnitTestsCommand(_Command):
+ name = 'unit-tests'
+  description = 'Run internal unit-tests for this script.'
+
+ def RegisterExtraArgs(self, group):
+ group.add_argument(
+ '-v', '--verbose', action='count', help='Increase test verbosity.')
+ group.add_argument('args', nargs=argparse.REMAINDER)
+
+ def Run(self):
+ argv = [_SCRIPT_NAME] + self.args.args
+ unittest.main(argv=argv, verbosity=self.args.verbose)
+
+
+# List of all commands supported by this script.
+_COMMANDS = [
+ _ListLocalesCommand,
+ _CheckGrdAndroidOutputsCommand,
+ _CheckGrdTranslationsCommand,
+ _CheckGnAndroidOutputsCommand,
+ _CheckAllCommand,
+ _UpdateExpectationsCommand,
+ _UnitTestsCommand,
+]
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ subparsers = parser.add_subparsers()
+ commands = [clazz() for clazz in _COMMANDS]
+ for command in commands:
+ command.RegisterArgs(subparsers)
+
+ if not argv:
+ argv = ['--help']
+
+ args = parser.parse_args(argv)
+ args.command.ProcessArgs(args)
+ args.command.Run()
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/deps/v8/build/mac/OWNERS b/deps/v8/build/mac/OWNERS
new file mode 100644
index 0000000000..a2d7cc837d
--- /dev/null
+++ b/deps/v8/build/mac/OWNERS
@@ -0,0 +1,4 @@
+mark@chromium.org
+rsesek@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/mac/find_sdk.py b/deps/v8/build/mac/find_sdk.py
new file mode 100755
index 0000000000..540a3202e4
--- /dev/null
+++ b/deps/v8/build/mac/find_sdk.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum SDK version to standard output. If --developer_dir is passed, then
+the script will use the Xcode toolchain located at DEVELOPER_DIR.
+
+Usage:
+ python find_sdk.py [--developer_dir DEVELOPER_DIR] 10.6 # Ignores SDKs < 10.6
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+from optparse import OptionParser
+
+
+class SdkError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+
+def parse_version(version_str):
+ """'10.6' => [10, 6]"""
+ return map(int, re.findall(r'(\d+)', version_str))
+
+
+def main():
+ parser = OptionParser()
+ parser.add_option("--verify",
+ action="store_true", dest="verify", default=False,
+ help="return the sdk argument and warn if it doesn't exist")
+ parser.add_option("--sdk_path",
+ action="store", type="string", dest="sdk_path", default="",
+ help="user-specified SDK path; bypasses verification")
+ parser.add_option("--print_sdk_path",
+ action="store_true", dest="print_sdk_path", default=False,
+ help="Additionally print the path the SDK (appears first).")
+ parser.add_option("--developer_dir", help='Path to Xcode.')
+ options, args = parser.parse_args()
+ if len(args) != 1:
+ parser.error('Please specify a minimum SDK version')
+ min_sdk_version = args[0]
+
+ if options.developer_dir:
+ os.environ['DEVELOPER_DIR'] = options.developer_dir
+
+ job = subprocess.Popen(['xcode-select', '-print-path'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ out, err = job.communicate()
+ if job.returncode != 0:
+ print >> sys.stderr, out
+ print >> sys.stderr, err
+ raise Exception('Error %d running xcode-select' % job.returncode)
+ sdk_dir = os.path.join(
+ out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
+ # Xcode must be installed, its license agreement must be accepted, and its
+ # command-line tools must be installed. Stand-alone installations (in
+ # /Library/Developer/CommandLineTools) are not supported.
+ # https://bugs.chromium.org/p/chromium/issues/detail?id=729990#c1
+  if not os.path.isdir(sdk_dir) or '.app/Contents/Developer' not in sdk_dir:
+ raise SdkError('Install Xcode, launch it, accept the license ' +
+ 'agreement, and run `sudo xcode-select -s /path/to/Xcode.app` ' +
+ 'to continue.')
+  sdks = [re.findall(r'^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+ sdks = [s[0] for s in sdks if s] # [['10.5'], ['10.6']] => ['10.5', '10.6']
+ sdks = [s for s in sdks # ['10.5', '10.6'] => ['10.6']
+ if parse_version(s) >= parse_version(min_sdk_version)]
+ if not sdks:
+ raise Exception('No %s+ SDK found' % min_sdk_version)
+ best_sdk = sorted(sdks, key=parse_version)[0]
+
+ if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
+ print >> sys.stderr, ''
+ print >> sys.stderr, ' vvvvvvv'
+ print >> sys.stderr, ''
+ print >> sys.stderr, \
+ 'This build requires the %s SDK, but it was not found on your system.' \
+ % min_sdk_version
+ print >> sys.stderr, \
+ 'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
+ print >> sys.stderr, ''
+ print >> sys.stderr, ' ^^^^^^^'
+ print >> sys.stderr, ''
+ sys.exit(1)
+
+ if options.print_sdk_path:
+ print subprocess.check_output(
+ ['xcrun', '-sdk', 'macosx' + best_sdk, '--show-sdk-path']).strip()
+
+ return best_sdk
+
+
+if __name__ == '__main__':
+ if sys.platform != 'darwin':
+ raise Exception("This script only runs on Mac")
+ print main()
+ sys.exit(0)
diff --git a/deps/v8/build/mac/should_use_hermetic_xcode.py b/deps/v8/build/mac/should_use_hermetic_xcode.py
new file mode 100755
index 0000000000..08e9886144
--- /dev/null
+++ b/deps/v8/build/mac/should_use_hermetic_xcode.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Prints "1" if Chrome targets should be built with hermetic Xcode.
+Prints "2" if Chrome targets should be built with hermetic Xcode, but the OS
+version does not meet the minimum requirements of the hermetic version of Xcode.
+Prints "3" if FORCE_MAC_TOOLCHAIN is set for an iOS target_os
+Otherwise prints "0".
+
+Usage:
+ python should_use_hermetic_xcode.py <target_os>
+"""
+
+import os
+import sys
+
+_THIS_DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+_BUILD_PATH = os.path.join(_THIS_DIR_PATH, os.pardir)
+sys.path.insert(0, _BUILD_PATH)
+
+import mac_toolchain
+
+
+def _IsCorpMachine():
+ return os.path.isdir('/Library/GoogleCorpSupport/')
+
+
+def main():
+ force_toolchain = os.environ.get('FORCE_MAC_TOOLCHAIN')
+ if force_toolchain and sys.argv[1] == 'ios':
+ return "3"
+ allow_corp = sys.argv[1] == 'mac' and _IsCorpMachine()
+ if force_toolchain or allow_corp:
+ if not mac_toolchain.PlatformMeetsHermeticXcodeRequirements():
+ return "2"
+ return "1"
+ else:
+ return "0"
+
+
+if __name__ == '__main__':
+ print main()
+ sys.exit(0)
diff --git a/deps/v8/build/mac/tweak_info_plist.gni b/deps/v8/build/mac/tweak_info_plist.gni
new file mode 100644
index 0000000000..9f4cbb3fdf
--- /dev/null
+++ b/deps/v8/build/mac/tweak_info_plist.gni
@@ -0,0 +1,85 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/lastchange.gni")
+
+# Template to run the tweak_info_plist.py script on a plist.
+#
+# Arguments:
+#
+# info_plist:
+# (optional), string, the plist to tweak.
+#
+# info_plists:
+# (optional), list of string, the plist files to merge and tweak.
+#
+# args:
+# (optional), list of string, the arguments to pass to the
+# tweak_info_plist.py script.
+#
+# Callers should use get_target_outputs() to get the output name. One of
+# info_plist or info_plists must be specified.
+template("tweak_info_plist") {
+ _output_name = "$target_gen_dir/${target_name}_tweaked.plist"
+
+ if (defined(invoker.info_plists)) {
+ assert(!defined(invoker.info_plist),
+ "Cannot have both info_plist and info_plists for $target_name")
+
+ _source_name = "$target_gen_dir/${target_name}_merged.plist"
+ _deps = [ ":" + target_name + "_merge_plist" ]
+
+ action(target_name + "_merge_plist") {
+ forward_variables_from(invoker, [ "testonly" ])
+ script = "//build/config/mac/plist_util.py"
+ sources = invoker.info_plists
+ outputs = [
+ _source_name,
+ ]
+ args = [
+ "merge",
+ "-f=xml1",
+ "-o=" + rebase_path(_source_name, root_build_dir),
+ ] + rebase_path(invoker.info_plists, root_build_dir)
+ }
+ } else {
+ assert(defined(invoker.info_plist),
+ "The info_plist must be specified in $target_name")
+
+ _source_name = invoker.info_plist
+ _deps = []
+ }
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "args",
+ "testonly",
+ ])
+ script = "//build/mac/tweak_info_plist.py"
+ inputs = [
+ script,
+ "//build/util/version.py",
+ lastchange_file,
+ "//chrome/VERSION",
+ ]
+ sources = [
+ _source_name,
+ ]
+ outputs = [
+ _output_name,
+ ]
+ if (!defined(args)) {
+ args = []
+ }
+ args += [
+ "--plist",
+ rebase_path(_source_name, root_build_dir),
+ "--output",
+ rebase_path(_output_name, root_build_dir),
+ "--platform=$current_os",
+ ]
+ deps = _deps
+ }
+}
diff --git a/deps/v8/build/mac/tweak_info_plist.py b/deps/v8/build/mac/tweak_info_plist.py
new file mode 100755
index 0000000000..9ea794b151
--- /dev/null
+++ b/deps/v8/build/mac/tweak_info_plist.py
@@ -0,0 +1,366 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases;
+# this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+# we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+# the Info.plist has changed. So even if we updated it, it's only looking
+# at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place. This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+import optparse
+import os
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+
+
+def _ConvertPlist(source_plist, output_plist, fmt):
+ """Convert |source_plist| to |fmt| and save as |output_plist|."""
+ return subprocess.call(
+ ['plutil', '-convert', fmt, '-o', output_plist, source_plist])
+
+
+def _GetOutput(args):
+ """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+ of the process. stderr is attached to the parent."""
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ return (stdout, proc.returncode)
+
+
+def _GetOutputNoError(args):
+ """Similar to _GetOutput() but ignores stderr. If there's an error launching
+ the child (like file not found), the exception will be caught and (None, 1)
+ will be returned to mimic quiet failure."""
+ try:
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except OSError:
+ return (None, 1)
+ (stdout, stderr) = proc.communicate()
+ return (stdout, proc.returncode)
+
+
+def _RemoveKeys(plist, *keys):
+ """Removes a varargs of keys from the plist."""
+ for key in keys:
+ try:
+ del plist[key]
+ except KeyError:
+ pass
+
+
+def _ApplyVersionOverrides(version, keys, overrides, separator='.'):
+ """Applies version overrides.
+
+ Given a |version| string as "a.b.c.d" (assuming a default separator) with
+ version components named by |keys| then overrides any value that is present
+ in |overrides|.
+
+ >>> _ApplyVersionOverrides('a.b', ['major', 'minor'], {'minor': 'd'})
+ 'a.d'
+ """
+ if not overrides:
+ return version
+ version_values = version.split(separator)
+ for i, (key, value) in enumerate(zip(keys, version_values)):
+ if key in overrides:
+ version_values[i] = overrides[key]
+ return separator.join(version_values)
+
+
+def _GetVersion(version_format, values, overrides=None):
+ """Generates a version number according to |version_format| using the values
+ from |values| or |overrides| if given."""
+ result = version_format
+ for key in values:
+ if overrides and key in overrides:
+ value = overrides[key]
+ else:
+ value = values[key]
+ result = result.replace('@%s@' % key, value)
+ return result
+
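+# Illustrative example: each @KEY@ placeholder is substituted, and a value
+# from |overrides| wins over the one from |values|.
+assert _GetVersion('@MAJOR@.@MINOR@', {'MAJOR': '1', 'MINOR': '2'},
+                   overrides={'MINOR': '9'}) == '1.9'
+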
+
+def _AddVersionKeys(
+ plist, version_format_for_key, version=None, overrides=None):
+ """Adds the product version number into the plist. Returns True on success and
+ False on error. The error will be printed to stderr."""
+ if not version:
+ # Pull in the Chrome version number.
+ VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+ VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+ (stdout, retval) = _GetOutput([
+ VERSION_TOOL, '-f', VERSION_FILE,
+ '-t', '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
+
+ # If the command finished with a non-zero return code, then report the
+ # error up.
+ if retval != 0:
+ return False
+
+ version = stdout.strip()
+
+  # Parse the given version number, which should be in MAJOR.MINOR.BUILD.PATCH
+  # format (where each value is a number). Note that str.isdigit() returns
+  # True if the string is composed only of digits (and thus matches \d+).
+ groups = version.split('.')
+ if len(groups) != 4 or not all(element.isdigit() for element in groups):
+ print >>sys.stderr, 'Invalid version string specified: "%s"' % version
+ return False
+ values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups))
+
+ for key in version_format_for_key:
+ plist[key] = _GetVersion(version_format_for_key[key], values, overrides)
+
+ # Return with no error.
+ return True
+
+
+def _DoSCMKeys(plist, add_keys):
+ """Adds the SCM information, visible in about:version, to property list. If
+ |add_keys| is True, it will insert the keys, otherwise it will remove them."""
+ scm_revision = None
+ if add_keys:
+ # Pull in the Chrome revision number.
+ VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+ LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
+ (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
+ '@LASTCHANGE@'])
+ if retval:
+ return False
+ scm_revision = stdout.rstrip()
+
+  # Remove any existing key first; re-add it below if a revision was found.
+ _RemoveKeys(plist, 'SCMRevision')
+ if scm_revision != None:
+ plist['SCMRevision'] = scm_revision
+ elif add_keys:
+ print >>sys.stderr, 'Could not determine SCM revision. This may be OK.'
+
+ return True
+
+
+def _AddBreakpadKeys(plist, branding, platform, staging):
+ """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+ also requires the |branding| argument."""
+ plist['BreakpadReportInterval'] = '3600' # Deliberately a string.
+ plist['BreakpadProduct'] = '%s_%s' % (branding, platform)
+ plist['BreakpadProductDisplay'] = branding
+ if staging:
+ plist['BreakpadURL'] = 'https://clients2.google.com/cr/staging_report'
+ else:
+ plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+
+ # These are both deliberately strings and not boolean.
+ plist['BreakpadSendAndExit'] = 'YES'
+ plist['BreakpadSkipConfirm'] = 'YES'
+
+
+def _RemoveBreakpadKeys(plist):
+ """Removes any set Breakpad keys."""
+ _RemoveKeys(plist,
+ 'BreakpadURL',
+ 'BreakpadReportInterval',
+ 'BreakpadProduct',
+ 'BreakpadProductDisplay',
+ 'BreakpadVersion',
+ 'BreakpadSendAndExit',
+ 'BreakpadSkipConfirm')
+
+
+def _TagSuffixes():
+ # Keep this list sorted in the order that tag suffix components are to
+ # appear in a tag value. That is to say, it should be sorted per ASCII.
+ components = ('full',)
+ assert tuple(sorted(components)) == components
+
+ components_len = len(components)
+ combinations = 1 << components_len
+ tag_suffixes = []
+ for combination in xrange(0, combinations):
+ tag_suffix = ''
+ for component_index in xrange(0, components_len):
+ if combination & (1 << component_index):
+ tag_suffix += '-' + components[component_index]
+ tag_suffixes.append(tag_suffix)
+ return tag_suffixes
+
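+# Illustrative example: with the single 'full' component above, this yields
+# ['', '-full']; two components ('a', 'b') would yield
+# ['', '-a', '-b', '-a-b'].
+assert _TagSuffixes() == ['', '-full']
+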
+
+def _AddKeystoneKeys(plist, bundle_identifier):
+ """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+ also requires the |bundle_identifier| argument (com.example.product)."""
+ plist['KSVersion'] = plist['CFBundleShortVersionString']
+ plist['KSProductID'] = bundle_identifier
+ plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+ _RemoveKeys(plist, 'KSChannelID')
+ for tag_suffix in _TagSuffixes():
+ if tag_suffix:
+ plist['KSChannelID' + tag_suffix] = tag_suffix
+
+
+def _RemoveKeystoneKeys(plist):
+ """Removes any set Keystone keys."""
+ _RemoveKeys(plist,
+ 'KSVersion',
+ 'KSProductID',
+ 'KSUpdateURL')
+
+ tag_keys = []
+ for tag_suffix in _TagSuffixes():
+ tag_keys.append('KSChannelID' + tag_suffix)
+ _RemoveKeys(plist, *tag_keys)
+
+
+def Main(argv):
+ parser = optparse.OptionParser('%prog [options]')
+ parser.add_option('--plist', dest='plist_path', action='store',
+ type='string', default=None, help='The path of the plist to tweak.')
+ parser.add_option('--output', dest='plist_output', action='store',
+ type='string', default=None, help='If specified, the path to output ' + \
+ 'the tweaked plist, rather than overwriting the input.')
+  parser.add_option('--breakpad', dest='use_breakpad', action='store',
+      type='int', default=0, help='Enable Breakpad [1 or 0]')
+ parser.add_option('--breakpad_staging', dest='use_breakpad_staging',
+ action='store_true', default=False,
+ help='Use staging breakpad to upload reports. Ignored if --breakpad=0.')
+  parser.add_option('--keystone', dest='use_keystone', action='store',
+      type='int', default=0, help='Enable Keystone [1 or 0]')
+  parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
+      default=1, help='Add SCM metadata [1 or 0]')
+ parser.add_option('--branding', dest='branding', action='store',
+ type='string', default=None, help='The branding of the binary')
+ parser.add_option('--bundle_id', dest='bundle_identifier',
+ action='store', type='string', default=None,
+ help='The bundle id of the binary')
+ parser.add_option('--platform', choices=('ios', 'mac'), default='mac',
+ help='The target platform of the bundle')
+  parser.add_option('--version-overrides', action='append',
+      help='Key-value pair to override a specific component of the version, '
+          'like key=value (can be passed multiple times to configure '
+          'more than one override)')
+  parser.add_option('--format', choices=('binary1', 'xml1', 'json'),
+      default='xml1', help='Format to use when writing property list '
+      '(default: %default)')
+ parser.add_option('--version', dest='version', action='store', type='string',
+ default=None, help='The version string [major.minor.build.patch]')
+ (options, args) = parser.parse_args(argv)
+
+ if len(args) > 0:
+ print >>sys.stderr, parser.get_usage()
+ return 1
+
+ if not options.plist_path:
+ print >>sys.stderr, 'No --plist specified.'
+ return 1
+
+ # Read the plist into its parsed format. Convert the file to 'xml1' as
+ # plistlib only supports that format in Python 2.7.
+ with tempfile.NamedTemporaryFile() as temp_info_plist:
+ retcode = _ConvertPlist(options.plist_path, temp_info_plist.name, 'xml1')
+ if retcode != 0:
+ return retcode
+ plist = plistlib.readPlist(temp_info_plist.name)
+
+ # Convert overrides.
+ overrides = {}
+ if options.version_overrides:
+ for pair in options.version_overrides:
+ if not '=' in pair:
+ print >>sys.stderr, 'Invalid value for --version-overrides:', pair
+ return 1
+ key, value = pair.split('=', 1)
+ overrides[key] = value
+ if key not in ('MAJOR', 'MINOR', 'BUILD', 'PATCH'):
+ print >>sys.stderr, 'Unsupported key for --version-overrides:', key
+ return 1
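+  # Example (hypothetical flags): passing
+  #   --version-overrides=MINOR=1 --version-overrides=PATCH=0
+  # yields overrides == {'MINOR': '1', 'PATCH': '0'}; components that are
+  # not overridden still come from the regular version lookup.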
+
+ if options.platform == 'mac':
+ version_format_for_key = {
+ # Add public version info so "Get Info" works.
+ 'CFBundleShortVersionString': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@',
+
+      # Honor the 429496.72.95 limit. The maximum comes from splitting
+      # 2^32 - 1 into 6, 2, 2 digits. The limitation was present in Tiger;
+      # it may have been fixed in a later OS release, but that hasn't been
+      # tested (it's easy enough to find out with "lsregister -dump").
+      # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+      # BUILD will always be an increasing value, so BUILD.PATCH gives us
+      # something unique that meets what LS wants.
+ 'CFBundleVersion': '@BUILD@.@PATCH@',
+ }
+ else:
+ version_format_for_key = {
+ 'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@',
+ 'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+ }
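+  # Worked example (hypothetical version): with --version 76.0.3809.87 the
+  # mac branch yields CFBundleShortVersionString '76.0.3809.87' and
+  # CFBundleVersion '3809.87', while the ios branch yields '76.3809.87' and
+  # '76.0.3809.87' respectively.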
+
+ if options.use_breakpad:
+ version_format_for_key['BreakpadVersion'] = \
+ '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+
+ # Insert the product version.
+ if not _AddVersionKeys(
+ plist, version_format_for_key, version=options.version,
+ overrides=overrides):
+ return 2
+
+ # Add Breakpad if configured to do so.
+ if options.use_breakpad:
+ if options.branding is None:
+ print >>sys.stderr, 'Use of Breakpad requires branding.'
+ return 1
+ # Map "target_os" passed from gn via the --platform parameter
+ # to the platform as known by breakpad.
+ platform = {'mac': 'Mac', 'ios': 'iOS'}[options.platform]
+ _AddBreakpadKeys(plist, options.branding, platform,
+ options.use_breakpad_staging)
+ else:
+ _RemoveBreakpadKeys(plist)
+
+ # Add Keystone if configured to do so.
+ if options.use_keystone:
+ if options.bundle_identifier is None:
+ print >>sys.stderr, 'Use of Keystone requires the bundle id.'
+ return 1
+ _AddKeystoneKeys(plist, options.bundle_identifier)
+ else:
+ _RemoveKeystoneKeys(plist)
+
+ # Adds or removes any SCM keys.
+ if not _DoSCMKeys(plist, options.add_scm_info):
+ return 3
+
+ output_path = options.plist_path
+ if options.plist_output is not None:
+ output_path = options.plist_output
+
+ # Now that all keys have been mutated, rewrite the file.
+ with tempfile.NamedTemporaryFile() as temp_info_plist:
+ plistlib.writePlist(plist, temp_info_plist.name)
+
+    # Convert Info.plist to the format requested by the --format flag. Any
+    # format would work on Mac, but iOS requires a specific format.
+ return _ConvertPlist(temp_info_plist.name, output_path, options.format)
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/deps/v8/build/mac_toolchain.py b/deps/v8/build/mac_toolchain.py
new file mode 100755
index 0000000000..87ed256812
--- /dev/null
+++ b/deps/v8/build/mac_toolchain.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+If should_use_hermetic_xcode.py emits "1", and the current toolchain is out of
+date:
+ * Downloads the hermetic mac toolchain
+ * Requires CIPD authentication. Run `cipd auth-login`, use Google account.
+ * Accepts the license.
+ * If xcode-select and xcodebuild are not passwordless in sudoers, requires
+ user interaction.
+
+The toolchain version can be overridden by setting MAC_TOOLCHAIN_REVISION with
+the full revision, e.g. 9A235.
+"""
+
+import os
+import platform
+import shutil
+import subprocess
+import sys
+
+
+# This can be changed after running:
+# mac_toolchain upload -xcode-path path/to/Xcode.app
+MAC_TOOLCHAIN_VERSION = '9E501'
+
+# The toolchain will not be downloaded if the minimum OS version is not met.
+# 17 is the major version number for macOS 10.13.
+# 9E145 (Xcode 9.3) only runs on 10.13.2 and newer.
+MAC_MINIMUM_OS_VERSION = 17
+
+MAC_TOOLCHAIN_INSTALLER = 'mac_toolchain'
+
+# Absolute path to src/ directory.
+REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Absolute path to a file with gclient solutions.
+GCLIENT_CONFIG = os.path.join(os.path.dirname(REPO_ROOT), '.gclient')
+
+BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+TOOLCHAIN_ROOT = os.path.join(BASE_DIR, 'mac_files')
+TOOLCHAIN_BUILD_DIR = os.path.join(TOOLCHAIN_ROOT, 'Xcode.app')
+STAMP_FILE = os.path.join(TOOLCHAIN_ROOT, 'toolchain_build_revision')
+
+
+def PlatformMeetsHermeticXcodeRequirements():
+ major_version = int(platform.release().split('.')[0])
+ return major_version >= MAC_MINIMUM_OS_VERSION
+
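+# Illustration: platform.release() reports the Darwin kernel version, e.g.
+# '17.7.0' on macOS 10.13.6, so the check above compares its major component
+# (17) against MAC_MINIMUM_OS_VERSION.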
+
+def _UseHermeticToolchain():
+ current_dir = os.path.dirname(os.path.realpath(__file__))
+ script_path = os.path.join(current_dir, 'mac/should_use_hermetic_xcode.py')
+ proc = subprocess.Popen([script_path, 'mac'], stdout=subprocess.PIPE)
+ return '1' in proc.stdout.readline()
+
+
+def RequestCipdAuthentication():
+ """Requests that the user authenticate to access Xcode CIPD packages."""
+
+ print 'Access to Xcode CIPD package requires authentication.'
+ print '-----------------------------------------------------------------'
+ print
+ print 'You appear to be a Googler.'
+ print
+ print 'I\'m sorry for the hassle, but you may need to do a one-time manual'
+ print 'authentication. Please run:'
+ print
+ print ' cipd auth-login'
+ print
+ print 'and follow the instructions.'
+ print
+ print 'NOTE: Use your google.com credentials, not chromium.org.'
+ print
+ print '-----------------------------------------------------------------'
+ print
+ sys.stdout.flush()
+
+
+def PrintError(message):
+ # Flush buffers to ensure correct output ordering.
+ sys.stdout.flush()
+ sys.stderr.write(message + '\n')
+ sys.stderr.flush()
+
+
+def InstallXcode(xcode_build_version, installer_cmd, xcode_app_path):
+ """Installs the requested Xcode build version.
+
+ Args:
+ xcode_build_version: (string) Xcode build version to install.
+ installer_cmd: (string) Path to mac_toolchain command to install Xcode.
+ See https://chromium.googlesource.com/infra/infra/+/master/go/src/infra/cmd/mac_toolchain/
+ xcode_app_path: (string) Path to install the contents of Xcode.app.
+
+ Returns:
+ True if installation was successful. False otherwise.
+ """
+ args = [
+ installer_cmd, 'install',
+ '-kind', 'mac',
+ '-xcode-version', xcode_build_version.lower(),
+ '-output-dir', xcode_app_path,
+ ]
+
+ # Buildbot slaves need to use explicit credentials. LUCI bots should NOT set
+ # this variable.
+ creds = os.environ.get('MAC_TOOLCHAIN_CREDS')
+ if creds:
+ args.extend(['--service-account-json', creds])
+
+ try:
+ subprocess.check_call(args)
+ except subprocess.CalledProcessError as e:
+    PrintError('Xcode build version %s failed to install: %s' % (
+        xcode_build_version, e))
+ RequestCipdAuthentication()
+ return False
+ except OSError as e:
+ PrintError(('Xcode installer "%s" failed to execute'
+ ' (not on PATH or not installed).') % installer_cmd)
+ return False
+
+ return True
+
+
+def main():
+ if sys.platform != 'darwin':
+ return 0
+
+ if not _UseHermeticToolchain():
+    print 'Skipping Mac toolchain installation.'
+ return 0
+
+ if not PlatformMeetsHermeticXcodeRequirements():
+ print 'OS version does not support toolchain.'
+ return 0
+
+ toolchain_version = os.environ.get('MAC_TOOLCHAIN_REVISION',
+ MAC_TOOLCHAIN_VERSION)
+
+  # On developer machines, the mac_toolchain tool is provided by
+  # depot_tools. On the bots, the recipe is responsible for installing
+  # it and providing the path to the executable.
+ installer_cmd = os.environ.get('MAC_TOOLCHAIN_INSTALLER',
+ MAC_TOOLCHAIN_INSTALLER)
+
+ toolchain_root = TOOLCHAIN_ROOT
+ xcode_app_path = TOOLCHAIN_BUILD_DIR
+ stamp_file = STAMP_FILE
+
+ # Delete the old "hermetic" installation if detected.
+ # TODO(crbug.com/797051): remove this once the old "hermetic" solution is no
+ # longer in use.
+ if os.path.exists(stamp_file):
+ print 'Detected old hermetic installation at %s. Deleting.' % (
+ toolchain_root)
+ shutil.rmtree(toolchain_root)
+
+ success = InstallXcode(toolchain_version, installer_cmd, xcode_app_path)
+ if not success:
+ return 1
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/nocompile.gni b/deps/v8/build/nocompile.gni
new file mode 100644
index 0000000000..81d7626cfb
--- /dev/null
+++ b/deps/v8/build/nocompile.gni
@@ -0,0 +1,117 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests. A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+# http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a GN target with the following form:
+#
+# import("//build/nocompile.gni")
+# nocompile_test("my_module_nc_unittests") {
+# sources = [
+# 'nc_testset_1.nc',
+# 'nc_testset_2.nc',
+# ]
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile. Each individual test case in the file should be put in its own
+# #ifdef section. The expected output should be appended as a C++-style
+# comment containing a Python list of regular expressions; such lines will
+# likely exceed 80 characters. Giving a solid expected output is important
+# so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+# #if defined(TEST_NEEDS_SEMICOLON) // [r"expected ',' or ';' at end of input"]
+#
+# int a = 1
+#
+# #elif defined(TEST_NEEDS_CAST) // [r"invalid conversion from 'void*' to 'char*'"]
+#
+# void* a = NULL;
+# char* b = a;
+#
+# #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then we would change the
+# defines to:
+#
+# DISABLE_TEST_NEEDS_SEMICOLON
+# TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file containing a series of #error lines on
+# failure, or a set of trivially passing gunit TEST() functions on success.
+# This allows us to fail at the compile step when something goes wrong, and
+# to know during the unittest run that the test was at least processed when
+# things go right.
+
+import("//build/config/clang/clang.gni")
+import("//testing/test.gni")
+
+declare_args() {
+ # TODO(crbug.com/105388): make sure no-compile test is not flaky.
+ enable_nocompile_tests =
+ (is_linux || is_mac || is_ios) && is_clang && host_cpu == target_cpu
+}
+
+if (enable_nocompile_tests) {
+ import("//build/config/c++/c++.gni")
+ import("//build/config/sysroot.gni")
+ template("nocompile_test") {
+ nocompile_target = target_name + "_run_nocompile"
+
+ action_foreach(nocompile_target) {
+ testonly = true
+ script = "//tools/nocompile_driver.py"
+ sources = invoker.sources
+ deps = invoker.deps
+ if (defined(invoker.public_deps)) {
+ public_deps = invoker.public_deps
+ }
+
+ result_path = "$target_gen_dir/{{source_name_part}}_nc.cc"
+ depfile = "${result_path}.d"
+ outputs = [
+ result_path,
+ ]
+ args = [
+ rebase_path("$clang_base_path/bin/clang++", root_build_dir),
+ "4", # number of compilers to invoke in parallel.
+ "{{source}}",
+ rebase_path(result_path, root_build_dir),
+ "--",
+ "-nostdinc++",
+ "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
+ "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
+ "-std=c++14",
+ "-Wall",
+ "-Werror",
+ "-Wfatal-errors",
+ "-Wthread-safety",
+ "-I" + rebase_path("//", root_build_dir),
+ "-I" + rebase_path(root_gen_dir, root_build_dir),
+ ]
+ if (sysroot != "") {
+ args += [
+ "--sysroot",
+ rebase_path(sysroot, root_build_dir),
+ ]
+ }
+ }
+
+ test(target_name) {
+ deps = invoker.deps + [ ":$nocompile_target" ]
+ sources = get_target_outputs(":$nocompile_target")
+ }
+ }
+}
diff --git a/deps/v8/build/package_mac_toolchain.py b/deps/v8/build/package_mac_toolchain.py
new file mode 100755
index 0000000000..48672bb49a
--- /dev/null
+++ b/deps/v8/build/package_mac_toolchain.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compress and upload Mac toolchain files.
+
+Stored in https://pantheon.corp.google.com/storage/browser/chrome-mac-sdk/.
+"""
+
+import argparse
+import glob
+import os
+import plistlib
+import re
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+
+TOOLCHAIN_URL = "gs://chrome-mac-sdk"
+
+# It's important to at least remove unused Platform folders to cut down on the
+# size of the toolchain folder. There are other various unused folders that
+# have been removed through trial and error. If future versions of Xcode become
+# problematic it's possible this list is incorrect, and can be reduced to just
+# the unused platforms. On the flip side, it's likely more directories can be
+# excluded.
+DEFAULT_EXCLUDE_FOLDERS = [
+'Contents/Applications',
+'Contents/Developer/Documentation',
+'Contents/Developer/Library/Xcode/Templates',
+'Contents/Developer/Platforms/AppleTVOS.platform',
+'Contents/Developer/Platforms/AppleTVSimulator.platform',
+'Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/'
+ 'usr/share/man/',
+'Contents/Developer/Platforms/WatchOS.platform',
+'Contents/Developer/Platforms/WatchSimulator.platform',
+'Contents/Developer/Toolchains/Swift*',
+'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift',
+'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator',
+'Contents/Resources/Packages/MobileDevice.pkg',
+]
+
+MAC_EXCLUDE_FOLDERS = [
+# The only things we need in iPhoneOS.platform on mac are:
+#   Developer/Library/Xcode/PrivatePlugins
+#   Info.Plist
+# Excluding everything else is the cleanest way to keep just these.
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/Frameworks',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/GPUTools',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/'
+ 'GPUToolsPlatform',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/'
+ 'PrivateFrameworks',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs',
+'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport',
+'Contents/Developer/Platforms/iPhoneOS.platform/Library',
+'Contents/Developer/Platforms/iPhoneOS.platform/usr',
+
+# iPhoneSimulator has a similar requirement, but the bulk of the binary size
+# is in Developer/SDKs, so only that is excluded here.
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs',
+]
+
+IOS_EXCLUDE_FOLDERS = [
+'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+ 'iPhoneSimulator.sdk/Applications/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+ 'iPhoneSimulator.sdk/System/Library/AccessibilityBundles/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+ 'iPhoneSimulator.sdk/System/Library/CoreServices/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+ 'iPhoneSimulator.sdk/System/Library/LinguisticData/',
+]
+
+def main():
+ """Compress |target_dir| and upload to |TOOLCHAIN_URL|"""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('target_dir',
+ help="Xcode installation directory.")
+ parser.add_argument('platform', choices=['ios', 'mac'],
+ help="Target platform for bundle.")
+ parser_args = parser.parse_args()
+
+ # Verify this looks like an Xcode directory.
+ contents_dir = os.path.join(parser_args.target_dir, 'Contents')
+ plist_file = os.path.join(contents_dir, 'version.plist')
+ try:
+ info = plistlib.readPlist(plist_file)
+ except:
+ print "Invalid Xcode dir."
+ return 0
+ build_version = info['ProductBuildVersion']
+
+ # Look for previous toolchain tgz files with the same |build_version|.
+ fname = 'toolchain'
+ if parser_args.platform == 'ios':
+ fname = 'ios-' + fname
+ wildcard_filename = '%s/%s-%s-*.tgz' % (TOOLCHAIN_URL, fname, build_version)
+ p = subprocess.Popen(['gsutil.py', 'ls', wildcard_filename],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ output = p.communicate()[0]
+ next_count = 1
+ if p.returncode == 0:
+ next_count = len(output.split('\n'))
+ sys.stdout.write("%s already exists (%s). "
+ "Do you want to create another? [y/n] "
+ % (build_version, next_count - 1))
+
+ if raw_input().lower() not in set(['yes','y', 'ye']):
+ print "Skipping duplicate upload."
+ return 0
+
+ os.chdir(parser_args.target_dir)
+ toolchain_file_name = "%s-%s-%s" % (fname, build_version, next_count)
+ toolchain_name = tempfile.mktemp(suffix='toolchain.tgz')
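+  # Illustration (hypothetical build): for Xcode build 9E501 this yields
+  # names such as 'toolchain-9E501-1' (mac) or 'ios-toolchain-9E501-2'
+  # (ios); |toolchain_name| is just a scratch tarball path in the temp dir.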
+
+ print "Creating %s (%s)." % (toolchain_file_name, toolchain_name)
+ os.environ["COPYFILE_DISABLE"] = "1"
+ os.environ["GZ_OPT"] = "-8"
+ args = ['tar', '-cvzf', toolchain_name]
+ exclude_folders = DEFAULT_EXCLUDE_FOLDERS
+ if parser_args.platform == 'mac':
+ exclude_folders += MAC_EXCLUDE_FOLDERS
+ else:
+ exclude_folders += IOS_EXCLUDE_FOLDERS
+ args.extend(map('--exclude={0}'.format, exclude_folders))
+ args.extend(['.'])
+ subprocess.check_call(args)
+
+ print "Uploading %s toolchain." % toolchain_file_name
+ destination_path = '%s/%s.tgz' % (TOOLCHAIN_URL, toolchain_file_name)
+ subprocess.check_call(['gsutil.py', 'cp', '-n', toolchain_name,
+ destination_path])
+
+ print "Done with %s upload." % toolchain_file_name
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/precompile.cc b/deps/v8/build/precompile.cc
new file mode 100644
index 0000000000..db1ef6dfe5
--- /dev/null
+++ b/deps/v8/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/deps/v8/build/precompile.h b/deps/v8/build/precompile.h
new file mode 100644
index 0000000000..c699562c0c
--- /dev/null
+++ b/deps/v8/build/precompile.h
@@ -0,0 +1,53 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used as a precompiled header for both C and C++ files. So
+// any C++ headers must go in the __cplusplus block below.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <math.h>
+#include <memory.h>
+#include <signal.h>
+#include <stdarg.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#if defined(__cplusplus)
+
+#include <algorithm>
+#include <bitset>
+#include <cmath>
+#include <cstddef>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <fstream>
+#include <functional>
+#include <iomanip>
+#include <iosfwd>
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#endif // __cplusplus
diff --git a/deps/v8/build/print_python_deps.py b/deps/v8/build/print_python_deps.py
new file mode 100755
index 0000000000..e7ac5e2f15
--- /dev/null
+++ b/deps/v8/build/print_python_deps.py
@@ -0,0 +1,155 @@
+#!/usr/bin/python2.7
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints all non-system dependencies for the given module.
+
+The primary use-case for this script is to generate the list of python modules
+required for .isolate files.
+"""
+
+import argparse
+import imp
+import os
+import pipes
+import sys
+
+# Don't use any helper modules, or else they will end up in the results.
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def _ComputePythonDependencies():
+ """Gets the paths of imported non-system python modules.
+
+ A path is assumed to be a "system" import if it is outside of chromium's
+ src/. The paths will be relative to the current directory.
+ """
+ module_paths = (m.__file__ for m in sys.modules.values()
+ if m and hasattr(m, '__file__'))
+
+ src_paths = set()
+ for path in module_paths:
+ if path == __file__:
+ continue
+ path = os.path.abspath(path)
+ if not path.startswith(_SRC_ROOT):
+ continue
+
+ if (path.endswith('.pyc')
+ or (path.endswith('c') and not os.path.splitext(path)[1])):
+ path = path[:-1]
+ src_paths.add(path)
+
+ return src_paths
+
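+# Illustration: a module whose __file__ is foo/bar.pyc is rewritten above to
+# foo/bar.py, so the emitted .pydeps always lists source files.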
+
+def _NormalizeCommandLine(options):
+ """Returns a string that when run from SRC_ROOT replicates the command."""
+ args = ['build/print_python_deps.py']
+ root = os.path.relpath(options.root, _SRC_ROOT)
+ if root != '.':
+ args.extend(('--root', root))
+ if options.output:
+ args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT)))
+ if options.gn_paths:
+ args.extend(('--gn-paths',))
+ for whitelist in sorted(options.whitelists):
+ args.extend(('--whitelist', os.path.relpath(whitelist, _SRC_ROOT)))
+ args.append(os.path.relpath(options.module, _SRC_ROOT))
+ return ' '.join(pipes.quote(x) for x in args)
+
+
+def _FindPythonInDirectory(directory):
+ """Returns an iterable of all non-test python files in the given directory."""
+  for root, _dirnames, filenames in os.walk(directory):
+ for filename in filenames:
+ if filename.endswith('.py') and not filename.endswith('_test.py'):
+ yield os.path.join(root, filename)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Prints all non-system dependencies for the given module.')
+ parser.add_argument('module',
+ help='The python module to analyze.')
+ parser.add_argument('--root', default='.',
+ help='Directory to make paths relative to.')
+ parser.add_argument('--output',
+ help='Write output to a file rather than stdout.')
+ parser.add_argument('--inplace', action='store_true',
+ help='Write output to a file with the same path as the '
+ 'module, but with a .pydeps extension. Also sets the '
+ 'root to the module\'s directory.')
+ parser.add_argument('--no-header', action='store_true',
+ help='Do not write the "# Generated by" header.')
+ parser.add_argument('--gn-paths', action='store_true',
+ help='Write paths as //foo/bar/baz.py')
+ parser.add_argument('--did-relaunch', action='store_true',
+ help=argparse.SUPPRESS)
+ parser.add_argument('--whitelist', default=[], action='append',
+ dest='whitelists',
+ help='Recursively include all non-test python files '
+ 'within this directory. May be specified multiple times.')
+ options = parser.parse_args()
+
+ if options.inplace:
+ if options.output:
+ parser.error('Cannot use --inplace and --output at the same time!')
+ if not options.module.endswith('.py'):
+ parser.error('Input module path should end with .py suffix!')
+ options.output = options.module + 'deps'
+ options.root = os.path.dirname(options.module)
+
+  # Trybots run with vpython as the default Python, but with a different
+  # config from //.vpython. To make the is_vpython test work, and to match
+  # the behavior of dev machines, the script is re-launched under vpython
+  # when it was started with a plain python2.7 interpreter.
+ #
+ # E.g. $HOME/.vpython-root/dd50d3/bin/python
+ # E.g. /b/s/w/ir/cache/vpython/ab5c79/bin/python
+ is_vpython = 'vpython' in sys.executable
+ if not is_vpython:
+ with open(options.module) as f:
+ shebang = f.readline()
+    # Re-launching under vpython picks up the modules specified in
+    # //.vpython, but not modules defined inline via
+    # [VPYTHON:BEGIN] ... [VPYTHON:END] comments.
+    # TODO(agrieve): Add support for this if the need ever arises.
+    # Note: the 'True or' below makes the re-launch unconditional for any
+    # non-vpython interpreter, regardless of what the shebang line says.
+    if True or shebang.startswith('#!') and 'vpython' in shebang:
+ os.execvp('vpython', ['vpython'] + sys.argv + ['--did-relaunch'])
+
+ # Replace the path entry for print_python_deps.py with the one for the given
+ # module.
+ try:
+ sys.path[0] = os.path.dirname(options.module)
+ imp.load_source('NAME', options.module)
+ except Exception:
+ # Output extra diagnostics when loading the script fails.
+ sys.stderr.write('Error running print_python_deps.py.\n')
+ sys.stderr.write('is_vpython={}\n'.format(is_vpython))
+    sys.stderr.write('did_relaunch={}\n'.format(options.did_relaunch))
+ sys.stderr.write('python={}\n'.format(sys.executable))
+ raise
+
+ paths_set = _ComputePythonDependencies()
+ for path in options.whitelists:
+ paths_set.update(os.path.abspath(p) for p in _FindPythonInDirectory(path))
+
+ paths = [os.path.relpath(p, options.root) for p in paths_set]
+
+ normalized_cmdline = _NormalizeCommandLine(options)
+ out = open(options.output, 'w') if options.output else sys.stdout
+ with out:
+ if not options.no_header:
+ out.write('# Generated by running:\n')
+ out.write('# %s\n' % normalized_cmdline)
+ prefix = '//' if options.gn_paths else ''
+ for path in sorted(paths):
+ out.write(prefix + path + '\n')
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/protoc_java.py b/deps/v8/build/protoc_java.py
new file mode 100755
index 0000000000..09cd8082b6
--- /dev/null
+++ b/deps/v8/build/protoc_java.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is a helper file for the genproto_java action in protoc_java.gypi.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+ --srcjar).
+4. Creates a new stamp file.
+"""
+
+import os
+import optparse
+import shutil
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp"))
+from util import build_utils
+
+def main(argv):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_option("--protoc", help="Path to protoc binary.")
+ parser.add_option("--proto-path", help="Path to proto directory.")
+ parser.add_option("--java-out-dir",
+ help="Path to output directory for java files.")
+ parser.add_option("--srcjar", help="Path to output srcjar.")
+ parser.add_option("--stamp", help="File to touch on success.")
+ parser.add_option("--nano",
+ help="Use to generate nano protos.", action='store_true')
+ parser.add_option("--protoc-javalite-plugin-dir",
+ help="Path to protoc java lite plugin directory.")
+ options, args = parser.parse_args(argv)
+
+ build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
+ if not options.java_out_dir and not options.srcjar:
+ print 'One of --java-out-dir or --srcjar must be specified.'
+ return 1
+
+ if not options.nano and not options.protoc_javalite_plugin_dir:
+ print 'One of --nano or --protoc-javalite-plugin-dir must be specified.'
+ return 1
+
+ with build_utils.TempDir() as temp_dir:
+ if options.nano:
+ # Specify arguments to the generator.
+ generator_args = ['optional_field_style=reftypes',
+ 'store_unknown_fields=true']
+ out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
+ else:
+ out_arg = '--javalite_out=' + temp_dir
+
+ custom_env = os.environ.copy()
+ if options.protoc_javalite_plugin_dir:
+      # If we are generating lite protos, the javalite plugin needs to be on
+      # the PATH when protoc is called. See
+      # https://github.com/protocolbuffers/protobuf/blob/master/java/lite.md
+ custom_env['PATH'] = '{}:{}'.format(
+ os.path.abspath(options.protoc_javalite_plugin_dir), custom_env['PATH'])
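+      # protoc resolves --javalite_out by looking for an executable named
+      # protoc-gen-javalite on PATH, which is why the plugin directory is
+      # prepended here.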
+
+ # Generate Java files using protoc.
+ build_utils.CheckOutput(
+ [options.protoc, '--proto_path', options.proto_path, out_arg]
+ + args, env=custom_env)
+
+ if options.java_out_dir:
+ build_utils.DeleteDirectory(options.java_out_dir)
+ shutil.copytree(temp_dir, options.java_out_dir)
+ else:
+ build_utils.ZipDir(options.srcjar, temp_dir)
+
+ if options.depfile:
+ assert options.srcjar
+ deps = args + [options.protoc]
+ build_utils.WriteDepfile(options.depfile, options.srcjar, deps,
+ add_pydeps=False)
+
+ if options.stamp:
+ build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/protoc_java.pydeps b/deps/v8/build/protoc_java.pydeps
new file mode 100644
index 0000000000..a26622b476
--- /dev/null
+++ b/deps/v8/build/protoc_java.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build --output build/protoc_java.pydeps build/protoc_java.py
+android/gyp/util/__init__.py
+android/gyp/util/build_utils.py
+android/gyp/util/md5_check.py
+gn_helpers.py
+protoc_java.py
diff --git a/deps/v8/build/redirect_stdout.py b/deps/v8/build/redirect_stdout.py
new file mode 100644
index 0000000000..72d0732af1
--- /dev/null
+++ b/deps/v8/build/redirect_stdout.py
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+
+# This script executes a command and redirects the stdout to a file. This is
+# equivalent to |command... > output_file|.
+#
+# Usage: python redirect_stdout.py output_file command...
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >> sys.stderr, "Usage: %s output_file command..." % (sys.argv[0])
+ sys.exit(1)
+
+ with open(sys.argv[1], 'w') as fp:
+ sys.exit(subprocess.check_call(sys.argv[2:], stdout=fp))
diff --git a/deps/v8/build/rm.py b/deps/v8/build/rm.py
new file mode 100755
index 0000000000..5ca642d46e
--- /dev/null
+++ b/deps/v8/build/rm.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Delete a file.
+
+This module works much like the rm posix command.
+"""
+
+import argparse
+import os
+import sys
+
+
+def Main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('files', nargs='+')
+ parser.add_argument('-f', '--force', action='store_true',
+ help="don't err on missing")
+ parser.add_argument('--stamp', required=True, help='touch this file')
+ args = parser.parse_args()
+ for f in args.files:
+ try:
+ os.remove(f)
+ except OSError:
+ if not args.force:
+ print >>sys.stderr, "'%s' does not exist" % f
+ return 1
+
+ with open(args.stamp, 'w'):
+ os.utime(args.stamp, None)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/deps/v8/build/run_swarming_xcode_install.py b/deps/v8/build/run_swarming_xcode_install.py
new file mode 100755
index 0000000000..a731c1bd6b
--- /dev/null
+++ b/deps/v8/build/run_swarming_xcode_install.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs swarming_xcode_install on the bots. It should be run when we
+need to upgrade all the swarming testers. It:
+ 1) Packages two python files into an isolate.
+ 2) Runs the isolate on swarming machines that satisfy certain dimensions.
+
+Example usage:
+ $ ./build/run_swarming_xcode_install.py --luci_path ~/work/luci-py \
+ --swarming-server touch-swarming.appspot.com \
+ --isolate-server touch-isolate.appspot.com
+"""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Run swarming_xcode_install on the bots.')
+ parser.add_argument('--luci_path', required=True, type=os.path.abspath)
+ parser.add_argument('--swarming-server', required=True, type=str)
+ parser.add_argument('--isolate-server', required=True, type=str)
+ parser.add_argument('--batches', type=int, default=25,
+ help="Run xcode install in batches of size |batches|.")
+ parser.add_argument('--dimension', nargs=2, action='append')
+ args = parser.parse_args()
+
+ args.dimension = args.dimension or []
+
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ tmp_dir = tempfile.mkdtemp(prefix='swarming_xcode')
+ try:
+ print 'Making isolate.'
+ shutil.copyfile(os.path.join(script_dir, 'swarming_xcode_install.py'),
+ os.path.join(tmp_dir, 'swarming_xcode_install.py'))
+ shutil.copyfile(os.path.join(script_dir, 'mac_toolchain.py'),
+ os.path.join(tmp_dir, 'mac_toolchain.py'))
+
+ luci_client = os.path.join(args.luci_path, 'client')
+ cmd = [
+ sys.executable, os.path.join(luci_client, 'isolateserver.py'), 'archive',
+ '-I', args.isolate_server, tmp_dir,
+ ]
+ isolate_hash = subprocess.check_output(cmd).split()[0]
+
+ print 'Running swarming_xcode_install.'
+ # TODO(crbug.com/765361): The dimensions below should be updated once
+ # swarming for iOS is fleshed out, likely removing xcode_version 9 and
+ # adding different dimensions.
+ luci_tools = os.path.join(luci_client, 'tools')
+ dimensions = [['pool', 'Chrome'], ['xcode_version', '9.0']] + args.dimension
+ dim_args = []
+ for d in dimensions:
+ dim_args += ['--dimension'] + d
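+  # Illustration: [['pool', 'Chrome'], ['xcode_version', '9.0']] expands to
+  # ['--dimension', 'pool', 'Chrome', '--dimension', 'xcode_version', '9.0'].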
+ cmd = [
+ sys.executable, os.path.join(luci_tools, 'run_on_bots.py'),
+ '--swarming', args.swarming_server, '--isolate-server',
+ args.isolate_server, '--priority', '20', '--batches', str(args.batches),
+ '--tags', 'name:run_swarming_xcode_install',
+ ] + dim_args + ['--name', 'run_swarming_xcode_install', '--', isolate_hash,
+ 'python', 'swarming_xcode_install.py',
+ ]
+ subprocess.check_call(cmd)
+ print 'All tasks completed.'
+
+ finally:
+ shutil.rmtree(tmp_dir)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/sample_arg_file.gn b/deps/v8/build/sample_arg_file.gn
new file mode 100644
index 0000000000..91e90456e8
--- /dev/null
+++ b/deps/v8/build/sample_arg_file.gn
@@ -0,0 +1,6 @@
+# Build arguments go here. Here are some of the most commonly set ones.
+# Run `gn args <out_dir> --list` for the full list.
+# is_component_build = true
+# is_debug = true
+# symbol_level = 2
+# use_goma = false
diff --git a/deps/v8/build/sanitize-mac-build-log.sed b/deps/v8/build/sanitize-mac-build-log.sed
new file mode 100644
index 0000000000..b4111c7b82
--- /dev/null
+++ b/deps/v8/build/sanitize-mac-build-log.sed
@@ -0,0 +1,33 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^ setenv /d
+/^ cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line. These deletions drop the command line.
+\|^ /Developer/usr/bin/|d
+\|^ /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
+\|^ /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^ .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^ /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding\.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1|
+s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1|
diff --git a/deps/v8/build/sanitize-mac-build-log.sh b/deps/v8/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000000..df5a7af29e
--- /dev/null
+++ b/deps/v8/build/sanitize-mac-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/deps/v8/build/sanitize-win-build-log.sed b/deps/v8/build/sanitize-win-build-log.sed
new file mode 100644
index 0000000000..c18e664c83
--- /dev/null
+++ b/deps/v8/build/sanitize-win-build-log.sed
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully\./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]+>//
+
+# Shorten bindings generation lines
+s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/ idl_compiler \1/
diff --git a/deps/v8/build/sanitize-win-build-log.sh b/deps/v8/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000000..df5a7af29e
--- /dev/null
+++ b/deps/v8/build/sanitize-win-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/deps/v8/build/sanitizers/OWNERS b/deps/v8/build/sanitizers/OWNERS
new file mode 100644
index 0000000000..e9a248c4af
--- /dev/null
+++ b/deps/v8/build/sanitizers/OWNERS
@@ -0,0 +1,10 @@
+ochang@chromium.org
+eugenis@chromium.org
+glider@chromium.org
+inferno@chromium.org
+mbarbella@chromium.org
+metzman@chromium.org
+mmoroz@chromium.org
+rnk@chromium.org
+per-file tsan_suppressions.cc=*
+per-file lsan_suppressions.cc=*
diff --git a/deps/v8/build/sanitizers/asan_suppressions.cc b/deps/v8/build/sanitizers/asan_suppressions.cc
new file mode 100644
index 0000000000..f3b9459ded
--- /dev/null
+++ b/deps/v8/build/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines. Entries should look, for example, like:
+//
+// // http://crbug.com/178677
+// "interceptor_via_lib:libsqlite3.so\n"
+char kASanDefaultSuppressions[] =
+
+ // End of suppressions.
+ // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+ ""; // Please keep this semicolon.
+
+#endif // ADDRESS_SANITIZER
diff --git a/deps/v8/build/sanitizers/lsan_suppressions.cc b/deps/v8/build/sanitizers/lsan_suppressions.cc
new file mode 100644
index 0000000000..abda863fe2
--- /dev/null
+++ b/deps/v8/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,71 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+ // Intentional leak used as sanity test for Valgrind/memcheck.
+ "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+ // ================ Leaks in third-party code ================
+
+ // False positives in libfontconfig. http://crbug.com/39050
+ "leak:libfontconfig\n"
+  // eglibc-2.19/string/strdup.c creates false positive leak errors for the
+  // same reason as crbug.com/39050. The leak error stack trace, when unwound
+  // at malloc time, includes a call into libfontconfig, but the default
+  // stack trace is too short on the leak sanitizer bot for the libfontconfig
+  // suppression to work. http://crbug.com/605286
+ "leak:__strdup\n"
+
+ // Leaks in Nvidia's libGL.
+ "leak:libGL.so\n"
+
+ // XRandR has several one time leaks.
+ "leak:libxrandr\n"
+
+ // xrandr leak. http://crbug.com/119677
+ "leak:XRRFindDisplay\n"
+
+ // http://crbug.com/431213, http://crbug.com/416665
+ "leak:gin/object_template_builder.h\n"
+
+ // Leaks in swrast_dri.so. http://crbug.com/540042
+ "leak:swrast_dri.so\n"
+
+ // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
+ "leak:__gconv_lookup_cache\n"
+
+ // ================ Leaks in Chromium code ================
+ // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+ // Instead, commits that introduce memory leaks should be reverted.
+ // Suppressing the leak is acceptable in some cases when reverting is
+ // impossible, i.e. when enabling leak detection for the first time for a
+ // test target with pre-existing leaks.
+
+ // http://crbug.com/356306
+ "leak:service_manager::SetProcessTitleFromCommandLine\n"
+
+ // https://crbug.com/755670
+ "leak:third_party/yasm/\n"
+
+  // V8 leaks caused by weak-reference callbacks that are never called.
+ "leak:blink::DOMWrapperWorld::Create\n"
+ "leak:blink::ScriptState::Create\n"
+
+ // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+ // End of suppressions.
+ ; // Please keep this semicolon.
+
+#endif // LEAK_SANITIZER
diff --git a/deps/v8/build/sanitizers/sanitizer_options.cc b/deps/v8/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 0000000000..d40d4d254a
--- /dev/null
+++ b/deps/v8/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,181 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+ defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+ defined(UNDEFINED_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE \
+ extern "C" \
+ __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
+ __attribute__((visibility("default"))) \
+ __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+// malloc_context_size=5 - limit the size of stack traces collected by ASan
+// for each malloc/free to 5 frames. These stack traces tend to accumulate
+// very fast in applications using JIT (v8 in Chrome's case), see
+// https://code.google.com/p/address-sanitizer/issues/detail?id=177
+// symbolize=1 - enable in-process symbolization.
+// legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+// work around libGL.so using the obsolete API, see
+// http://crbug.com/341805. This may break if pthread_cond_t objects are
+// accessed by both instrumented and non-instrumented binaries (e.g. if
+// they reside in shared memory). This option is going to be deprecated in
+// upstream AddressSanitizer and must not be used anywhere except the
+// official builds.
+// check_printf=1 - check the memory accesses to printf (and other formatted
+// output routines) arguments.
+// use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+// for stack overflow detection.
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports
+// fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+// to print error reports. V8 doesn't generate debug info for the JIT code,
+// so the slow unwinder may not work properly.
+// detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+// stack allocations and detect stack-use-after-return errors.
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+ "legacy_pthread_cond=1 malloc_context_size=5 "
+ "symbolize=1 check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+ "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "allow_user_segv_handler=1 ";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char* kAsanDefaultOptions =
+ "symbolize=1 check_printf=1 use_sigaltstack=1 "
+ "detect_leaks=0 strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "detect_stack_use_after_return=1 "
+ "allow_user_segv_handler=1 ";
+#endif // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+ "check_printf=1 use_sigaltstack=1 "
+ "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "detect_stack_use_after_return=1 detect_odr_violation=0 ";
+
+#elif defined(OS_WIN)
+const char* kAsanDefaultOptions =
+ "check_printf=1 use_sigaltstack=1 "
+ "strip_path_prefix=\\..\\..\\ fast_unwind_on_fatal=1 ";
+#endif // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX) || defined(OS_WIN)
+// Allow NaCl to override the default asan options.
+extern const char* kAsanDefaultOptionsNaCl;
+__attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+ if (kAsanDefaultOptionsNaCl)
+ return kAsanDefaultOptionsNaCl;
+ return kAsanDefaultOptions;
+}
+
+extern char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+ return kASanDefaultSuppressions;
+}
+#endif // OS_LINUX || OS_MACOSX || OS_WIN
+#endif // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+// detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+// second_deadlock_stack=1 - more verbose deadlock reports.
+// report_signal_unsafe=0 - do not report async-signal-unsafe functions
+// called from signal handlers.
+// report_thread_leaks=0 - do not report unjoined threads at the end of
+// the program execution.
+// print_suppressions=1 - print the list of matched suppressions.
+// history_size=7 - make the history buffer proportional to 2^7 (the maximum
+// value) to keep more stack traces.
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+ "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+ "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+ "strict_memcmp=0 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+ return kTsanDefaultOptions;
+}
+
+extern char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+ return kTSanDefaultSuppressions;
+}
+
+#endif // THREAD_SANITIZER && OS_LINUX
+
+#if defined(MEMORY_SANITIZER)
+// Default options for MemorySanitizer:
+// intercept_memcmp=0 - do not detect uninitialized memory in memcmp() calls.
+// Pending cleanup, see http://crbug.com/523428
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kMsanDefaultOptions[] =
+ "intercept_memcmp=0 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() {
+ return kMsanDefaultOptions;
+}
+
+#endif // MEMORY_SANITIZER
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+// print_suppressions=1 - print the list of matched suppressions.
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kLsanDefaultOptions[] =
+ "print_suppressions=1 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+ return kLsanDefaultOptions;
+}
+
+extern char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+ return kLSanDefaultSuppressions;
+}
+
+#endif // LEAK_SANITIZER
+
+#if defined(UNDEFINED_SANITIZER)
+// Default options for UndefinedBehaviorSanitizer:
+// print_stacktrace=1 - print the stacktrace when UBSan reports an error.
+const char kUbsanDefaultOptions[] =
+ "print_stacktrace=1 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() {
+ return kUbsanDefaultOptions;
+}
+
+#endif // UNDEFINED_SANITIZER
diff --git a/deps/v8/build/sanitizers/tsan_suppressions.cc b/deps/v8/build/sanitizers/tsan_suppressions.cc
new file mode 100644
index 0000000000..53c2f3234b
--- /dev/null
+++ b/deps/v8/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,213 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions which contains TSan suppressions delimited by
+// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+ // False positives in libflashplayer.so, libgio.so and libglib.so.
+ // Since we don't instrument them, we cannot reason about the
+ // synchronization in them.
+ "race:libflashplayer.so\n"
+ "race:libgio*.so\n"
+ "race:libglib*.so\n"
+
+ // Intentional race in ToolsSanityTest.DataRace in base_unittests.
+ "race:base/tools_sanity_unittest.cc\n"
+
+ // Data race on WatchdogCounter [test-only].
+ "race:base/threading/watchdog_unittest.cc\n"
+
+ // Races in libevent, http://crbug.com/23244.
+ "race:libevent/event.c\n"
+
+ // Data race caused by swapping out the network change notifier with a mock
+ // [test-only]. http://crbug.com/927330.
+ "race:content/browser/net_info_browsertest.cc\n"
+
+ // http://crbug.com/84094.
+ "race:sqlite3StatusSet\n"
+ "race:pcache1EnforceMaxPage\n"
+ "race:pcache1AllocPage\n"
+
+ // http://crbug.com/120808
+ "race:base/threading/watchdog.cc\n"
+
+ // http://crbug.com/157586
+ "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+ // http://crbug.com/158718
+ "race:third_party/ffmpeg/libavcodec/pthread.c\n"
+ "race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+ "race:third_party/ffmpeg/libavcodec/vp8.c\n"
+ "race:third_party/ffmpeg/libavutil/mem.c\n"
+ "race:*HashFrameForTesting\n"
+ "race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+ "race:media::ReleaseData\n"
+
+ // http://crbug.com/239359
+ "race:media::TestInputCallback::OnData\n"
+
+ // http://crbug.com/244385
+ "race:unixTempFileDir\n"
+
+ // http://crbug.com/244755
+ "race:v8::internal::Zone::NewExpand\n"
+
+ // http://crbug.com/244774
+ "race:webrtc::RTPReceiver::ProcessBitrate\n"
+ "race:webrtc::RTPSender::ProcessBitrate\n"
+ "race:webrtc::VideoCodingModuleImpl::Decode\n"
+ "race:webrtc::RTPSender::SendOutgoingData\n"
+ "race:webrtc::LibvpxVp8Encoder::GetEncodedPartitions\n"
+ "race:webrtc::LibvpxVp8Encoder::Encode\n"
+ "race:webrtc::ViEEncoder::DeliverFrame\n"
+ "race:webrtc::vcm::VideoReceiver::Decode\n"
+ "race:webrtc::VCMReceiver::FrameForDecoding\n"
+
+ // http://crbug.com/244856
+ "race:libpulsecommon*.so\n"
+
+ // http://crbug.com/246968
+ "race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+ // http://crbug.com/257396
+ "race:base::trace_event::"
+
+ // http://crbug.com/258479
+ "race:SamplingStateScope\n"
+ "race:g_trace_state\n"
+
+ // http://crbug.com/258499
+ "race:third_party/skia/include/core/SkRefCnt.h\n"
+
+ // http://crbug.com/268924
+ "race:base::g_power_monitor\n"
+ "race:base::PowerMonitor::PowerMonitor\n"
+ "race:base::PowerMonitor::AddObserver\n"
+ "race:base::PowerMonitor::RemoveObserver\n"
+ "race:base::PowerMonitor::IsOnBatteryPower\n"
+
+ // http://crbug.com/258935
+ "race:base::Thread::StopSoon\n"
+
+ // http://crbug.com/272095
+ "race:base::g_top_manager\n"
+
+ // http://crbug.com/308590
+ "race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+ // http://crbug.com/476529
+ "deadlock:cc::VideoLayerImpl::WillDraw\n"
+
+ // http://crbug.com/328826
+ "race:gLCDOrder\n"
+ "race:gLCDOrientation\n"
+
+ // http://crbug.com/328868
+ "race:PR_Lock\n"
+
+ // http://crbug.com/333244
+ "race:content::"
+ "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+ // http://crbug.com/347534
+ "race:v8::internal::V8::TearDown\n"
+
+ // http://crbug.com/347538
+ "race:sctp_timer_start\n"
+
+ // http://crbug.com/348511
+ "race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+ // http://crbug.com/348982
+ "race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+ "race:cricket::P2PTransportChannel::AddConnection\n"
+
+ // http://crbug.com/348984
+ "race:sctp_express_handle_sack\n"
+ "race:system_base_info\n"
+
+ // https://code.google.com/p/v8/issues/detail?id=3143
+ "race:v8::internal::FLAG_track_double_fields\n"
+
+ // http://crbug.com/374135
+ "race:media::AlsaWrapper::PcmWritei\n"
+
+ // False positive in libc's tzset_internal, http://crbug.com/379738.
+ "race:tzset_internal\n"
+
+ // http://crbug.com/380554
+ "deadlock:g_type_add_interface_static\n"
+
+ // http://crbug.com/386385
+ "race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+ // http://crbug.com/397022
+ "deadlock:"
+ "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::"
+ "TestBody\n"
+
+ // http://crbug.com/415472
+ "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+ // http://crbug.com/490856
+ "deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
+
+ // https://code.google.com/p/skia/issues/detail?id=3294
+ "race:SkBaseMutex::acquire\n"
+
+ // Lock inversion in third party code, won't fix.
+ // https://crbug.com/455638
+ "deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+ // https://crbug.com/459429
+ "race:randomnessPid\n"
+
+ // https://crbug.com/454655
+ "race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
+
+ // http://crbug.com/582274
+ "race:usrsctp_close\n"
+
+ // http://crbug.com/633145
+ "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
+
+ // http://crbug.com/v8/6065
+ "race:net::(anonymous namespace)::ProxyResolverV8TracingImpl::RequestImpl"
+ "::~RequestImpl()\n"
+
+ // http://crbug.com/691029
+ "deadlock:libGLX.so*\n"
+
+ // http://crbug.com/695929
+ "race:base::i18n::IsRTL\n"
+ "race:base::i18n::SetICUDefaultLocale\n"
+
+ // https://crbug.com/794920
+ "race:base::debug::SetCrashKeyString\n"
+ "race:crash_reporter::internal::CrashKeyStringImpl::Set\n"
+
+ // http://crbug.com/795110
+ "race:third_party/fontconfig/*\n"
+
+ // http://crbug.com/797998
+ "race:content::SandboxIPCHandler::HandleLocaltime\n"
+
+ // http://crbug.com/927330
+ "race:net::(anonymous namespace)::g_network_change_notifier\n"
+
+ // End of suppressions.
+ ; // Please keep this semicolon.
+
+#endif // THREAD_SANITIZER
diff --git a/deps/v8/build/shim_headers.gni b/deps/v8/build/shim_headers.gni
new file mode 100644
index 0000000000..56591484d1
--- /dev/null
+++ b/deps/v8/build/shim_headers.gni
@@ -0,0 +1,41 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+template("shim_headers") {
+ action_name = "gen_${target_name}"
+ config_name = "${target_name}_config"
+ shim_headers_path = "${root_gen_dir}/shim_headers/${target_name}"
+
+ config(config_name) {
+ include_dirs = [ shim_headers_path ]
+ }
+
+ action(action_name) {
+ script = "//tools/generate_shim_headers/generate_shim_headers.py"
+ args = [
+ "--generate",
+ "--headers-root",
+ rebase_path(invoker.root_path),
+ "--output-directory",
+ rebase_path(shim_headers_path),
+ ]
+ if (defined(invoker.prefix)) {
+ args += [
+ "--prefix",
+ invoker.prefix,
+ ]
+ }
+ args += invoker.headers
+
+ outputs = process_file_template(invoker.headers,
+ "${shim_headers_path}/{{source_file_part}}")
+ }
+
+ group(target_name) {
+ deps = [
+ ":${action_name}",
+ ]
+ all_dependent_configs = [ ":${config_name}" ]
+ }
+}
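
For illustration, a minimal invocation of this template might look like the
following sketch; the target name, root_path, and header list are hypothetical,
not taken from this commit:

  shim_headers("icu_shim") {
    root_path = "third_party/icu/source/common"  # hypothetical library root
    headers = [ "unicode/uversion.h" ]           # hypothetical header to shim
  }

A target depending on ":icu_shim" picks up the generated shim directory on its
include path through the all_dependent_configs set on the group above.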
diff --git a/deps/v8/build/split_static_library.gni b/deps/v8/build/split_static_library.gni
new file mode 100644
index 0000000000..60ad4528c4
--- /dev/null
+++ b/deps/v8/build/split_static_library.gni
@@ -0,0 +1,77 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+
+template("split_static_library") {
+ assert(defined(invoker.split_count),
+ "Must define split_count for split_static_library")
+
+ # In many conditions the number of inputs will be 1 (because the
+ # count will be conditional on platform or configuration) and for
+ # some build configurations it's unnecessary to split libraries
+ # since the tooling will never create files of a problematic size.
+ if (invoker.split_count == 1 || use_lld) {
+ static_library(target_name) {
+ forward_variables_from(invoker, "*")
+ }
+ } else {
+ group_name = target_name
+
+ generated_static_libraries = []
+ current_library_index = 0
+ foreach(current_sources, split_list(invoker.sources, invoker.split_count)) {
+ current_name = "${target_name}_$current_library_index"
+ assert(
+ current_sources != [],
+ "Your values for splitting a static library generate one that has no sources.")
+ generated_static_libraries += [ ":$current_name" ]
+
+ static_library(current_name) {
+ # Generated static library shard gets everything but sources (which
+ # we're redefining) and visibility (which is set to be the group
+ # below).
+ forward_variables_from(invoker,
+ "*",
+ [
+ "check_includes",
+ "sources",
+ "visibility",
+ ])
+ sources = current_sources
+ visibility = [ ":$group_name" ]
+
+ # When splitting a target's sources up into a series of static
+ # libraries, those targets will naturally include headers from each
+ # other arbitrarily. We could theoretically generate a web of
+ # dependencies and allow_circular_includes_from between all pairs of
+ # targets, but that's very cumbersome. Typical usage in Chrome is that
+ # only official Windows builds use split static libraries due to the
+ # Visual Studio size limits, and this means we'll still get header
+ # checking coverage for the other configurations.
+ check_includes = false
+
+ # Uniquify the output name if one is specified.
+ if (defined(invoker.output_name)) {
+ output_name = "${invoker.output_name}_$current_library_index"
+ }
+ }
+
+ current_library_index = current_library_index + 1
+ }
+
+ group(group_name) {
+ public_deps = generated_static_libraries
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ }
+ }
+}
+
+set_defaults("split_static_library") {
+ configs = default_compiler_configs
+}
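
A hedged sketch of how this template might be invoked (the target and source
names are hypothetical):

  split_static_library("giant_lib") {
    split_count = 2
    sources = [
      "a.cc",
      "b.cc",
      "c.cc",
      "d.cc",
    ]
  }

When split_count is 2 and use_lld is false, this expands into static libraries
giant_lib_0 and giant_lib_1, wrapped in a group named giant_lib.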
diff --git a/deps/v8/build/swarming_xcode_install.py b/deps/v8/build/swarming_xcode_install.py
new file mode 100755
index 0000000000..7764aa55c7
--- /dev/null
+++ b/deps/v8/build/swarming_xcode_install.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Script used to install Xcode on the swarming bots.
+"""
+
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+import mac_toolchain
+
+VERSION = '9A235'
+URL = 'gs://chrome-mac-sdk/ios-toolchain-9A235-1.tgz'
+REMOVE_DIR = '/Applications/Xcode9.0-Beta4.app/'
+OUTPUT_DIR = '/Applications/Xcode9.0.app/'
+
+def main():
+ # Check if it's already installed.
+ if os.path.exists(OUTPUT_DIR):
+ env = os.environ.copy()
+ env['DEVELOPER_DIR'] = OUTPUT_DIR
+ cmd = ['xcodebuild', '-version']
+ found_version = \
+ subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE).communicate()[0]
+ if VERSION in found_version:
+ print "Xcode %s already installed" % VERSION
+ sys.exit(0)
+
+ # Confirm old dir is there first.
+ if not os.path.exists(REMOVE_DIR):
+ print "Failing early since %s isn't there." % REMOVE_DIR
+ sys.exit(1)
+
+ # Download Xcode.
+ with tempfile.NamedTemporaryFile() as temp:
+ env = os.environ.copy()
+ env['PATH'] += ":/b/depot_tools"
+ subprocess.check_call(['gsutil.py', 'cp', URL, temp.name], env=env)
+ if os.path.exists(OUTPUT_DIR):
+ shutil.rmtree(OUTPUT_DIR)
+ if not os.path.exists(OUTPUT_DIR):
+ os.makedirs(OUTPUT_DIR)
+ tarfile.open(mode='r:gz', name=temp.name).extractall(path=OUTPUT_DIR)
+
+ # Accept license, call runFirstLaunch.
+ mac_toolchain.FinalizeUnpack(OUTPUT_DIR, 'ios')
+
+ # Set new Xcode as default.
+ subprocess.check_call(['sudo', '/usr/bin/xcode-select', '-s', OUTPUT_DIR])
+
+ if os.path.exists(REMOVE_DIR):
+ shutil.rmtree(REMOVE_DIR)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
+
diff --git a/deps/v8/build/symlink.gni b/deps/v8/build/symlink.gni
new file mode 100644
index 0000000000..4da5a57e43
--- /dev/null
+++ b/deps/v8/build/symlink.gni
@@ -0,0 +1,85 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a symlink.
+# Args:
+# source: Path to link to.
+# output: Where to create the symlink.
+template("symlink") {
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "testonly",
+ "visibility",
+ ])
+ outputs = [
+ invoker.output,
+ ]
+ script = "//build/symlink.py"
+ args = [
+ "-f",
+ rebase_path(invoker.source, get_path_info(invoker.output, "dir")),
+ rebase_path(invoker.output, root_build_dir),
+ ]
+ }
+}
+
+# Creates a symlink from root_build_dir/target_name to |binary_label|. This rule
+# is meant to be used within if (current_toolchain == default_toolchain) blocks
+# and point to targets in the non-default toolchain.
+# Note that for executables, using a copy (as opposed to a symlink) does not
+# work when is_component_build=true, since dependent libraries are found via
+# relative location.
+#
+# Args:
+# binary_label: Target that builds the file to symlink to. e.g.:
+# ":$target_name($host_toolchain)".
+# binary_output_name: The output_name set by the binary_label target
+# (if applicable).
+# output_name: Where to create the symlink
+# (default="$root_out_dir/$binary_output_name").
+#
+# Example:
+# if (current_toolchain == host_toolchain) {
+# executable("foo") { ... }
+# } else if (current_toolchain == default_toolchain) {
+# binary_symlink("foo") {
+# binary_label = ":foo($host_toolchain)"
+# }
+# }
+template("binary_symlink") {
+ symlink(target_name) {
+ forward_variables_from(invoker,
+ [
+ "output",
+ "testonly",
+ "visibility",
+ ])
+ deps = [
+ invoker.binary_label,
+ ]
+ data_deps = [
+ invoker.binary_label,
+ ]
+ if (defined(invoker.data_deps)) {
+ data_deps += invoker.data_deps
+ }
+
+ _out_dir = get_label_info(invoker.binary_label, "root_out_dir")
+ if (defined(invoker.binary_output_name)) {
+ _name = invoker.binary_output_name
+ } else {
+ _name = get_label_info(invoker.binary_label, "name")
+ }
+ source = "$_out_dir/$_name"
+
+ _output_name = _name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+ output = "$root_out_dir/$_output_name"
+ }
+}
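
The plain symlink() template can also be used directly. A minimal hypothetical
example:

  symlink("tool_alias") {
    source = "$root_out_dir/some_tool"        # hypothetical file to link to
    output = "$root_out_dir/alias/some_tool"  # hypothetical link location
  }

Note that the action rebases |source| against the directory of |output|, so
the link created on disk is relative rather than absolute.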
diff --git a/deps/v8/build/symlink.py b/deps/v8/build/symlink.py
new file mode 100755
index 0000000000..12942aa170
--- /dev/null
+++ b/deps/v8/build/symlink.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+description = """
+Make a symlink and optionally touch a file (to handle dependencies).
+"""
+usage = "%prog [options] source[ source ...] linkname"
+epilog = """
+A symlink to source is created at linkname. If multiple sources are specified,
+then linkname is assumed to be a directory, and will contain all the links to
+the sources (basenames identical to their source).
+
+On Windows, this will use hard links (mklink /H) to avoid requiring elevation.
+This means that if the original is deleted and replaced, the link will still
+have the old contents. This is not expected to interfere with the Chromium
+build.
+"""
+
+import errno
+import optparse
+import os.path
+import shutil
+import subprocess
+import sys
+
+
+def Main(argv):
+ parser = optparse.OptionParser(usage=usage, description=description,
+ epilog=epilog)
+ parser.add_option('-f', '--force', action='store_true')
+ parser.add_option('--touch')
+
+ options, args = parser.parse_args(argv[1:])
+ if len(args) < 2:
+ parser.error('at least two arguments required.')
+
+ target = args[-1]
+ sources = args[:-1]
+ for s in sources:
+ t = os.path.join(target, os.path.basename(s))
+ if len(sources) == 1 and not os.path.isdir(target):
+ t = target
+ t = os.path.expanduser(t)
+ if os.path.realpath(t) == os.path.realpath(s):
+ continue
+ try:
+ # N.B. Python 2.x does not have os.symlink for Windows.
+ # Python 3 has os.symlink for Windows, but requires either the admin-
+ # granted privilege SeCreateSymbolicLinkPrivilege or, as of Windows 10
+ # 1703, that Developer Mode be enabled. Hard links and junctions do not
+ # require any extra privileges to create.
+ if os.name == 'nt':
+ # mklink does not tolerate /-delimited path names.
+ t = t.replace('/', '\\')
+ s = s.replace('/', '\\')
+ # N.B. This tool only handles file hardlinks, not directory junctions.
+ subprocess.check_output(['cmd.exe', '/c', 'mklink', '/H', t, s],
+ stderr=subprocess.STDOUT)
+ else:
+ os.symlink(s, t)
+ except OSError, e:
+ if e.errno == errno.EEXIST and options.force:
+ if os.path.isdir(t):
+ shutil.rmtree(t, ignore_errors=True)
+ else:
+ os.remove(t)
+ os.symlink(s, t)
+ else:
+ raise
+ except subprocess.CalledProcessError, e:
+ # Since subprocess.check_output does not return an easily checked error
+ # number, in the 'force' case always assume it is 'file already exists'
+ # and retry.
+ if options.force:
+ if os.path.isdir(t):
+ shutil.rmtree(t, ignore_errors=True)
+ else:
+ os.remove(t)
+ subprocess.check_output(e.cmd, stderr=subprocess.STDOUT)
+ else:
+ raise
+
+
+ if options.touch:
+ with open(options.touch, 'w') as f:
+ pass
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
diff --git a/deps/v8/build/timestamp.gni b/deps/v8/build/timestamp.gni
new file mode 100644
index 0000000000..4d805c0942
--- /dev/null
+++ b/deps/v8/build/timestamp.gni
@@ -0,0 +1,34 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Defines the build_timestamp variable.
+
+import("//build/util/lastchange.gni")
+
+declare_args() {
+ # This should be the filename of a script that prints a single line
+ # containing an integer that's a unix timestamp in UTC.
+ # This timestamp is used as build time and will be compiled into
+ # other code.
+ #
+ # This argument may look unused. Before removing please check with the
+ # chromecast team to see if they still use it internally.
+ compute_build_timestamp = "compute_build_timestamp.py"
+}
+
+if (is_official_build) {
+ official_name = "official"
+} else {
+ official_name = "default"
+}
+
+# This will return a timestamp that's different each day (official builds)
+# or each month (regular builds). Just rely on gn rerunning due to other
+# changes to keep this up to date. (Bots run gn on each build, and for devs
+# the timestamp being 100% accurate doesn't matter.)
+# See compute_build_timestamp.py for tradeoffs for picking the timestamp.
+build_timestamp = exec_script(compute_build_timestamp,
+ [ official_name ],
+ "trim string",
+ [ lastchange_file ])
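
A hedged sketch of consuming build_timestamp from another build file; the
config name and define are illustrative only:

  import("//build/timestamp.gni")

  config("build_date_define") {
    # Hypothetical define carrying the timestamp into C/C++ code.
    defines = [ "BUILD_TIMESTAMP=$build_timestamp" ]
  }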
diff --git a/deps/v8/build/toolchain/BUILD.gn b/deps/v8/build/toolchain/BUILD.gn
new file mode 100644
index 0000000000..75701ded70
--- /dev/null
+++ b/deps/v8/build/toolchain/BUILD.gn
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+import("//build/toolchain/goma.gni")
+
+declare_args() {
+ # Pool for non goma tasks.
+ action_pool_depth = -1
+}
+
+if (action_pool_depth == -1 || use_goma) {
+ action_pool_depth = exec_script("get_cpu_count.py", [], "value")
+}
+
+if (current_toolchain == default_toolchain) {
+ pool("link_pool") {
+ depth = concurrent_links
+ }
+
+ pool("action_pool") {
+ depth = action_pool_depth
+ }
+}
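
These pools are referenced from tool() definitions in concrete toolchains.
Because the pools are only instantiated in the default toolchain, references
must be qualified with $default_toolchain. A minimal sketch (all other
required tools omitted for brevity):

  toolchain("sketch") {
    tool("link") {
      command = "g++ {{ldflags}} -o {{output}} {{inputs}} {{libs}}"
      outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ]

      # Throttle concurrent link steps through the shared pool defined above.
      pool = "//build/toolchain:link_pool($default_toolchain)"
    }
  }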
diff --git a/deps/v8/build/toolchain/OWNERS b/deps/v8/build/toolchain/OWNERS
new file mode 100644
index 0000000000..b329d48bfb
--- /dev/null
+++ b/deps/v8/build/toolchain/OWNERS
@@ -0,0 +1,8 @@
+dpranke@chromium.org
+scottmg@chromium.org
+
+# Clang Static Analyzer.
+per-file clang_static_analyzer*=mmoroz@chromium.org
+
+# Code Coverage.
+per-file *code_coverage*=mmoroz@chromium.org
diff --git a/deps/v8/build/toolchain/aix/BUILD.gn b/deps/v8/build/toolchain/aix/BUILD.gn
new file mode 100644
index 0000000000..202e59e652
--- /dev/null
+++ b/deps/v8/build/toolchain/aix/BUILD.gn
@@ -0,0 +1,21 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+gcc_toolchain("ppc64") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ toolchain_args = {
+ current_cpu = "ppc64"
+ current_os = "aix"
+ is_clang = false
+ }
+}
diff --git a/deps/v8/build/toolchain/android/BUILD.gn b/deps/v8/build/toolchain/android/BUILD.gn
new file mode 100644
index 0000000000..97dd12dc75
--- /dev/null
+++ b/deps/v8/build/toolchain/android/BUILD.gn
@@ -0,0 +1,141 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/sysroot.gni") # Imports android/config.gni.
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+ # Whether unstripped binaries, i.e. compiled with debug symbols, should be
+ # considered runtime_deps rather than stripped ones.
+ android_unstripped_runtime_outputs = true
+}
+
+# The Android clang toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+# - toolchain_root
+# Path to cpu-specific toolchain within the ndk.
+# - sysroot
+# Sysroot for this architecture.
+# - lib_dir
+# Subdirectory inside of sysroot where libs go.
+# - binary_prefix
+# Prefix of compiler executables.
+template("android_clang_toolchain") {
+ gcc_toolchain(target_name) {
+ assert(defined(invoker.toolchain_args),
+ "toolchain_args must be defined for android_clang_toolchain()")
+ toolchain_args = invoker.toolchain_args
+ toolchain_args.current_os = "android"
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ # Make our manually injected libs relative to the build dir.
+ _ndk_lib =
+ rebase_path(invoker.sysroot + "/" + invoker.lib_dir, root_build_dir)
+
+ libs_section_prefix = "$_ndk_lib/crtbegin_dynamic.o"
+ libs_section_postfix = "$_ndk_lib/crtend_android.o"
+
+ solink_libs_section_prefix = "$_ndk_lib/crtbegin_so.o"
+ solink_libs_section_postfix = "$_ndk_lib/crtend_so.o"
+
+ _android_tool_prefix =
+ "${invoker.toolchain_root}/bin/${invoker.binary_prefix}-"
+
+ # The tools should be run relative to the build dir.
+ _tool_prefix = rebase_path("$_android_tool_prefix", root_build_dir)
+
+ _prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ cc = "$_prefix/clang"
+ cxx = "$_prefix/clang++"
+ ar = "$_prefix/llvm-ar"
+ ld = cxx
+ readelf = _tool_prefix + "readelf"
+ nm = _tool_prefix + "nm"
+ strip = rebase_path("//buildtools/third_party/eu-strip/bin/eu-strip",
+ root_build_dir)
+ use_unstripped_as_runtime_outputs = android_unstripped_runtime_outputs
+
+ # Don't use .cr.so for loadable_modules since they are always loaded via
+ # absolute path.
+ loadable_module_extension = ".so"
+ }
+}
+
+android_clang_toolchain("android_clang_x86") {
+ toolchain_root = x86_android_toolchain_root
+ sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+ lib_dir = "usr/lib"
+ binary_prefix = "i686-linux-android"
+ toolchain_args = {
+ current_cpu = "x86"
+
+ # We lack the libclang_rt.profile library for x86 and x86_64, so we cannot
+ # link any binaries that are generated with coverage instrumentation.
+ # Therefore we need to turn off 'use_clang_coverage' for this toolchain.
+ # TODO(crbug.com/865376)
+ use_clang_coverage = false
+ }
+}
+
+android_clang_toolchain("android_clang_arm") {
+ toolchain_root = arm_android_toolchain_root
+ sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+ lib_dir = "usr/lib"
+ binary_prefix = "arm-linux-androideabi"
+ toolchain_args = {
+ current_cpu = "arm"
+ }
+}
+
+android_clang_toolchain("android_clang_mipsel") {
+ toolchain_root = mips_android_toolchain_root
+ sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+ lib_dir = "usr/lib"
+ binary_prefix = "mipsel-linux-android"
+ toolchain_args = {
+ current_cpu = "mipsel"
+ }
+}
+
+android_clang_toolchain("android_clang_x64") {
+ toolchain_root = x86_64_android_toolchain_root
+ sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+ lib_dir = "usr/lib64"
+ binary_prefix = "x86_64-linux-android"
+ toolchain_args = {
+ current_cpu = "x64"
+
+ # We lack the libclang_rt.profile library for x86 and x86_64, so we cannot
+ # link any binaries that are generated with coverage instrumentation.
+ # Therefore we need to turn off 'use_clang_coverage' for this toolchain.
+ # TODO(crbug.com/865376)
+ use_clang_coverage = false
+ }
+}
+
+android_clang_toolchain("android_clang_arm64") {
+ toolchain_root = arm64_android_toolchain_root
+ sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+ lib_dir = "usr/lib"
+ binary_prefix = "aarch64-linux-android"
+ toolchain_args = {
+ current_cpu = "arm64"
+ }
+}
+
+android_clang_toolchain("android_clang_mips64el") {
+ toolchain_root = mips64_android_toolchain_root
+ sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+ lib_dir = "usr/lib64"
+ binary_prefix = "mips64el-linux-android"
+ toolchain_args = {
+ current_cpu = "mips64el"
+ }
+}
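
Build files elsewhere can request a target in one of these secondary
toolchains using GN's label-toolchain syntax; the dependency below is a
hypothetical example:

  group("arm64_tools") {
    deps = [ ":my_tool(//build/toolchain/android:android_clang_arm64)" ]
  }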
diff --git a/deps/v8/build/toolchain/cc_wrapper.gni b/deps/v8/build/toolchain/cc_wrapper.gni
new file mode 100644
index 0000000000..0a03dde8d6
--- /dev/null
+++ b/deps/v8/build/toolchain/cc_wrapper.gni
@@ -0,0 +1,40 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+
+# Defines the configuration of the cc wrapper.
+# ccache: a C/C++ compiler cache which can greatly reduce recompilation times.
+# icecc, distcc: they take compile jobs from a build and distribute them
+# among remote machines, allowing a parallel build.
+#
+# TIPS
+#
+# 1) ccache
+# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
+# these versions don't support -Xclang. (3.1.10 and later will silently
+# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
+# or not).
+#
+# Use ccache 3.2 or later to avoid clang unused argument warnings:
+# https://bugzilla.samba.org/show_bug.cgi?id=8118
+#
+# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
+# speed, you can do:
+# export CCACHE_CPP2=yes
+#
+# 2) icecc
+# Set clang_use_chrome_plugins=false because icecc cannot distribute custom
+# clang libraries.
+#
+# To use icecc and ccache together, set cc_wrapper = "ccache" with
+# export CCACHE_PREFIX=icecc
+
+declare_args() {
+ # Set to "ccache", "icecc" or "distcc". Probably doesn't work on windows.
+ cc_wrapper = ""
+}
+
+assert(!use_goma || cc_wrapper == "",
+ "use_goma and cc_wrapper can not be used together.")
diff --git a/deps/v8/build/toolchain/clang_code_coverage_wrapper.py b/deps/v8/build/toolchain/clang_code_coverage_wrapper.py
new file mode 100755
index 0000000000..9697805690
--- /dev/null
+++ b/deps/v8/build/toolchain/clang_code_coverage_wrapper.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Removes code coverage flags from invocations of the Clang C/C++ compiler.
+
+If the GN arg `use_clang_coverage=true`, this script will be invoked by default.
+GN will add coverage instrumentation flags to almost all source files.
+
+This script is used to remove instrumentation flags from a subset of the source
+files. By default, it will not remove flags from any files. If the option
+--files-to-instrument is passed, this script will remove flags from all files
+except the ones listed in --files-to-instrument.
+
+This script also contains hard-coded exclusion lists of files to never
+instrument, indexed by target operating system. Files in these lists have their
+flags removed in both modes. The OS can be selected with --target-os.
+
+The coverage instrumentation input file consists of multiple lines, each
+holding the path to a source file. All paths, including the path to the input
+file itself, must be relative to the root build directory, e.g.
+../../base/task/post_task.cc for build directory 'out/Release'.
+
+One caveat with this compiler wrapper is that it may introduce unexpected
+behaviors in incremental builds when the file path to the coverage
+instrumentation input file changes between consecutive runs, so callers of this
+script are strongly advised to always use the same path such as
+"${root_build_dir}/coverage_instrumentation_input.txt".
+
+It's worth noting that, on try job builders, if the contents of the
+instrumentation file change so that a file no longer needs to be instrumented,
+it will be recompiled automatically: when try job B runs after try job A, the
+files that were instrumented in A are updated (i.e., reverted to the
+checked-in version) in B, so ninja considers them out of date and recompiles
+them.
+
+Example usage:
+ clang_code_coverage_wrapper.py \\
+ --files-to-instrument=coverage_instrumentation_input.txt
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+# Flags used to enable coverage instrumentation.
+# Flags should be listed in the same order that they are added in
+# build/config/coverage/BUILD.gn
+_COVERAGE_FLAGS = [
+ '-fprofile-instr-generate', '-fcoverage-mapping',
+ # Following experimental flags remove unused header functions from the
+ # coverage mapping data embedded in the test binaries, and the reduction
+ # of binary size enables building Chrome's large unit test targets on
+ # MacOS. Please refer to crbug.com/796290 for more details.
+ '-mllvm', '-limited-coverage-experimental=true'
+]
+
+# Map of exclusion lists indexed by target OS.
+# If no target OS is defined, or one is defined that doesn't have a specific
+# entry, use the 'default' exclusion_list. Anything added to 'default' will
+# apply to all platforms that don't have their own specific list.
+_COVERAGE_EXCLUSION_LIST_MAP = {
+ 'default': [],
+ 'chromeos': [
+ # These files caused clang to crash while compiling them. They are
+ # excluded pending an investigation into the underlying compiler bug.
+ '../../third_party/webrtc/p2p/base/p2p_transport_channel.cc',
+ '../../third_party/icu/source/common/uts46.cpp',
+ '../../third_party/icu/source/common/ucnvmbcs.cpp',
+ '../../base/android/android_image_reader_compat.cc',
+ ]
+}
+
+
+def _remove_flags_from_command(command):
+ # We need to remove the coverage flags for this file, but we only want to
+ # remove them if we see the exact sequence defined in _COVERAGE_FLAGS.
+ # That ensures that we only remove the flags added by GN when
+ # "use_clang_coverage" is true. Otherwise, we would remove flags set by
+ # other parts of the build system.
+ start_flag = _COVERAGE_FLAGS[0]
+ num_flags = len(_COVERAGE_FLAGS)
+ start_idx = 0
+ try:
+ while True:
+ idx = command.index(start_flag, start_idx)
+ start_idx = idx + 1
+ if command[idx:idx+num_flags] == _COVERAGE_FLAGS:
+ del command[idx:idx+num_flags]
+ break
+ except ValueError:
+ pass
+
+def main():
+ # TODO(crbug.com/898695): Make this wrapper work on Windows platform.
+ arg_parser = argparse.ArgumentParser()
+ arg_parser.usage = __doc__
+ arg_parser.add_argument(
+ '--files-to-instrument',
+ type=str,
+ help='Path to a file that contains a list of file names to instrument.')
+ arg_parser.add_argument(
+ '--target-os',
+ required=False,
+ help='The OS to compile for.')
+ arg_parser.add_argument('args', nargs=argparse.REMAINDER)
+ parsed_args = arg_parser.parse_args()
+
+ if (parsed_args.files_to_instrument and
+ not os.path.isfile(parsed_args.files_to_instrument)):
+ raise Exception('Path to the coverage instrumentation file: "%s" doesn\'t '
+ 'exist.' % parsed_args.files_to_instrument)
+
+ compile_command = parsed_args.args
+ if not any('clang' in s for s in compile_command):
+ return subprocess.call(compile_command)
+
+ try:
+ # The command is assumed to use Clang as the compiler, and the path to the
+ # source file is behind the -c argument, and the path to the source path is
+ # relative to the root build directory. For example:
+ # clang++ -fvisibility=hidden -c ../../base/files/file_path.cc -o \
+ # obj/base/base/file_path.o
+ index_dash_c = compile_command.index('-c')
+ except ValueError:
+ print '-c argument is not found in the compile command.'
+ raise
+
+ if index_dash_c + 1 >= len(compile_command):
+ raise Exception('Source file to be compiled is missing from the command.')
+
+ compile_source_file = compile_command[index_dash_c + 1]
+ target_os = parsed_args.target_os
+ if target_os not in _COVERAGE_EXCLUSION_LIST_MAP:
+ target_os = 'default'
+ exclusion_list = _COVERAGE_EXCLUSION_LIST_MAP[target_os]
+
+ if compile_source_file in exclusion_list:
+ _remove_flags_from_command(compile_command)
+ elif parsed_args.files_to_instrument:
+ with open(parsed_args.files_to_instrument) as f:
+ if compile_source_file not in f.read():
+ _remove_flags_from_command(compile_command)
+
+ return subprocess.call(compile_command)
+
+if __name__ == '__main__':
+ sys.exit(main())
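
On the GN side, a coverage build that ends up invoking this wrapper might be
configured roughly as follows; the input-file path is an assumption modeled on
the docstring's advice, not a value fixed by this commit:

  use_clang_coverage = true
  coverage_instrumentation_input_file =
      "//out/Release/coverage_instrumentation_input.txt"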
diff --git a/deps/v8/build/toolchain/concurrent_links.gni b/deps/v8/build/toolchain/concurrent_links.gni
new file mode 100644
index 0000000000..84607bc676
--- /dev/null
+++ b/deps/v8/build/toolchain/concurrent_links.gni
@@ -0,0 +1,60 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file should only be imported from files that define toolchains.
+# There's no way to enforce this exactly, but all toolchains are processed
+# in the context of the default_toolchain, so we can at least check for that.
+assert(current_toolchain == default_toolchain)
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Limit the number of concurrent links; we often want to run fewer
+ # links at once than we do compiles, because linking is memory-intensive.
+ # The default to use varies by platform and by the amount of memory
+ # available, so we call out to a script to get the right value.
+ concurrent_links = -1
+}
+
+if (concurrent_links == -1) {
+ if (use_thin_lto) {
+ _args = [
+ "--mem_per_link_gb=10",
+ "--reserve_mem_gb=10",
+ ]
+ } else if (use_sanitizer_coverage || use_fuzzing_engine) {
+ # Sanitizer coverage instrumentation increases linker memory consumption
+ # significantly.
+ _args = [ "--mem_per_link_gb=16" ]
+ } else if (is_win && symbol_level == 1 && !is_debug) {
+ _args = [ "--mem_per_link_gb=3" ]
+ } else if (is_win) {
+ _args = [ "--mem_per_link_gb=5" ]
+ } else if (is_mac) {
+ _args = [ "--mem_per_link_gb=4" ]
+ } else if (is_android && !is_component_build && symbol_level == 2) {
+ # Full debug symbols require large memory for link.
+ _args = [ "--mem_per_link_gb=25" ]
+ } else if (is_android && !is_debug && !using_sanitizer && symbol_level < 2) {
+ # Increase the number of concurrent links for release bots. Debug builds
+ # make heavier use of ProGuard, so their limit should not be raised.
+ # Sanitizers also increase the memory overhead.
+ if (symbol_level == 1) {
+ _args = [ "--mem_per_link_gb=6" ]
+ } else {
+ _args = [ "--mem_per_link_gb=4" ]
+ }
+ } else if (is_linux && !is_chromeos && symbol_level == 0) {
+ # Memory consumption on link without debug symbols is low on linux.
+ _args = [ "--mem_per_link_gb=3" ]
+ } else {
+ _args = []
+ }
+
+ # TODO(crbug.com/617429) Pass more build configuration info to the script
+ # so that we can compute better values.
+ concurrent_links = exec_script("get_concurrent_links.py", _args, "value")
+}
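
Since concurrent_links is a declared arg, the heuristic above can be bypassed
from args.gn, for example on a memory-constrained machine:

  concurrent_links = 2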
diff --git a/deps/v8/build/toolchain/cros/BUILD.gn b/deps/v8/build/toolchain/cros/BUILD.gn
new file mode 100644
index 0000000000..5a9561f232
--- /dev/null
+++ b/deps/v8/build/toolchain/cros/BUILD.gn
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+import("//build/toolchain/cros_toolchain.gni")
+
+# This is the normal toolchain for most targets.
+gcc_toolchain("target") {
+ ar = cros_target_ar
+ cc = cros_target_cc
+ cxx = cros_target_cxx
+
+ # Relativize the path when the compiler is given with a directory component
+ # (so it is not looked up in $PATH) and cc/cxx contains no additional flags.
+ if (cc != get_path_info(cc, "file") && string_replace(cc, " ", "") == cc) {
+ cc = rebase_path(cc, root_build_dir)
+ }
+ if (cxx != get_path_info(cxx, "file") && string_replace(cxx, " ", "") == cxx) {
+ cxx = rebase_path(cxx, root_build_dir)
+ }
+
+ ld = cxx
+ if (cros_target_ld != "") {
+ ld = cros_target_ld
+ }
+ if (cros_target_nm != "") {
+ nm = cros_target_nm
+ }
+ if (cros_target_readelf != "") {
+ readelf = cros_target_readelf
+ }
+ extra_cflags = cros_target_extra_cflags
+ extra_cppflags = cros_target_extra_cppflags
+ extra_cxxflags = cros_target_extra_cxxflags
+ extra_ldflags = cros_target_extra_ldflags
+
+ toolchain_args = {
+ cc_wrapper = ""
+ current_cpu = target_cpu
+ current_os = "chromeos"
+ is_clang = is_clang
+ use_debug_fission = use_debug_fission
+ use_gold = use_gold
+ use_sysroot = use_sysroot
+ }
+}
+
+# This is a special toolchain needed just for the nacl_bootstrap target in
+# //native_client/src/trusted/service_runtime/linux. It is identical
+# to ":target" except that it forces use_debug_fission, use_gold, and
+# use_sysroot off, and allows the user to set different sets of extra flags.
+gcc_toolchain("nacl_bootstrap") {
+ ar = cros_target_ar
+ cc = cros_target_cc
+ cxx = cros_target_cxx
+
+ # Relativize the path when the compiler is given with a directory component
+ # (so it is not looked up in $PATH) and cc/cxx contains no additional flags.
+ if (cc != get_path_info(cc, "file") && string_replace(cc, " ", "") == cc) {
+ cc = rebase_path(cc, root_build_dir)
+ }
+ if (cxx != get_path_info(cxx, "file") && string_replace(cxx, " ", "") == cxx) {
+ cxx = rebase_path(cxx, root_build_dir)
+ }
+ ld = cxx
+ if (cros_target_ld != "") {
+ ld = cros_target_ld
+ }
+ if (cros_target_nm != "") {
+ nm = cros_target_nm
+ }
+ if (cros_target_readelf != "") {
+ readelf = cros_target_readelf
+ }
+ extra_cflags = cros_nacl_bootstrap_extra_cflags
+ extra_cppflags = cros_nacl_bootstrap_extra_cppflags
+ extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags
+ extra_ldflags = cros_nacl_bootstrap_extra_ldflags
+
+ toolchain_args = {
+ cc_wrapper = ""
+ current_cpu = target_cpu
+ current_os = "chromeos"
+ is_clang = is_clang
+ use_debug_fission = false
+ use_gold = false
+ use_sysroot = false
+ }
+}
+
+gcc_toolchain("host") {
+ # These are args for the template.
+ ar = cros_host_ar
+ cc = cros_host_cc
+ cxx = cros_host_cxx
+
+ # Relativize the path when the compiler is given with a directory component
+ # (so it is not looked up in $PATH) and cc/cxx contains no additional flags.
+ if (cc != get_path_info(cc, "file") && string_replace(cc, " ", "") == cc) {
+ cc = rebase_path(cc, root_build_dir)
+ }
+ if (cxx != get_path_info(cxx, "file") && string_replace(cxx, " ", "") == cxx) {
+ cxx = rebase_path(cxx, root_build_dir)
+ }
+ ld = cxx
+ if (cros_host_ld != "") {
+ ld = cros_host_ld
+ }
+ if (cros_host_nm != "") {
+ nm = cros_host_nm
+ }
+ if (cros_host_readelf != "") {
+ readelf = cros_host_readelf
+ }
+ extra_cflags = cros_host_extra_cflags
+ extra_cppflags = cros_host_extra_cppflags
+ extra_cxxflags = cros_host_extra_cxxflags
+ extra_ldflags = cros_host_extra_ldflags
+
+ toolchain_args = {
+ cc_wrapper = ""
+ is_clang = cros_host_is_clang
+ current_cpu = host_cpu
+ current_os = "linux"
+ use_sysroot = false
+ }
+}
+
+gcc_toolchain("v8_snapshot") {
+ # These are args for the template.
+ ar = cros_v8_snapshot_ar
+ cc = cros_v8_snapshot_cc
+ cxx = cros_v8_snapshot_cxx
+
+ # Relativize the path when the compiler is given with a directory component
+ # (so it is not looked up in $PATH) and cc/cxx contains no additional flags.
+ if (cc != get_path_info(cc, "file") && string_replace(cc, " ", "") == cc) {
+ cc = rebase_path(cc, root_build_dir)
+ }
+ if (cxx != get_path_info(cxx, "file") && string_replace(cxx, " ", "") == cxx) {
+ cxx = rebase_path(cxx, root_build_dir)
+ }
+ ld = cxx
+ if (cros_v8_snapshot_ld != "") {
+ ld = cros_v8_snapshot_ld
+ }
+ if (cros_v8_snapshot_nm != "") {
+ nm = cros_v8_snapshot_nm
+ }
+ if (cros_v8_snapshot_readelf != "") {
+ readelf = cros_v8_snapshot_readelf
+ }
+ extra_cflags = cros_v8_snapshot_extra_cflags
+ extra_cppflags = cros_v8_snapshot_extra_cppflags
+ extra_cxxflags = cros_v8_snapshot_extra_cxxflags
+ extra_ldflags = cros_v8_snapshot_extra_ldflags
+
+ toolchain_args = {
+ cc_wrapper = ""
+ is_clang = cros_v8_snapshot_is_clang
+ if (target_cpu == "x86" || target_cpu == "arm" || target_cpu == "mipsel") {
+ current_cpu = "x86"
+ } else {
+ current_cpu = "x64"
+ }
+ v8_current_cpu = v8_target_cpu
+ current_os = "linux"
+ use_sysroot = false
+ }
+}
diff --git a/deps/v8/build/toolchain/cros_toolchain.gni b/deps/v8/build/toolchain/cros_toolchain.gni
new file mode 100644
index 0000000000..fdfdb0704e
--- /dev/null
+++ b/deps/v8/build/toolchain/cros_toolchain.gni
@@ -0,0 +1,81 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# CrOS builds must cross-compile on a Linux host for the actual CrOS
+# device target. There are many different CrOS devices so the build
+# system provides configuration variables that permit a CrOS build to
+# control the cross-compilation toolchain. However, requiring such
+# fine-grained specification is tedious for build-bots and developers.
+# Consequently, the CrOS build system defaults to a convenience
+# compilation mode where the compilation host is also the build target.
+#
+# Chrome can be compiled in this way with the gn variable:
+#
+# target_os = "chromeos"
+#
+# To perform a board-specific build, first obtain the correct system
+# root (http://goo.gl/aFB4XH) for the board. Then configure GN to use it
+# by setting appropriate cross-compilation variables.
+#
+# For example, to compile a Chrome source tree in /g/src for an
+# auron_paine CrOS device with the system root cached in /g/.cros_cache,
+# the following GN arguments must be provided to configure
+# cross-compilation with Goma acceleration. (NB: additional variables
+# will be necessary to successfully compile a working CrOS Chrome. See
+# the definition of GYP_DEFINES inside a sysroot shell.)
+#
+# goma_dir = "/g/.cros_cache/common/goma+2"
+# target_sysroot = "/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7644.0.0+sysroot_chromeos-base_chromeos-chrome.tar.xz"
+# cros_target_cc = "x86_64-cros-linux-gnu-gcc -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_cxx = "x86_64-cros-linux-gnu-g++ -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_ar = "x86_64-cros-linux-gnu-gcc-ar"
+# target_cpu = "x64"
+
+declare_args() {
+ # These must be specified for a board-specific build.
+ cros_target_ar = "ar"
+ cros_target_cc = "gcc"
+ cros_target_cxx = "g++"
+ cros_target_ld = ""
+ cros_target_nm = ""
+ cros_target_readelf = ""
+
+ # These can be optionally set. The "_cppflags" will be applied to *both*
+ # C and C++ files; use "_cxxflags" for C++-only flags.
+ cros_target_extra_cflags = ""
+ cros_target_extra_cppflags = ""
+ cros_target_extra_cxxflags = ""
+ cros_target_extra_ldflags = ""
+
+ # is_clang is used instead of cros_target_is_clang
+
+ cros_host_ar = "ar"
+ cros_host_cc = "gcc"
+ cros_host_cxx = "g++"
+ cros_host_ld = ""
+ cros_host_nm = ""
+ cros_host_readelf = ""
+ cros_host_extra_cflags = ""
+ cros_host_extra_cppflags = ""
+ cros_host_extra_cxxflags = ""
+ cros_host_extra_ldflags = ""
+ cros_host_is_clang = false
+
+ cros_v8_snapshot_ar = "ar"
+ cros_v8_snapshot_cc = "gcc"
+ cros_v8_snapshot_cxx = "g++"
+ cros_v8_snapshot_ld = ""
+ cros_v8_snapshot_nm = ""
+ cros_v8_snapshot_readelf = ""
+ cros_v8_snapshot_extra_cflags = ""
+ cros_v8_snapshot_extra_cppflags = ""
+ cros_v8_snapshot_extra_cxxflags = ""
+ cros_v8_snapshot_extra_ldflags = ""
+ cros_v8_snapshot_is_clang = false
+
+ cros_nacl_bootstrap_extra_cflags = ""
+ cros_nacl_bootstrap_extra_cppflags = ""
+ cros_nacl_bootstrap_extra_cxxflags = ""
+ cros_nacl_bootstrap_extra_ldflags = ""
+}
diff --git a/deps/v8/build/toolchain/fuchsia/BUILD.gn b/deps/v8/build/toolchain/fuchsia/BUILD.gn
new file mode 100644
index 0000000000..06ac5e5b74
--- /dev/null
+++ b/deps/v8/build/toolchain/fuchsia/BUILD.gn
@@ -0,0 +1,41 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/config.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# Fuchsia builds using the Clang toolchain, with most parameters common across
+# the different target architectures.
+template("fuchsia_clang_toolchain") {
+ clang_toolchain(target_name) {
+ assert(host_os == "linux" || host_os == "mac")
+ assert(defined(invoker.toolchain_args),
+ "toolchain_args must be defined for fuchsia_clang_toolchain()")
+
+ # We want to build and strip binaries, but retain the unstripped binaries
+ # in runtime_deps to make them available for isolates.
+ if (host_os == "linux") {
+ strip = rebase_path("//buildtools/third_party/eu-strip/bin/eu-strip",
+ root_build_dir)
+ use_unstripped_as_runtime_outputs = true
+ }
+
+ default_shlib_subdir = "/lib"
+
+ toolchain_args = invoker.toolchain_args
+ toolchain_args.current_os = "fuchsia"
+ }
+}
+
+fuchsia_clang_toolchain("x64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ }
+}
+
+fuchsia_clang_toolchain("arm64") {
+ toolchain_args = {
+ current_cpu = "arm64"
+ }
+}
diff --git a/deps/v8/build/toolchain/fuchsia/OWNERS b/deps/v8/build/toolchain/fuchsia/OWNERS
new file mode 100644
index 0000000000..3f809e82b1
--- /dev/null
+++ b/deps/v8/build/toolchain/fuchsia/OWNERS
@@ -0,0 +1 @@
+scottmg@chromium.org
diff --git a/deps/v8/build/toolchain/gcc_link_wrapper.py b/deps/v8/build/toolchain/gcc_link_wrapper.py
new file mode 100755
index 0000000000..8892f14bfe
--- /dev/null
+++ b/deps/v8/build/toolchain/gcc_link_wrapper.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a linking command and optionally a strip command.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("link"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+# When running on a Windows host and using a toolchain whose tools are
+# actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+# executables, the "command" to run has to be prefixed with this magic.
+# The GN toolchain definitions take care of that for when GN/Ninja is
+# running the tool directly. When that command is passed in to this
+# script, it appears as a unitary string but needs to be split up so that
+# just 'cmd' is the actual command given to Python's subprocess module.
+BAT_PREFIX = 'cmd /c call '
+
+def CommandToRun(command):
+ if command[0].startswith(BAT_PREFIX):
+ command = command[0].split(None, 3) + command[1:]
+ return command
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--strip',
+ help='The strip binary to run',
+ metavar='PATH')
+ parser.add_argument('--unstripped-file',
+ help='Executable file produced by linking command',
+ metavar='FILE')
+ parser.add_argument('--map-file',
+ help=('Use -Wl,-Map to generate a map file. Will be '
+ 'gzipped if extension ends with .gz'),
+ metavar='FILE')
+ parser.add_argument('--output',
+ required=True,
+ help='Final output executable file',
+ metavar='FILE')
+ parser.add_argument('command', nargs='+',
+ help='Linking command')
+ args = parser.parse_args()
+
+ # Work-around for gold being slow-by-default. http://crbug.com/632230
+ fast_env = dict(os.environ)
+ fast_env['LC_ALL'] = 'C'
+ result = wrapper_utils.RunLinkWithOptionalMapFile(args.command, env=fast_env,
+ map_file=args.map_file)
+ if result != 0:
+ return result
+
+ # Finally, strip the linked executable (if desired).
+ if args.strip:
+ result = subprocess.call(CommandToRun([
+ args.strip, '-o', args.output, args.unstripped_file
+ ]))
+
+ return result
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/deps/v8/build/toolchain/gcc_solink_wrapper.py b/deps/v8/build/toolchain/gcc_solink_wrapper.py
new file mode 100755
index 0000000000..cb1c02d24e
--- /dev/null
+++ b/deps/v8/build/toolchain/gcc_solink_wrapper.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs 'ld -shared' and generates a .TOC file that's untouched when unchanged.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("solink"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+def CollectSONAME(args):
+ """Replaces: readelf -d $sofile | grep SONAME"""
+ toc = ''
+ readelf = subprocess.Popen(wrapper_utils.CommandToRun(
+ [args.readelf, '-d', args.sofile]), stdout=subprocess.PIPE, bufsize=-1)
+ for line in readelf.stdout:
+ if 'SONAME' in line:
+ toc += line
+ return readelf.wait(), toc
+
+
+def CollectDynSym(args):
+ """Replaces: nm --format=posix -g -D $sofile | cut -f1-2 -d' '"""
+ toc = ''
+ nm = subprocess.Popen(wrapper_utils.CommandToRun([
+ args.nm, '--format=posix', '-g', '-D', args.sofile]),
+ stdout=subprocess.PIPE, bufsize=-1)
+ for line in nm.stdout:
+ toc += ' '.join(line.split(' ', 2)[:2]) + '\n'
+ return nm.wait(), toc
+
+
+def CollectTOC(args):
+ result, toc = CollectSONAME(args)
+ if result == 0:
+ result, dynsym = CollectDynSym(args)
+ toc += dynsym
+ return result, toc
+
+
+def UpdateTOC(tocfile, toc):
+ if os.path.exists(tocfile):
+ old_toc = open(tocfile, 'r').read()
+ else:
+ old_toc = None
+ if toc != old_toc:
+ open(tocfile, 'w').write(toc)
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--readelf',
+ required=True,
+ help='The readelf binary to run',
+ metavar='PATH')
+ parser.add_argument('--nm',
+ required=True,
+ help='The nm binary to run',
+ metavar='PATH')
+ parser.add_argument('--strip',
+ help='The strip binary to run',
+ metavar='PATH')
+ parser.add_argument('--sofile',
+ required=True,
+ help='Shared object file produced by linking command',
+ metavar='FILE')
+ parser.add_argument('--tocfile',
+ required=True,
+ help='Output table-of-contents file',
+ metavar='FILE')
+ parser.add_argument('--map-file',
+ help=('Use -Wl,-Map to generate a map file. Will be '
+ 'gzipped if extension ends with .gz'),
+ metavar='FILE')
+ parser.add_argument('--output',
+ required=True,
+ help='Final output shared object file',
+ metavar='FILE')
+ parser.add_argument('command', nargs='+',
+ help='Linking command')
+ args = parser.parse_args()
+
+ # Work-around for gold being slow-by-default. http://crbug.com/632230
+ fast_env = dict(os.environ)
+ fast_env['LC_ALL'] = 'C'
+
+ # First, run the actual link.
+ command = wrapper_utils.CommandToRun(args.command)
+ result = wrapper_utils.RunLinkWithOptionalMapFile(command, env=fast_env,
+ map_file=args.map_file)
+
+ if result != 0:
+ return result
+
+ # Next, generate the contents of the TOC file.
+ result, toc = CollectTOC(args)
+ if result != 0:
+ return result
+
+ # If there is an existing TOC file with identical contents, leave it alone.
+ # Otherwise, write out the TOC file.
+ UpdateTOC(args.tocfile, toc)
+
+ # Finally, strip the linked shared object file (if desired).
+ if args.strip:
+ result = subprocess.call(wrapper_utils.CommandToRun(
+ [args.strip, '-o', args.output, args.sofile]))
+
+ return result
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/deps/v8/build/toolchain/gcc_toolchain.gni b/deps/v8/build/toolchain/gcc_toolchain.gni
new file mode 100644
index 0000000000..80e2a362a5
--- /dev/null
+++ b/deps/v8/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,643 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/v8_target_cpu.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_nacl) {
+ # To keep NaCl variables out of builds that don't include NaCl, all
+ # variables defined in nacl/config.gni referenced here should be protected by
+ # is_nacl conditions.
+ import("//build/config/nacl/config.gni")
+}
+
+declare_args() {
+ # Enables whitelist generation for IDR_ grit defines seen by the compiler.
+ # Currently works only on some platforms and is enabled by default for
+ # release builds.
+ # Requires debug info, so disabled for symbol_level=0 & strip_debug_info=true.
+ enable_resource_whitelist_generation =
+ is_official_build &&
+ # Don't enable for Android-on-Chrome OS so that they can build with
+ # symbol_level=0 without this failing (crbug.com/891164).
+ (target_os == "android" || target_os == "win")
+}
+
+# When the arg is set via args.gn, it applies to all toolchains. In order to not
+# hit the assert in grit_rule.gni, explicitly disable for host toolchains.
+if (is_linux && target_os == "android") {
+ enable_resource_whitelist_generation = false
+}
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+# - ar
+# - cc
+# - cxx
+# - ld
+#
+# Optional parameters that control the tools:
+#
+# - extra_cflags
+# Extra flags to be appended when compiling C files (but not C++ files).
+# - extra_cppflags
+# Extra flags to be appended when compiling both C and C++ files. "CPP"
+# stands for "C PreProcessor" in this context, although it can be
+# used for non-preprocessor flags as well. Not to be confused with
+# "CXX" (which follows).
+# - extra_cxxflags
+# Extra flags to be appended when compiling C++ files (but not C files).
+# - extra_asmflags
+# Extra flags to be appended when compiling assembly.
+# - extra_ldflags
+# Extra flags to be appended when linking
+#
+# - libs_section_prefix
+# - libs_section_postfix
+# The contents of these strings, if specified, will be placed around
+# the libs section of the linker line. It allows one to inject libraries
+# at the beginning and end for all targets in a toolchain.
+# - solink_libs_section_prefix
+# - solink_libs_section_postfix
+# Same as libs_section_{pre,post}fix except used for solink instead of link.
+# - link_outputs
+# The content of this array, if specified, will be added to the list of
+# outputs from the link command. This can be useful in conjunction with
+# the post_link parameter.
+# - use_unstripped_as_runtime_outputs
+# When |strip| is set, mark unstripped executables as runtime deps rather
+# than stripped ones.
+# - post_link
+# The content of this string, if specified, will be run as a separate
+# command following the link command.
+# - deps
+# Just forwarded to the toolchain definition.
+# - executable_extension
+# If this string is specified it will be used for the file extension
+# for an executable, rather than using no extension; targets will
+# still be able to override the extension using the output_extension
+# variable.
+# - rebuild_define
+# The contents of this string, if specified, will be passed as a #define
+# to the toolchain. It can be used to force recompiles whenever a
+# toolchain is updated.
+# - shlib_extension
+# If this string is specified it will be used for the file extension
+# for a shared library, rather than default value specified in
+# toolchain.gni
+# - strip
+# Location of the strip executable. When specified, strip will be run on
+# all shared libraries and executables as they are built. The pre-stripped
+# artifacts will be put in lib.unstripped/ and exe.unstripped/.
+template("gcc_toolchain") {
+ toolchain(target_name) {
+ assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+ assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+ assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+ assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+
+ # This define changes when the toolchain changes, forcing a rebuild.
+ # Nothing should ever use this define.
+ if (defined(invoker.rebuild_define)) {
+ rebuild_string = "-D" + invoker.rebuild_define + " "
+ } else {
+ rebuild_string = ""
+ }
+
+ # GN's syntax can't handle more than one scope dereference at once, like
+ # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
+ # args so we can do "invoker_toolchain_args.foo".
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must specify toolchain_args")
+ invoker_toolchain_args = invoker.toolchain_args
+ assert(defined(invoker_toolchain_args.current_cpu),
+ "toolchain_args must specify a current_cpu")
+ assert(defined(invoker_toolchain_args.current_os),
+ "toolchain_args must specify a current_os")
+
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker_toolchain_args, "*")
+
+ # The host toolchain value computed by the default toolchain's setup
+ # needs to be passed through unchanged to all secondary toolchains to
+ # ensure that it's always the same, regardless of the values that may be
+ # set on those toolchains.
+ host_toolchain = host_toolchain
+
+ if (!defined(invoker_toolchain_args.v8_current_cpu)) {
+ v8_current_cpu = invoker_toolchain_args.current_cpu
+ }
+ }
+
+ # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+ # toolchain args, use those values, otherwise default to the global one.
+ # This works because the only reasonable override that toolchains might
+ # supply for these values are to force-disable them.
+ if (defined(toolchain_args.use_goma)) {
+ toolchain_uses_goma = toolchain_args.use_goma
+ } else {
+ toolchain_uses_goma = use_goma
+ }
+ if (defined(toolchain_args.cc_wrapper)) {
+ toolchain_cc_wrapper = toolchain_args.cc_wrapper
+ } else {
+ toolchain_cc_wrapper = cc_wrapper
+ }
+ assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+ "Goma and cc_wrapper can't be used together.")
+
+ # If has_gomacc_path is set in a Simple Chrome build, we assume that the
+ # *chromeos* compiler wrapper finds gomacc via the GOMACC_PATH envvar, so we
+ # do not prepend it ourselves.
+ # Note: In this case, gomacc is still used for host toolchain compilation.
+ if (toolchain_uses_goma &&
+ (!has_gomacc_path || invoker_toolchain_args.current_os != "chromeos")) {
+ goma_path = "$goma_dir/gomacc"
+ compiler_prefix = "${goma_path} "
+ } else {
+ compiler_prefix = "${toolchain_cc_wrapper} "
+ }
+
+ # Create a distinct variable for "asm", since coverage runs pass a bunch of
+ # flags to clang/clang++ that are nonsensical on assembler runs.
+ asm_prefix = compiler_prefix
+
+ # A specific toolchain may wish to avoid coverage instrumentation, so we
+ # allow the global "use_clang_coverage" arg to be overridden.
+ if (defined(toolchain_args.use_clang_coverage)) {
+ toolchain_use_clang_coverage = toolchain_args.use_clang_coverage
+ } else {
+ toolchain_use_clang_coverage = use_clang_coverage
+ }
+
+ # For a coverage build, we use the wrapper script globally so that it can
+ # remove coverage cflags from files that should not have them.
+ if (toolchain_use_clang_coverage) {
+ # "coverage_instrumentation_input_file" is set in args.gn, but it can be
+ # overridden by a toolchain config.
+ if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+ toolchain_coverage_instrumentation_input_file =
+ toolchain_args.coverage_instrumentation_input_file
+ } else {
+ toolchain_coverage_instrumentation_input_file =
+ coverage_instrumentation_input_file
+ }
+
+ _coverage_wrapper =
+ rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+ root_build_dir)
+
+ # The wrapper needs to know what OS we target because it uses that to
+ # select a list of files that should not be instrumented.
+ _coverage_wrapper = _coverage_wrapper + " --target-os=" + target_os
+
+ # We want to instrument everything if there is no input file set.
+ # If there is a file, we need to give it to the wrapper script so it
+ # can instrument only those files.
+ if (toolchain_coverage_instrumentation_input_file != "") {
+ _coverage_wrapper =
+ _coverage_wrapper + " --files-to-instrument=" +
+ rebase_path(toolchain_coverage_instrumentation_input_file,
+ root_build_dir)
+ }
+ compiler_prefix = "${_coverage_wrapper} " + compiler_prefix
+ }
+
+ cc = compiler_prefix + invoker.cc
+ cxx = compiler_prefix + invoker.cxx
+ asm = asm_prefix + invoker.cc
+ ar = invoker.ar
+ ld = invoker.ld
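+
+ # Illustrative note: with goma enabled, cc expands to something like
+ # "$goma_dir/gomacc <invoker.cc>"; with a cc_wrapper such as ccache, it
+ # becomes "ccache <invoker.cc>".
+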
+ if (defined(invoker.readelf)) {
+ readelf = invoker.readelf
+ } else {
+ readelf = "readelf"
+ }
+ if (defined(invoker.nm)) {
+ nm = invoker.nm
+ } else {
+ nm = "nm"
+ }
+
+ if (defined(invoker.shlib_extension)) {
+ default_shlib_extension = invoker.shlib_extension
+ } else {
+ default_shlib_extension = shlib_extension
+ }
+
+ if (defined(invoker.default_shlib_subdir)) {
+ default_shlib_subdir = invoker.default_shlib_subdir
+ } else {
+ default_shlib_subdir = ""
+ }
+
+ if (defined(invoker.executable_extension)) {
+ default_executable_extension = invoker.executable_extension
+ } else {
+ default_executable_extension = ""
+ }
+
+ # Bring these into our scope for string interpolation with default values.
+ if (defined(invoker.libs_section_prefix)) {
+ libs_section_prefix = invoker.libs_section_prefix
+ } else {
+ libs_section_prefix = ""
+ }
+
+ if (defined(invoker.libs_section_postfix)) {
+ libs_section_postfix = invoker.libs_section_postfix
+ } else {
+ libs_section_postfix = ""
+ }
+
+ if (defined(invoker.solink_libs_section_prefix)) {
+ solink_libs_section_prefix = invoker.solink_libs_section_prefix
+ } else {
+ solink_libs_section_prefix = ""
+ }
+
+ if (defined(invoker.solink_libs_section_postfix)) {
+ solink_libs_section_postfix = invoker.solink_libs_section_postfix
+ } else {
+ solink_libs_section_postfix = ""
+ }
+
+ if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
+ extra_cflags = " " + invoker.extra_cflags
+ } else {
+ extra_cflags = ""
+ }
+
+ if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
+ extra_cppflags = " " + invoker.extra_cppflags
+ } else {
+ extra_cppflags = ""
+ }
+
+ if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
+ extra_cxxflags = " " + invoker.extra_cxxflags
+ } else {
+ extra_cxxflags = ""
+ }
+
+ if (defined(invoker.extra_asmflags) && invoker.extra_asmflags != "") {
+ extra_asmflags = " " + invoker.extra_asmflags
+ } else {
+ extra_asmflags = ""
+ }
+
+ if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
+ extra_ldflags = " " + invoker.extra_ldflags
+ } else {
+ extra_ldflags = ""
+ }
+
+ enable_linker_map = defined(invoker.enable_linker_map) &&
+ invoker.enable_linker_map && generate_linker_map
+
+ # These library switches can apply to all tools below.
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
+
+ # Object files go in this directory.
+ object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+ tool("cc") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
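+
+ # -MMD -MF writes a gcc-style depfile (non-system header dependencies)
+ # next to each object; Ninja consumes it via depsformat = "gcc" below.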
+ command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("cxx") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("asm") {
+ # For GCC we can just use the C compiler to compile assembly.
+ depfile = "{{output}}.d"
+ command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "ASM {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("alink") {
+ if (current_os == "aix") {
+ # AIX does not support either -D (deterministic output) or response
+ # files.
+ command = "$ar -X64 {{arflags}} -r -c -s {{output}} {{inputs}}"
+ } else {
+ rspfile = "{{output}}.rsp"
+ rspfile_content = "{{inputs}}"
+ command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @\"$rspfile\""
+ }
+
+ # Remove the output file first so that ar doesn't try to modify the
+ # existing file.
+ if (host_os == "win") {
+ tool_wrapper_path =
+ rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+ command = "cmd /c $python_path $tool_wrapper_path delete-file {{output}} && $command"
+ } else {
+ command = "rm -f {{output}} && $command"
+ }
+
+ # Almost all targets build with //build/config/compiler:thin_archive which
+ # adds -T to arflags.
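+ # (-T makes GNU ar produce a thin archive, which references the object
+ # files in place instead of copying them into the archive.)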
+ description = "AR {{output}}"
+ outputs = [
+ "{{output_dir}}/{{target_output_name}}{{output_extension}}",
+ ]
+
+ # Static libraries go in the target out directory by default so we can
+ # generate different targets with the same name and not have them collide.
+ default_output_dir = "{{target_out_dir}}"
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ }
+
+ tool("solink") {
+ soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
+ sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir.
+ rspfile = sofile + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ if (defined(invoker.strip)) {
+ unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
+ } else {
+ unstripped_sofile = sofile
+ }
+
+ # These variables are not built into GN but are helpers that
+ # implement (1) linking to produce a .so, (2) extracting the symbols
+ # from that file, and (3) overwriting the existing .TOC file only if
+ # the extracted list differs from it.
+ tocfile = sofile + ".TOC"
+
+ link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+
+ # Generate a map file to be used for binary size analysis.
+ # Map file adds ~10% to the link time on a z620.
+ # With target_os="android", libchrome.so.map.gz is ~20MB.
+ map_switch = ""
+ if (enable_linker_map && is_official_build) {
+ map_file = "$unstripped_sofile.map.gz"
+ map_switch = " --map-file \"$map_file\""
+ }
+
+ assert(defined(readelf), "to solink you must have a readelf")
+ assert(defined(nm), "to solink you must have an nm")
+ strip_switch = ""
+ if (defined(invoker.strip)) {
+ strip_switch = "--strip=${invoker.strip} "
+ }
+
+ # This needs a Python script to avoid using a complex shell command
+ # requiring sh control structures, pipelines, and POSIX utilities.
+ # The host might not have a POSIX shell and utilities (e.g. Windows).
+ solink_wrapper =
+ rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir)
+ command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch--sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command"
+
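+ # --start-group/--end-group makes the linker rescan the grouped archives,
+ # which is presumably needed here to resolve circular references among
+ # the inputs.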
+ if (target_cpu == "mipsel" && is_component_build && is_android) {
+ rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix -Wl,--end-group"
+ } else {
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+ }
+
+ description = "SOLINK $sofile"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = default_shlib_extension
+
+ default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+
+ output_prefix = "lib"
+
+ # Since the above commands only update the .TOC file when it changes, ask
+ # Ninja to check if the timestamp actually changed to know if downstream
+ # dependencies should be recompiled.
+ restat = true
+
+ # Tell GN about the output files. It will link to the sofile but use the
+ # tocfile for dependency management.
+ outputs = [
+ sofile,
+ tocfile,
+ ]
+ if (sofile != unstripped_sofile) {
+ outputs += [ unstripped_sofile ]
+ if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+ invoker.use_unstripped_as_runtime_outputs) {
+ runtime_outputs = [ unstripped_sofile ]
+ }
+ }
+ if (defined(map_file)) {
+ outputs += [ map_file ]
+ }
+ link_output = sofile
+ depend_output = tocfile
+ }
+
+ tool("solink_module") {
+ soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
+ sofile = "{{output_dir}}/$soname"
+ rspfile = sofile + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ if (defined(invoker.strip)) {
+ unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
+ } else {
+ unstripped_sofile = sofile
+ }
+
+ command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+
+ if (defined(invoker.strip)) {
+ strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\""
+ command += " && " + strip_command
+ }
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+ description = "SOLINK_MODULE $sofile"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ if (defined(invoker.loadable_module_extension)) {
+ default_output_extension = invoker.loadable_module_extension
+ } else {
+ default_output_extension = default_shlib_extension
+ }
+
+ default_output_dir = "{{root_out_dir}}${default_shlib_subdir}"
+
+ output_prefix = "lib"
+
+ outputs = [
+ sofile,
+ ]
+ if (sofile != unstripped_sofile) {
+ outputs += [ unstripped_sofile ]
+ if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+ invoker.use_unstripped_as_runtime_outputs) {
+ runtime_outputs = [ unstripped_sofile ]
+ }
+ }
+ }
+
+ tool("link") {
+ exename = "{{target_output_name}}{{output_extension}}"
+ outfile = "{{output_dir}}/$exename"
+ rspfile = "$outfile.rsp"
+ unstripped_outfile = outfile
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = default_executable_extension
+
+ default_output_dir = "{{root_out_dir}}"
+
+ if (defined(invoker.strip)) {
+ unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
+ }
+
+ # Generate a map file to be used for binary size analysis.
+ # Map file adds ~10% to the link time on a z620.
+ # With target_os="android", libchrome.so.map.gz is ~20MB.
+ map_switch = ""
+ if (enable_linker_map && is_official_build) {
+ map_file = "$unstripped_outfile.map.gz"
+ map_switch = " --map-file \"$map_file\""
+ }
+
+ start_group_flag = ""
+ end_group_flag = ""
+ if (current_os != "aix") {
+ # the "--start-group .. --end-group" feature isn't available on the aix ld.
+ start_group_flag = "-Wl,--start-group"
+ end_group_flag = "-Wl,--end-group "
+ }
+ link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag $libs_section_prefix {{libs}} $libs_section_postfix"
+
+ strip_switch = ""
+
+ if (defined(invoker.strip)) {
+ strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\""
+ }
+
+ link_wrapper =
+ rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
+ command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch -- $link_command"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs}}"
+ outputs = [
+ outfile,
+ ]
+ if (outfile != unstripped_outfile) {
+ outputs += [ unstripped_outfile ]
+ if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+ invoker.use_unstripped_as_runtime_outputs) {
+ runtime_outputs = [ unstripped_outfile ]
+ }
+ }
+ if (defined(invoker.link_outputs)) {
+ outputs += invoker.link_outputs
+ }
+ if (defined(map_file)) {
+ outputs += [ map_file ]
+ }
+ }
+
+ # These two are really entirely generic, but have to be repeated in
+ # each toolchain because GN doesn't allow a template to be used here.
+ # See //build/toolchain/toolchain.gni for details.
+ tool("stamp") {
+ command = stamp_command
+ description = stamp_description
+ }
+ tool("copy") {
+ command = copy_command
+ description = copy_description
+ }
+
+ tool("action") {
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+
+ forward_variables_from(invoker, [ "deps" ])
+ }
+}
+
+# This is a shorthand for gcc_toolchain instances based on the Chromium-built
+# version of Clang. Only current_cpu and current_os need to be specified in
+# toolchain_args by the invoker, plus optionally toolprefix in the
+# cross-compile case. Note that for a cross-compile this toolchain requires
+# a config to pass the appropriate -target option, or else it will actually
+# just do a native compile. Any additional toolchain_args are forwarded
+# unchanged.
+template("clang_toolchain") {
+ if (defined(invoker.toolprefix)) {
+ toolprefix = invoker.toolprefix
+ } else {
+ toolprefix = ""
+ }
+
+ gcc_toolchain(target_name) {
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ cc = "$prefix/clang"
+ cxx = "$prefix/clang++"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ ar = "${prefix}/llvm-ar"
+ nm = "${toolprefix}nm"
+
+ forward_variables_from(invoker,
+ [
+ "strip",
+ "default_shlib_subdir",
+ "enable_linker_map",
+ "use_unstripped_as_runtime_outputs",
+ ])
+
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+ is_clang = true
+ }
+ }
+}
diff --git a/deps/v8/build/toolchain/get_concurrent_links.py b/deps/v8/build/toolchain/get_concurrent_links.py
new file mode 100644
index 0000000000..e5121c77a9
--- /dev/null
+++ b/deps/v8/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,86 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run in the
+# build as a function of the machine spec. It's based on
+# GetDefaultConcurrentLinks in GYP.
+
+from __future__ import print_function
+
+import multiprocessing
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+def _GetTotalMemoryInBytes():
+ if sys.platform in ('win32', 'cygwin'):
+ import ctypes
+
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ("dwLength", ctypes.c_ulong),
+ ("dwMemoryLoad", ctypes.c_ulong),
+ ("ullTotalPhys", ctypes.c_ulonglong),
+ ("ullAvailPhys", ctypes.c_ulonglong),
+ ("ullTotalPageFile", ctypes.c_ulonglong),
+ ("ullAvailPageFile", ctypes.c_ulonglong),
+ ("ullTotalVirtual", ctypes.c_ulonglong),
+ ("ullAvailVirtual", ctypes.c_ulonglong),
+ ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+ return stat.ullTotalPhys
+ elif sys.platform.startswith('linux'):
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+ return float(match.group(1)) * 2**10
+ elif sys.platform == 'darwin':
+ try:
+ return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ except Exception:
+ return 0
+ # TODO(scottmg): Implement this for other platforms.
+ return 0
+
+
+def _GetDefaultConcurrentLinks(mem_per_link_gb, reserve_mem_gb):
+ # Inherit the legacy environment variable for people that have set it in GYP.
+ pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
+ if pool_size:
+ return pool_size
+
+ mem_total_bytes = _GetTotalMemoryInBytes()
+ mem_total_bytes = max(0, mem_total_bytes - reserve_mem_gb * 2**30)
+ num_concurrent_links = int(max(1, mem_total_bytes / mem_per_link_gb / 2**30))
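+
+ # Worked example (hypothetical numbers): a 64 GB machine with the default
+ # mem_per_link_gb of 8 and no reserve yields 64 / 8 = 8 links, which is
+ # then bounded by the hard cap and CPU count below.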
+ hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+
+ try:
+ cpu_cap = multiprocessing.cpu_count()
+ except NotImplementedError:
+ cpu_cap = 1
+
+ return min(num_concurrent_links, hard_cap, cpu_cap)
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('--mem_per_link_gb', action="store", type="int", default=8)
+ parser.add_option('--reserve_mem_gb', action="store", type="int", default=0)
+ parser.disable_interspersed_args()
+ options, _ = parser.parse_args()
+
+ print(_GetDefaultConcurrentLinks(options.mem_per_link_gb,
+ options.reserve_mem_gb))
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/toolchain/get_cpu_count.py b/deps/v8/build/toolchain/get_cpu_count.py
new file mode 100644
index 0000000000..765c7c78f6
--- /dev/null
+++ b/deps/v8/build/toolchain/get_cpu_count.py
@@ -0,0 +1,23 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script prints the CPU count, which is used to set the capacity of the
+# action pool.
+
+from __future__ import print_function
+
+import multiprocessing
+import sys
+
+def main():
+ try:
+ cpu_count = multiprocessing.cpu_count()
+ except NotImplementedError:
+ cpu_count = 1
+
+ print(cpu_count)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/toolchain/goma.gni b/deps/v8/build/toolchain/goma.gni
new file mode 100644
index 0000000000..2fbf572389
--- /dev/null
+++ b/deps/v8/build/toolchain/goma.gni
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+
+declare_args() {
+ # Set to true to enable distributed compilation using Goma.
+ use_goma = false
+
+ # This flag is for the ChromeOS compiler wrapper. When the gomacc path is
+ # passed via the GOMACC_PATH environment variable, the ChromeOS compiler
+ # wrapper invokes gomacc internally.
+ has_gomacc_path = false
+
+ # Set the default value based on the platform.
+ if (host_os == "win") {
+ # Absolute directory containing the gomacc.exe binary.
+ goma_dir = "C:\src\goma\goma-win64"
+ } else {
+ if (getenv("GOMA_DIR") != "") {
+ # Absolute directory containing the gomacc binary.
+ goma_dir = getenv("GOMA_DIR")
+ } else {
+ # Absolute directory containing the gomacc binary.
+ goma_dir = getenv("HOME") + "/goma"
+ }
+ }
+}
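+
+# An args.gn enabling Goma might then look like (illustrative paths):
+#
+#   use_goma = true
+#   goma_dir = "/home/build/goma"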
diff --git a/deps/v8/build/toolchain/linux/BUILD.gn b/deps/v8/build/toolchain/linux/BUILD.gn
new file mode 100644
index 0000000000..fa8b17e9db
--- /dev/null
+++ b/deps/v8/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,300 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+clang_toolchain("clang_ppc64") {
+ enable_linker_map = true
+ toolchain_args = {
+ current_cpu = "ppc64"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_arm") {
+ toolprefix = "arm-linux-gnueabihf-"
+ toolchain_args = {
+ current_cpu = "arm"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_arm64") {
+ toolprefix = "aarch64-linux-gnu-"
+ toolchain_args = {
+ current_cpu = "arm64"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("arm64") {
+ toolprefix = "aarch64-linux-gnu-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+
+ ar = "${toolprefix}ar"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+
+ toolchain_args = {
+ current_cpu = "arm64"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+gcc_toolchain("arm") {
+ toolprefix = "arm-linux-gnueabihf-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+
+ ar = "${toolprefix}ar"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+
+ toolchain_args = {
+ current_cpu = "arm"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+clang_toolchain("clang_x86") {
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_x86_v8_arm") {
+ toolchain_args = {
+ current_cpu = "x86"
+ v8_current_cpu = "arm"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_x86_v8_mipsel") {
+ toolchain_args = {
+ current_cpu = "x86"
+ v8_current_cpu = "mipsel"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_x86_v8_mips") {
+ toolchain_args = {
+ current_cpu = "x86"
+ v8_current_cpu = "mips"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("x86") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+clang_toolchain("clang_x64") {
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_x64_v8_arm64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ v8_current_cpu = "arm64"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_x64_v8_mips64el") {
+ toolchain_args = {
+ current_cpu = "x64"
+ v8_current_cpu = "mips64el"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_x64_v8_mips64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ v8_current_cpu = "mips64"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("x64") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+clang_toolchain("clang_mipsel") {
+ toolchain_args = {
+ current_cpu = "mipsel"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_mips64el") {
+ toolchain_args = {
+ current_cpu = "mips64el"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("mipsel") {
+ toolprefix = "mipsel-linux-gnu-"
+
+ cc = "${toolprefix}gcc"
+ cxx = " ${toolprefix}g++"
+ ar = "${toolprefix}ar"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+
+ toolchain_args = {
+ cc_wrapper = ""
+ current_cpu = "mipsel"
+ current_os = "linux"
+ is_clang = false
+ use_goma = false
+ }
+}
+
+gcc_toolchain("mips64el") {
+ toolprefix = "mips64el-linux-gnuabi64-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+ ar = "${toolprefix}ar"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+
+ toolchain_args = {
+ cc_wrapper = ""
+ current_cpu = "mips64el"
+ current_os = "linux"
+ is_clang = false
+ use_goma = false
+ }
+}
+
+clang_toolchain("clang_s390x") {
+ toolchain_args = {
+ current_cpu = "s390x"
+ current_os = "linux"
+ is_clang = true
+ }
+}
+
+gcc_toolchain("s390x") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ toolchain_args = {
+ current_cpu = "s390x"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+gcc_toolchain("ppc64") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ toolchain_args = {
+ current_cpu = "ppc64"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+gcc_toolchain("mips") {
+ toolprefix = "mips-linux-gnu-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+ ar = "${toolprefix}ar"
+ ld = cxx
+
+ toolchain_args = {
+ current_cpu = "mips"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+gcc_toolchain("mips64") {
+ toolprefix = "mips64-linux-gnuabi64-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+ ar = "${toolprefix}ar"
+ ld = cxx
+
+ toolchain_args = {
+ current_cpu = "mips64"
+ current_os = "linux"
+ is_clang = false
+ }
+}
diff --git a/deps/v8/build/toolchain/linux/unbundle/BUILD.gn b/deps/v8/build/toolchain/linux/unbundle/BUILD.gn
new file mode 100644
index 0000000000..4719d540b0
--- /dev/null
+++ b/deps/v8/build/toolchain/linux/unbundle/BUILD.gn
@@ -0,0 +1,41 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+gcc_toolchain("default") {
+ cc = getenv("CC")
+ cxx = getenv("CXX")
+ ar = getenv("AR")
+ nm = getenv("NM")
+ ld = cxx
+
+ extra_cflags = getenv("CFLAGS")
+ extra_cppflags = getenv("CPPFLAGS")
+ extra_cxxflags = getenv("CXXFLAGS")
+ extra_ldflags = getenv("LDFLAGS")
+
+ toolchain_args = {
+ current_cpu = current_cpu
+ current_os = current_os
+ }
+}
+
+gcc_toolchain("host") {
+ cc = getenv("BUILD_CC")
+ cxx = getenv("BUILD_CXX")
+ ar = getenv("BUILD_AR")
+ nm = getenv("BUILD_NM")
+ ld = cxx
+
+ extra_cflags = getenv("BUILD_CFLAGS")
+ extra_cppflags = getenv("BUILD_CPPFLAGS")
+ extra_cxxflags = getenv("BUILD_CXXFLAGS")
+ extra_ldflags = getenv("BUILD_LDFLAGS")
+
+ toolchain_args = {
+ current_cpu = current_cpu
+ current_os = current_os
+ }
+}
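+
+# getenv() is evaluated when `gn gen` runs, so a distribution build might be
+# configured along these lines (illustrative):
+#
+#   CC=gcc CXX=g++ AR=ar NM=nm CFLAGS="-O2" LDFLAGS="-Wl,-z,now" \
+#       gn gen out/Release \
+#       --args='custom_toolchain="//build/toolchain/linux/unbundle:default"'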
diff --git a/deps/v8/build/toolchain/linux/unbundle/README.md b/deps/v8/build/toolchain/linux/unbundle/README.md
new file mode 100644
index 0000000000..17b93c9fde
--- /dev/null
+++ b/deps/v8/build/toolchain/linux/unbundle/README.md
@@ -0,0 +1,41 @@
+# Overview
+
+This directory contains files that make it possible for Linux
+distributions to build Chromium using the system toolchain.
+
+For more info on the established way such builds are configured,
+please read the following:
+
+ - https://www.gnu.org/software/make/manual/html_node/Implicit-Variables.html
+
+Why do distros want CFLAGS, LDFLAGS, etc.? Please read the following
+for some examples. This is not an exhaustive list.
+
+ - https://wiki.debian.org/Hardening
+ - https://wiki.ubuntu.com/DistCompilerFlags
+ - https://fedoraproject.org/wiki/Changes/Harden_All_Packages
+ - https://fedoraproject.org/wiki/Changes/Modernise_GCC_Flags
+ - https://fedoraproject.org/wiki/Packaging:Guidelines#Compiler_flags
+ - https://blog.flameeyes.eu/2010/09/are-we-done-with-ldflags/
+ - https://blog.flameeyes.eu/2008/08/flags-and-flags/
+
+# Usage
+
+Add the following to GN args:
+
+```
+custom_toolchain="//build/toolchain/linux/unbundle:default"
+host_toolchain="//build/toolchain/linux/unbundle:default"
+```
+
+See [more docs on GN](https://gn.googlesource.com/gn/+/master/docs/quick_start.md).
+
+To cross-compile (not fully tested), add the following:
+
+```
+host_toolchain="//build/toolchain/linux/unbundle:host"
+v8_snapshot_toolchain="//build/toolchain/linux/unbundle:host"
+```
+
+Note: when cross-compiling for a 32-bit target, a matching 32-bit toolchain
+may be needed.
diff --git a/deps/v8/build/toolchain/mac/BUILD.gn b/deps/v8/build/toolchain/mac/BUILD.gn
new file mode 100644
index 0000000000..22316c9f1e
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,572 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
+# some enhancements since the commands on Mac are slightly different from
+# those on Linux.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/coverage/coverage.gni")
+import("../goma.gni")
+if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+}
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+
+assert(host_os == "mac")
+
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/concurrent_links.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Reduce the number of tasks using the copy_bundle_data and compile_xcassets
+ # tools, as they can cause lots of I/O contention when invoking ninja with a
+ # large number of parallel jobs (e.g. when using a distributed build system
+ # like goma).
+ bundle_pool_depth = -1
+}
+
+if (current_toolchain == default_toolchain) {
+ pool("bundle_pool") {
+ if (bundle_pool_depth == -1) {
+ depth = concurrent_links
+ } else {
+ depth = bundle_pool_depth
+ }
+ }
+}
+
+# When implementing tools using Python scripts, a TOOL_VERSION=N env
+# variable is placed in front of the command. The N should be incremented
+# whenever the script is changed, so that the build system rebuilds all
+# edges that utilize the script. Ideally this should be changed to use
+# proper input-dirty checking, but that could be expensive. Instead, use a
+# script to get the tool scripts' modification time to use as the version.
+# This won't cause a re-generation of GN files when the tool script changes
+# but it will cause edges to be marked as dirty if the ninja files are
+# regenerated. See https://crbug.com/619083 for details. A proper fix
+# would be to have inputs to tools (https://crbug.com/621119).
+tool_versions =
+ exec_script("get_tool_mtime.py",
+ rebase_path([
+ "//build/toolchain/mac/compile_xcassets.py",
+ "//build/toolchain/mac/filter_libtool.py",
+ "//build/toolchain/mac/linker_driver.py",
+ ],
+ root_build_dir),
+ "trim scope")
+
+# Shared toolchain definition. Invocations should set current_os to set the
+# build args in this definition.
+template("mac_toolchain") {
+ toolchain(target_name) {
+ if (use_system_xcode) {
+ env_wrapper = ""
+ } else {
+ env_wrapper = "export DEVELOPER_DIR=$hermetic_xcode_path; "
+ }
+
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker.toolchain_args, "*")
+
+ # The host toolchain value computed by the default toolchain's setup
+ # needs to be passed through unchanged to all secondary toolchains to
+ # ensure that it's always the same, regardless of the values that may be
+ # set on those toolchains.
+ host_toolchain = host_toolchain
+ }
+
+ # Supports building with the version of clang shipped with Xcode when
+ # targeting iOS by not respecting clang_base_path.
+ if (toolchain_args.current_os == "ios" && use_xcode_clang) {
+ prefix = ""
+ } else {
+ prefix = rebase_path("$clang_base_path/bin/", root_build_dir)
+ }
+
+ _cc = "${prefix}clang"
+ _cxx = "${prefix}clang++"
+
+ # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+ # toolchain args, use those values, otherwise default to the global one.
+ # This works because the only reasonable overrides that toolchains might
+ # supply for these values are to force-disable them.
+ if (defined(toolchain_args.use_goma)) {
+ toolchain_uses_goma = toolchain_args.use_goma
+ } else {
+ toolchain_uses_goma = use_goma
+ }
+ if (defined(toolchain_args.cc_wrapper)) {
+ toolchain_cc_wrapper = toolchain_args.cc_wrapper
+ } else {
+ toolchain_cc_wrapper = cc_wrapper
+ }
+
+ # Compute the compiler prefix.
+ if (toolchain_uses_goma) {
+ assert(toolchain_cc_wrapper == "",
+ "Goma and cc_wrapper can't be used together.")
+ compiler_prefix = "$goma_dir/gomacc "
+ } else if (toolchain_cc_wrapper != "") {
+ compiler_prefix = toolchain_cc_wrapper + " "
+ } else {
+ compiler_prefix = ""
+ }
+
+ cc = compiler_prefix + _cc
+ cxx = compiler_prefix + _cxx
+ ld = _cxx
+
+ if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+ toolchain_coverage_instrumentation_input_file =
+ toolchain_args.coverage_instrumentation_input_file
+ } else {
+ toolchain_coverage_instrumentation_input_file =
+ coverage_instrumentation_input_file
+ }
+ _use_clang_coverage_wrapper =
+ toolchain_coverage_instrumentation_input_file != ""
+ if (_use_clang_coverage_wrapper) {
+ _coverage_wrapper =
+ rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+ root_build_dir) + " --files-to-instrument=" +
+ rebase_path(toolchain_coverage_instrumentation_input_file,
+ root_build_dir)
+ cc = _coverage_wrapper + " ${cc}"
+ cxx = _coverage_wrapper + " ${cxx}"
+ }
+
+ linker_driver =
+ "TOOL_VERSION=${tool_versions.linker_driver} " +
+ rebase_path("//build/toolchain/mac/linker_driver.py", root_build_dir)
+
+ # On iOS, the final applications are assembled using lipo (to support fat
+ # builds). The correct flags are passed to the linker_driver.py script
+ # directly during the lipo call.
+ if (toolchain_args.current_os != "ios") {
+ _enable_dsyms = enable_dsyms
+ _save_unstripped_output = save_unstripped_output
+ } else {
+ _enable_dsyms = false
+ _save_unstripped_output = false
+ }
+
+ # Make these apply to all tools below.
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
+
+ # Object files go in this directory. Use label_name instead of
+ # target_output_name since labels will generally have no spaces and will be
+ # unique in the directory.
+ object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+ # If dSYMs are enabled, this flag will be added to the link tools.
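+ # (The -Wcrl,... switches are consumed by linker_driver.py itself rather
+ # than forwarded to the linker.)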
+ if (_enable_dsyms) {
+ dsym_switch = " -Wcrl,dsym,{{root_out_dir}} "
+ if (is_mac) {
+ dsym_switch += "-Wcrl,dsymutilpath," +
+ rebase_path("//tools/clang/dsymutil/bin/dsymutil",
+ root_build_dir) + " "
+ }
+
+ dsym_output_dir =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.dSYM"
+ dsym_output = [
+ "$dsym_output_dir/",
+ "$dsym_output_dir/Contents/Info.plist",
+ "$dsym_output_dir/Contents/Resources/DWARF/" +
+ "{{target_output_name}}{{output_extension}}",
+ ]
+ } else {
+ dsym_switch = ""
+ }
+
+ if (_save_unstripped_output) {
+ _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped"
+ }
+
+ tool("cc") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("cxx") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("asm") {
+ # For GCC we can just use the C compiler to compile assembly.
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "ASM {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("objc") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "OBJC {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("objcxx") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "OBJCXX {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.o",
+ ]
+ }
+
+ tool("alink") {
+ script =
+ rebase_path("//build/toolchain/mac/filter_libtool.py", root_build_dir)
+
+ # Note about -filelist: Apple's linker reads the file list file and
+ # interprets each newline-separated chunk of text as a file name. It
+ # doesn't do the things one would expect from the shell like unescaping
+ # or handling quotes. In contrast, when Ninja finds a file name with
+ # spaces, it single-quotes them in $inputs_newline as it would normally
+ # do for command-line arguments. Thus any source names with spaces, or
+ # label names with spaces (which GN bases the output paths on) will be
+ # corrupted by this process. Don't use spaces for source files or labels.
+ rspfile = "{{output}}.rsp"
+ rspfile_content = "{{inputs_newline}}"
+ command = "$env_wrapper rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} python $script libtool -static {{arflags}} -o {{output}} -filelist $rspfile"
+ description = "LIBTOOL-STATIC {{output}}"
+ outputs = [
+ "{{output_dir}}/{{target_output_name}}{{output_extension}}",
+ ]
+ default_output_dir = "{{target_out_dir}}"
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ }
+
+ tool("solink") {
+ dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # eg "./libfoo.dylib"
+ rspfile = dylib + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # These variables are not built into GN but are helpers that implement
+ # (1) linking to produce a .dylib, (2) extracting the symbols from that
+ # file to a temporary file, (3) if the temporary file has differences from
+ # the existing .TOC file, overwrite it, otherwise, don't change it.
+ #
+ # As a special case, if the library reexports symbols from other dynamic
+ # libraries, we always update the .TOC and skip the temporary file and
+ # diffing steps, since that library always needs to be re-linked.
+ tocname = dylib + ".TOC"
+ temporary_tocname = dylib + ".tmp"
+
+ does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB"
+
+ link_command = "$linker_driver $ld -shared "
+ if (is_component_build) {
+ link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" "
+ }
+ link_command += dsym_switch
+ link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{libs}} {{solibs}}"
+
+ replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\""
+ extract_toc_command = "{ otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; nm -gP \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
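+
+ # The .TOC therefore records the dylib's install name (LC_ID_DYLIB) plus
+ # its exported-symbol list, so downstream relinks trigger only when the
+ # library's interface changes.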
+
+ command = "$env_wrapper if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi"
+
+ rspfile_content = "{{inputs_newline}}"
+
+ description = "SOLINK {{output}}"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_dir = "{{root_out_dir}}"
+ default_output_extension = ".dylib"
+
+ output_prefix = "lib"
+
+ # Since the above commands only update the .TOC file when it changes, ask
+ # Ninja to check if the timestamp actually changed to know if downstream
+ # dependencies should be recompiled.
+ restat = true
+
+ # Tell GN about the output files. It will link to the dylib but use the
+ # tocname for dependency management.
+ outputs = [
+ dylib,
+ tocname,
+ ]
+ link_output = dylib
+ depend_output = tocname
+
+ if (_enable_dsyms) {
+ outputs += dsym_output
+ }
+ if (_save_unstripped_output) {
+ outputs += [ _unstripped_output ]
+ }
+ }
+
+ tool("solink_module") {
+ sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # eg "./libfoo.so"
+ rspfile = sofile + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ link_command = "$env_wrapper $linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\""
+ if (is_component_build) {
+ link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}"
+ }
+ link_command += dsym_switch
+ link_command += " {{solibs}} {{libs}}"
+ command = link_command
+
+ rspfile_content = "{{inputs_newline}}"
+
+ description = "SOLINK_MODULE {{output}}"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_dir = "{{root_out_dir}}"
+ default_output_extension = ".so"
+
+ outputs = [
+ sofile,
+ ]
+
+ if (_enable_dsyms) {
+ outputs += dsym_output
+ }
+ if (_save_unstripped_output) {
+ outputs += [ _unstripped_output ]
+ }
+ }
+
+ tool("link") {
+ outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$outfile.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # Note about -filelist: Apple's linker reads the file list file and
+ # interprets each newline-separated chunk of text as a file name. It
+ # doesn't do the things one would expect from the shell like unescaping
+ # or handling quotes. In contrast, when Ninja finds a file name with
+ # spaces, it single-quotes them in $inputs_newline as it would normally
+ # do for command-line arguments. Thus any source names with spaces, or
+ # label names with spaces (which GN bases the output paths on) will be
+ # corrupted by this process. Don't use spaces for source files or labels.
+ command = "$env_wrapper $linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{solibs}} {{libs}}"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs_newline}}"
+ outputs = [
+ outfile,
+ ]
+
+ if (_enable_dsyms) {
+ outputs += dsym_output
+ }
+ if (_save_unstripped_output) {
+ outputs += [ _unstripped_output ]
+ }
+
+ default_output_dir = "{{root_out_dir}}"
+ }
+
+ # These two are really entirely generic, but have to be repeated in
+ # each toolchain because GN doesn't allow a template to be used here.
+ # See //build/toolchain/toolchain.gni for details.
+ tool("stamp") {
+ command = stamp_command
+ description = stamp_description
+ }
+ tool("copy") {
+ command = copy_command
+ description = copy_description
+ }
+
+ tool("copy_bundle_data") {
+ # copy_command uses hardlinks if possible, but this does not work with
+ # directories. If the source is a directory, "pax" is used instead to
+ # create the same tree structure using hardlinks to individual files
+ # (this preserves symbolic links too), as recommended in the replies to
+ # the question at http://serverfault.com/q/209888/43689 ("cp -al" isn't
+ # available on macOS).
+ #
+ # According to the man page for pax, the commands to use to clone
+ # olddir to newdir using pax are the following:
+ #
+ # $ mkdir newdir
+ # $ cd olddir
+ # $ pax -rwl . ../newdir
+ #
+ # The _copydir command does exactly that, but uses an absolute path
+ # constructed from the shell variable $OLDPWD (automatically set when
+ # cd is used), as computing the relative path is a bit complex and
+ # using pwd would require spawning a sub-shell.
+ _copydir = "mkdir -p {{output}} && cd {{source}} && " +
+ "pax -rwl . \"\$OLDPWD\"/{{output}}"
+ command = "rm -rf {{output}} && if [[ -d {{source}} ]]; then " +
+ _copydir + "; else " + copy_command + "; fi"
+
+ description = "COPY_BUNDLE_DATA {{source}} {{output}}"
+ pool = ":bundle_pool($default_toolchain)"
+ }
+ tool("compile_xcassets") {
+ _tool = rebase_path("//build/toolchain/mac/compile_xcassets.py",
+ root_build_dir)
+ if (is_ios) {
+ _sdk_name = ios_sdk_name
+ _min_deployment_target = ios_deployment_target
+ _compress_pngs = ""
+ } else {
+ _sdk_name = mac_sdk_name
+ _min_deployment_target = mac_deployment_target
+ _compress_pngs = " -c "
+ }
+ command =
+ "$env_wrapper rm -f \"{{output}}\" && " +
+ "TOOL_VERSION=${tool_versions.compile_xcassets} " +
+ "python $_tool$_compress_pngs -p \"$_sdk_name\" " +
+ "-t \"$_min_deployment_target\" -T \"{{bundle_product_type}}\" " +
+ "-P \"{{bundle_partial_info_plist}}\" -o {{output}} {{inputs}}"
+
+ description = "COMPILE_XCASSETS {{output}}"
+ pool = ":bundle_pool($default_toolchain)"
+ }
+
+ tool("action") {
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+ }
+}
+
+mac_toolchain("clang_arm") {
+ toolchain_args = {
+ current_cpu = "arm"
+ current_os = "mac"
+ }
+}
+
+mac_toolchain("clang_x64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "mac"
+
+ if (target_os == "ios") {
+ # TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
+ # not supported by the Chromium mac_clang_x64 toolchain on iOS
+ # distribution.
+ use_sanitizer_coverage = false
+ }
+ }
+}
+
+mac_toolchain("clang_x86") {
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "mac"
+ }
+}
+
+mac_toolchain("clang_x86_v8_arm") {
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "mac"
+
+ if (defined(v8_current_cpu)) {
+ v8_current_cpu = "arm"
+ }
+ }
+}
+
+mac_toolchain("clang_x86_v8_mipsel") {
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "mac"
+
+ if (defined(v8_current_cpu)) {
+ v8_current_cpu = "mipsel"
+ }
+ }
+}
+
+mac_toolchain("clang_x64_v8_arm64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "mac"
+
+ if (defined(v8_current_cpu)) {
+ v8_current_cpu = "arm64"
+ }
+ }
+}
+
+mac_toolchain("clang_x64_v8_mips64el") {
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "mac"
+
+ if (defined(v8_current_cpu)) {
+ v8_current_cpu = "mips64el"
+ }
+ }
+}
+
+if (is_ios) {
+ mac_toolchain("ios_clang_arm") {
+ toolchain_args = {
+ current_cpu = "arm"
+ current_os = "ios"
+ }
+ }
+
+ mac_toolchain("ios_clang_arm64") {
+ toolchain_args = {
+ current_cpu = "arm64"
+ current_os = "ios"
+ }
+ }
+
+ mac_toolchain("ios_clang_x86") {
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "ios"
+ }
+ }
+
+ mac_toolchain("ios_clang_x64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "ios"
+ }
+ }
+}
diff --git a/deps/v8/build/toolchain/mac/OWNERS b/deps/v8/build/toolchain/mac/OWNERS
new file mode 100644
index 0000000000..0ed2e154d8
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/OWNERS
@@ -0,0 +1,2 @@
+rsesek@chromium.org
+sdefresne@chromium.org
diff --git a/deps/v8/build/toolchain/mac/compile_xcassets.py b/deps/v8/build/toolchain/mac/compile_xcassets.py
new file mode 100644
index 0000000000..c1f4680b7c
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/compile_xcassets.py
@@ -0,0 +1,251 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+"""Wrapper around actool to compile assets catalog.
+
+The script compile_xcassets.py is a wrapper around actool to compile
+assets catalog to Assets.car that turns warning into errors. It also
+fixes some quirks of actool to make it work from ninja (mostly that
+actool seems to require absolute path but gn generates command-line
+with relative paths).
+
+The wrapper filter out any message that is not a section header and
+not a warning or error message, and fails if filtered output is not
+empty. This should to treat all warnings as error until actool has
+an option to fail with non-zero error code when there are warnings.
+"""
+
+# Pattern matching a section header in the output of actool.
+SECTION_HEADER = re.compile('^/\\* ([^ ]*) \\*/$')
+
+# Name of the section containing informational messages that can be ignored.
+NOTICE_SECTION = 'com.apple.actool.compilation-results'
+
+# Regular expressions matching spurious messages from actool that should be
+# ignored (as they are bogus). Generally a bug should be filed with Apple
+# when adding a pattern here.
+SPURIOUS_PATTERNS = [re.compile(pattern) for pattern in (
+ # crbug.com/770634, likely a bug in Xcode 9.1 beta, remove once build
+ # requires a version of Xcode with a fix.
+ r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: \(null\)',
+
+ # crbug.com/770634, likely a bug in Xcode 9.2 beta, remove once build
+ # requires a version of Xcode with a fix.
+ r'\[\]\[ipad\]\[76x76\]\[\]\[\]\[1x\]\[\]\[\]: notice: 76x76@1x app icons'
+ ' only apply to iPad apps targeting releases of iOS prior to 10.0.',
+)]
+
+# Maps special types of asset catalogs to the corresponding command-line
+# parameters that need to be passed to actool.
+ACTOOL_FLAG_FOR_ASSET_TYPE = {
+ '.appiconset': '--app-icon',
+ '.launchimage': '--launch-image',
+}
+
+
+def IsSpuriousMessage(line):
+ """Returns whether line contains a spurious message that should be ignored."""
+ for pattern in SPURIOUS_PATTERNS:
+ match = pattern.search(line)
+ if match is not None:
+ return True
+ return False
+
+
+def FilterCompilerOutput(compiler_output, relative_paths):
+ """Filers actool compilation output.
+
+ The compiler output is composed of multiple sections for each different
+ level of output (error, warning, notices, ...). Each section starts with
+ the section name on a single line, followed by all the messages from the
+ section.
+
+ The function filters out any lines that are not in the com.apple.actool.errors
+ or com.apple.actool.document.warnings sections (spurious messages come
+ before any section of the output).
+
+ See crbug.com/730054, crbug.com/739163 and crbug.com/770634 for some example
+ messages that pollute the output of actool and cause flaky builds.
+
+ Args:
+ compiler_output: string containing the output generated by the
+ compiler (contains both stdout and stderr)
+ relative_paths: mapping from absolute to relative paths used to
+ convert paths in the warning and error messages (unknown paths
+ will be left unaltered)
+
+ Returns:
+ The filtered output of the compiler. If the compilation was a
+ success, then the output will be empty, otherwise it will use
+ relative paths and omit any irrelevant output.
+ """
+
+ filtered_output = []
+ current_section = None
+ data_in_section = False
+ for line in compiler_output.splitlines():
+ match = SECTION_HEADER.search(line)
+ if match is not None:
+ data_in_section = False
+ current_section = match.group(1)
+ continue
+ if current_section and current_section != NOTICE_SECTION:
+ if IsSpuriousMessage(line):
+ continue
+ absolute_path = line.split(':')[0]
+ relative_path = relative_paths.get(absolute_path, absolute_path)
+ if absolute_path != relative_path:
+ line = relative_path + line[len(absolute_path):]
+ if not data_in_section:
+ data_in_section = True
+ filtered_output.append('/* %s */\n' % current_section)
+ filtered_output.append(line + '\n')
+
+ return ''.join(filtered_output)
+
+
+def CompileAssetCatalog(output, platform, product_type, min_deployment_target,
+ inputs, compress_pngs, partial_info_plist):
+ """Compile the .xcassets bundles to an asset catalog using actool.
+
+ Args:
+ output: absolute path to the containing bundle
+ platform: the targeted platform
+ product_type: the bundle type
+ min_deployment_target: minimum deployment target
+ inputs: list of absolute paths to .xcassets bundles
+ compress_pngs: whether to enable compression of pngs
+ partial_info_plist: path to partial Info.plist to generate
+ """
+ command = [
+ 'xcrun', 'actool', '--output-format=human-readable-text',
+ '--notices', '--warnings', '--errors', '--platform', platform,
+ '--minimum-deployment-target', min_deployment_target,
+ ]
+
+ if compress_pngs:
+ command.extend(['--compress-pngs'])
+
+ if product_type != '':
+ command.extend(['--product-type', product_type])
+
+ if platform == 'macosx':
+ command.extend(['--target-device', 'mac'])
+ else:
+ command.extend(['--target-device', 'iphone', '--target-device', 'ipad'])
+
+ # Scan the input directories for the presence of asset catalog types that
+ # require special treatment, and if so, add them to the actool command-line.
+ for relative_path in inputs:
+
+ if not os.path.isdir(relative_path):
+ continue
+
+ for file_or_dir_name in os.listdir(relative_path):
+ if not os.path.isdir(os.path.join(relative_path, file_or_dir_name)):
+ continue
+
+ asset_name, asset_type = os.path.splitext(file_or_dir_name)
+ if asset_type not in ACTOOL_FLAG_FOR_ASSET_TYPE:
+ continue
+
+ command.extend([ACTOOL_FLAG_FOR_ASSET_TYPE[asset_type], asset_name])
+
+ # Always ask actool to generate a partial Info.plist file. If no path
+ # has been given by the caller, use a temporary file name.
+ temporary_file = None
+ if not partial_info_plist:
+ temporary_file = tempfile.NamedTemporaryFile(suffix='.plist')
+ partial_info_plist = temporary_file.name
+
+ command.extend(['--output-partial-info-plist', partial_info_plist])
+
+ # Dictionary used to convert absolute paths back to their relative form
+ # in the output of actool.
+ relative_paths = {}
+
+ # actool crashes if paths are relative, so convert input and output paths
+ # to absolute paths, and record the relative paths to fix them back when
+ # filtering the output.
+ absolute_output = os.path.abspath(output)
+ relative_paths[absolute_output] = output
+ relative_paths[os.path.dirname(absolute_output)] = os.path.dirname(output)
+ command.extend(['--compile', os.path.dirname(os.path.abspath(output))])
+
+ for relative_path in inputs:
+ absolute_path = os.path.abspath(relative_path)
+ relative_paths[absolute_path] = relative_path
+ command.append(absolute_path)
+
+ try:
+ # Run actool and redirect stdout and stderr to the same pipe (as actool
+ # is confused about what should go to stderr/stdout).
+ process = subprocess.Popen(
+ command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ stdout, _ = process.communicate()
+
+ # Filter the output to remove all garbage and to fix the paths.
+ stdout = FilterCompilerOutput(stdout, relative_paths)
+
+ if process.returncode or stdout:
+ sys.stderr.write(stdout)
+ sys.exit(1)
+
+ finally:
+ if temporary_file:
+ temporary_file.close()
+
+
+def Main():
+ parser = argparse.ArgumentParser(
+ description='compile assets catalog for a bundle')
+ parser.add_argument(
+ '--platform', '-p', required=True,
+ choices=('macosx', 'iphoneos', 'iphonesimulator'),
+ help='target platform for the compiled assets catalog')
+ parser.add_argument(
+ '--minimum-deployment-target', '-t', required=True,
+ help='minimum deployment target for the compiled assets catalog')
+ parser.add_argument(
+ '--output', '-o', required=True,
+ help='path to the compiled assets catalog')
+ parser.add_argument(
+ '--compress-pngs', '-c', action='store_true', default=False,
+ help='recompress PNGs while compiling assets catalog')
+ parser.add_argument(
+ '--product-type', '-T',
+ help='type of the containing bundle')
+ parser.add_argument(
+ '--partial-info-plist', '-P',
+ help='path to partial info plist to create')
+ parser.add_argument(
+ 'inputs', nargs='+',
+ help='path to input assets catalog sources')
+ args = parser.parse_args()
+
+ if os.path.basename(args.output) != 'Assets.car':
+ sys.stderr.write(
+ 'output should be path to compiled asset catalog, not '
+ 'to the containing bundle: %s\n' % (args.output,))
+ sys.exit(1)
+
+ CompileAssetCatalog(
+ args.output,
+ args.platform,
+ args.product_type,
+ args.minimum_deployment_target,
+ args.inputs,
+ args.compress_pngs,
+ args.partial_info_plist)
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/deps/v8/build/toolchain/mac/compile_xcassets_unittests.py b/deps/v8/build/toolchain/mac/compile_xcassets_unittests.py
new file mode 100644
index 0000000000..7655df8c05
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/compile_xcassets_unittests.py
@@ -0,0 +1,141 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+import compile_xcassets
+
+
+class TestFilterCompilerOutput(unittest.TestCase):
+
+ relative_paths = {
+ '/Users/janedoe/chromium/src/Chromium.xcassets':
+ '../../Chromium.xcassets',
+ '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car':
+ 'Chromium.app/Assets.car',
+ }
+
+ def testNoError(self):
+ self.assertEquals(
+ '',
+ compile_xcassets.FilterCompilerOutput(
+ '/* com.apple.actool.compilation-results */\n'
+ '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
+ self.relative_paths))
+
+ def testNoErrorRandomMessages(self):
+ self.assertEquals(
+ '',
+ compile_xcassets.FilterCompilerOutput(
+ '2017-07-04 04:59:19.460 ibtoold[23487:41214] CoreSimulator is att'
+ 'empting to unload a stale CoreSimulatorService job. Existing'
+ ' job (com.apple.CoreSimulator.CoreSimulatorService.179.1.E8tt'
+ 'yeDeVgWK) is from an older version and is being removed to pr'
+ 'event problems.\n'
+ '/* com.apple.actool.compilation-results */\n'
+ '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
+ self.relative_paths))
+
+ def testWarning(self):
+ self.assertEquals(
+ '/* com.apple.actool.document.warnings */\n'
+ '../../Chromium.xcassets:./image1.imageset/[universal][][][1x][][][]['
+ '][][]: warning: The file "image1.png" for the image set "image1"'
+ ' does not exist.\n',
+ compile_xcassets.FilterCompilerOutput(
+ '/* com.apple.actool.document.warnings */\n'
+ '/Users/janedoe/chromium/src/Chromium.xcassets:./image1.imageset/['
+ 'universal][][][1x][][][][][][]: warning: The file "image1.png'
+ '" for the image set "image1" does not exist.\n'
+ '/* com.apple.actool.compilation-results */\n'
+ '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n',
+ self.relative_paths))
+
+ def testError(self):
+ self.assertEquals(
+ '/* com.apple.actool.errors */\n'
+ '../../Chromium.xcassets: error: The output directory "/Users/janedoe/'
+ 'chromium/src/out/Default/Chromium.app" does not exist.\n',
+ compile_xcassets.FilterCompilerOutput(
+ '/* com.apple.actool.errors */\n'
+ '/Users/janedoe/chromium/src/Chromium.xcassets: error: The output '
+ 'directory "/Users/janedoe/chromium/src/out/Default/Chromium.a'
+ 'pp" does not exist.\n'
+ '/* com.apple.actool.compilation-results */\n',
+ self.relative_paths))
+
+ def testSpurious(self):
+ self.assertEquals(
+ '/* com.apple.actool.document.warnings */\n'
+ '../../Chromium.xcassets:./AppIcon.appiconset: warning: A 1024x1024 ap'
+ 'p store icon is required for iOS apps\n',
+ compile_xcassets.FilterCompilerOutput(
+ '/* com.apple.actool.document.warnings */\n'
+ '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse'
+ 't: warning: A 1024x1024 app store icon is required for iOS ap'
+ 'ps\n'
+ '/* com.apple.actool.document.notices */\n'
+ '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse'
+ 't/[][ipad][76x76][][][1x][][]: notice: (null)\n',
+ self.relative_paths))
+
+ def testComplexError(self):
+ self.assertEquals(
+ '/* com.apple.actool.errors */\n'
+ ': error: Failed to find a suitable device for the type SimDeviceType '
+ ': com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime SimRunt'
+ 'ime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimRuntime.iOS-1'
+ '0-3\n'
+ ' Failure Reason: Failed to create SimDeviceSet at path /Users/jane'
+ 'doe/Library/Developer/Xcode/UserData/IB Support/Simulator Devices'
+ '. You\'ll want to check the logs in ~/Library/Logs/CoreSimulator '
+ 'to see why creating the SimDeviceSet failed.\n'
+ ' Underlying Errors:\n'
+ ' Description: Failed to initialize simulator device set.\n'
+ ' Failure Reason: Failed to subscribe to notifications from Cor'
+ 'eSimulatorService.\n'
+ ' Underlying Errors:\n'
+ ' Description: Error returned in reply to notification requ'
+ 'est: Connection invalid\n'
+ ' Failure Reason: Software caused connection abort\n',
+ compile_xcassets.FilterCompilerOutput(
+ '2017-07-07 10:37:27.367 ibtoold[88538:12553239] CoreSimulator det'
+ 'ected Xcode.app relocation or CoreSimulatorService version ch'
+ 'ange. Framework path (/Applications/Xcode.app/Contents/Devel'
+ 'oper/Library/PrivateFrameworks/CoreSimulator.framework) and v'
+ 'ersion (375.21) does not match existing job path (/Library/De'
+ 'veloper/PrivateFrameworks/CoreSimulator.framework/Versions/A/'
+ 'XPCServices/com.apple.CoreSimulator.CoreSimulatorService.xpc)'
+ ' and version (459.13). Attempting to remove the stale servic'
+ 'e in order to add the expected version.\n'
+ '2017-07-07 10:37:27.625 ibtoold[88538:12553256] CoreSimulatorServ'
+ 'ice connection interrupted. Resubscribing to notifications.\n'
+ '2017-07-07 10:37:27.632 ibtoold[88538:12553264] CoreSimulatorServ'
+ 'ice connection became invalid. Simulator services will no lo'
+ 'nger be available.\n'
+ '2017-07-07 10:37:27.642 ibtoold[88538:12553274] CoreSimulatorServ'
+ 'ice connection became invalid. Simulator services will no lo'
+ 'nger be available.\n'
+ '/* com.apple.actool.errors */\n'
+ ': error: Failed to find a suitable device for the type SimDeviceT'
+ 'ype : com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime'
+ ' SimRuntime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimR'
+ 'untime.iOS-10-3\n'
+ ' Failure Reason: Failed to create SimDeviceSet at path /Users/'
+ 'janedoe/Library/Developer/Xcode/UserData/IB Support/Simulator'
+ ' Devices. You\'ll want to check the logs in ~/Library/Logs/Co'
+ 'reSimulator to see why creating the SimDeviceSet failed.\n'
+ ' Underlying Errors:\n'
+ ' Description: Failed to initialize simulator device set.\n'
+ ' Failure Reason: Failed to subscribe to notifications from'
+ ' CoreSimulatorService.\n'
+ ' Underlying Errors:\n'
+ ' Description: Error returned in reply to notification '
+ 'request: Connection invalid\n'
+ ' Failure Reason: Software caused connection abort\n'
+ '/* com.apple.actool.compilation-results */\n',
+ self.relative_paths))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/toolchain/mac/filter_libtool.py b/deps/v8/build/toolchain/mac/filter_libtool.py
new file mode 100644
index 0000000000..3b16151840
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/filter_libtool.py
@@ -0,0 +1,54 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import subprocess
+import sys
+
+# This script executes libtool and filters out logspam lines like:
+# '/path/to/libtool: file: foo.o has no symbols'
+
+BLACKLIST_PATTERNS = map(re.compile, [
+ r'^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$',
+ r'^.*libtool: warning for library: .* the table of contents is empty '
+ r'\(no object file members in the library define global symbols\)$',
+ r'^.*libtool: warning same member name \(\S*\) in output file used for '
+ r'input files: \S* and: \S* \(due to use of basename, truncation, '
+ r'blank padding or duplicate input files\)$',
+])
+
+
+def IsBlacklistedLine(line):
+ """Returns whether the line should be filtered out."""
+ for pattern in BLACKLIST_PATTERNS:
+ if pattern.match(line):
+ return True
+ return False
+
+
+def Main(cmd_list):
+ env = os.environ.copy()
+ # Ref:
+ # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+ # The problem with this flag is that it resets the file mtime on the file to
+ # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+ env['ZERO_AR_DATE'] = '1'
+ libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+ _, err = libtoolout.communicate()
+ for line in err.splitlines():
+ if not IsBlacklistedLine(line):
+ print >>sys.stderr, line
+  # If the command succeeded, touch the output .a file named on the command
+  # line (if present) to restore a useful mtime after ZERO_AR_DATE reset it.
+  # A bit hacky.
+ if not libtoolout.returncode:
+ for i in range(len(cmd_list) - 1):
+ if cmd_list[i] == '-o' and cmd_list[i+1].endswith('.a'):
+ os.utime(cmd_list[i+1], None)
+ break
+ return libtoolout.returncode
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv[1:]))
diff --git a/deps/v8/build/toolchain/mac/get_tool_mtime.py b/deps/v8/build/toolchain/mac/get_tool_mtime.py
new file mode 100644
index 0000000000..4106344b82
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/get_tool_mtime.py
@@ -0,0 +1,17 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py
+#
+# Prints a GN scope with the variable name being the basename sans-extension
+# and the value being the file modification time. A variable is emitted for
+# each file argument on the command line.
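+#
+# For example:
+#   python get_tool_mtime.py build/toolchain/mac/linker_driver.py
+# might print (the timestamp value is illustrative):
+#   linker_driver = 1497954204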
+
+if __name__ == '__main__':
+ for f in sys.argv[1:]:
+ variable = os.path.splitext(os.path.basename(f))[0]
+ print '%s = %d' % (variable, os.path.getmtime(f))
diff --git a/deps/v8/build/toolchain/mac/linker_driver.py b/deps/v8/build/toolchain/mac/linker_driver.py
new file mode 100755
index 0000000000..10bbda02ac
--- /dev/null
+++ b/deps/v8/build/toolchain/mac/linker_driver.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+
+DSYMUTIL_INVOKE = ['xcrun', 'dsymutil']
+
+# The linker_driver.py is responsible for forwarding a linker invocation to
+# the compiler driver, while processing special arguments itself.
+#
+# Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out
+#
+# On Mac, the logical step of linking is handled by three discrete tools to
+# perform the image link, debug info link, and strip. The linker_driver.py
+# combines these three steps into a single tool.
+#
+# The command passed to the linker_driver.py should be the compiler driver
+# invocation for the linker. It is first invoked unaltered (except for the
+# removal of the special driver arguments, described below). Then the driver
+# performs additional actions, based on these arguments:
+#
+# -Wcrl,dsym,<dsym_path_prefix>
+# After invoking the linker, this will run `dsymutil` on the linker's
+# output, producing a dSYM bundle, stored at dsym_path_prefix. As an
+# example, if the linker driver were invoked with:
+# "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
+# The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
+#
+# -Wcrl,dsymutilpath,<dsymutil_path>
+# Sets the path to the dsymutil to run with -Wcrl,dsym, in which case
+# `xcrun` is not used to invoke it.
+#
+# -Wcrl,unstripped,<unstripped_path_prefix>
+# After invoking the linker, and before strip, this will save a copy of
+# the unstripped linker output in the directory unstripped_path_prefix.
+#
+# -Wcrl,strip,<strip_arguments>
+# After invoking the linker, and optionally dsymutil, this will run
+# the strip command on the linker's output. strip_arguments are
+# comma-separated arguments to be passed to the strip command.
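+#    For example, -Wcrl,strip,-x,-S runs "xcrun strip -x -S" on the
+#    linker's output.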
+
+def Main(args):
+ """Main function for the linker driver. Separates out the arguments for
+ the main compiler driver and the linker driver, then invokes all the
+ required tools.
+
+ Args:
+ args: list of string, Arguments to the script.
+ """
+
+ if len(args) < 2:
+ raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
+
+ for i in xrange(len(args)):
+ if args[i] != '--developer_dir':
+ continue
+ os.environ['DEVELOPER_DIR'] = args[i + 1]
+ del args[i:i+2]
+ break
+
+ # Collect arguments to the linker driver (this script) and remove them from
+ # the arguments being passed to the compiler driver.
+ linker_driver_actions = {}
+ compiler_driver_args = []
+ for arg in args[1:]:
+ if arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+ # Convert driver actions into a map of name => lambda to invoke.
+ driver_action = ProcessLinkerDriverArg(arg)
+ assert driver_action[0] not in linker_driver_actions
+ linker_driver_actions[driver_action[0]] = driver_action[1]
+ else:
+ compiler_driver_args.append(arg)
+
+ linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)]
+
+ try:
+ # Run the linker by invoking the compiler driver.
+ subprocess.check_call(compiler_driver_args)
+
+ # Run the linker driver actions, in the order specified by the actions list.
+ for action in _LINKER_DRIVER_ACTIONS:
+ name = action[0]
+ if name in linker_driver_actions:
+ linker_driver_outputs += linker_driver_actions[name](args)
+ except:
+ # If a linker driver action failed, remove all the outputs to make the
+ # build step atomic.
+ map(_RemovePath, linker_driver_outputs)
+
+ # Re-report the original failure.
+ raise
+
+
+def ProcessLinkerDriverArg(arg):
+ """Processes a linker driver argument and returns a tuple containing the
+ name and unary lambda to invoke for that linker driver action.
+
+ Args:
+ arg: string, The linker driver argument.
+
+ Returns:
+ A 2-tuple:
+ 0: The driver action name, as in _LINKER_DRIVER_ACTIONS.
+      1: A unary lambda that takes the full list of arguments passed to
+ Main(). The lambda should call the linker driver action that
+ corresponds to the argument and return a list of outputs from the
+ action.
+ """
+ if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+ raise ValueError('%s is not a linker driver argument' % (arg,))
+
+ sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):]
+
+ for driver_action in _LINKER_DRIVER_ACTIONS:
+ (name, action) = driver_action
+ if sub_arg.startswith(name):
+ return (name,
+ lambda full_args: action(sub_arg[len(name):], full_args))
+
+ raise ValueError('Unknown linker driver argument: %s' % (arg,))
+
+
+def RunDsymUtil(dsym_path_prefix, full_args):
+ """Linker driver action for -Wcrl,dsym,<dsym-path-prefix>. Invokes dsymutil
+ on the linker's output and produces a dsym file at |dsym_file| path.
+
+ Args:
+ dsym_path_prefix: string, The path at which the dsymutil output should be
+ located.
+ full_args: list of string, Full argument list for the linker driver.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ if not len(dsym_path_prefix):
+ raise ValueError('Unspecified dSYM output file')
+
+ linker_out = _FindLinkerOutput(full_args)
+ base = os.path.basename(linker_out)
+ dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')
+
+ # Remove old dSYMs before invoking dsymutil.
+ _RemovePath(dsym_out)
+ subprocess.check_call(DSYMUTIL_INVOKE + ['-o', dsym_out, linker_out])
+ return [dsym_out]
+
+
+def SetDsymutilPath(dsymutil_path, full_args):
+ """Linker driver action for -Wcrl,dsymutilpath,<dsymutil_path>.
+
+ Sets the invocation command for dsymutil, which allows the caller to specify
+ an alternate dsymutil. This action is always processed before the RunDsymUtil
+ action.
+
+ Args:
+ dsymutil_path: string, The path to the dsymutil binary to run
+ full_args: list of string, Full argument list for the linker driver.
+
+ Returns:
+    No output - this step is run purely for its side effect.
+ """
+ global DSYMUTIL_INVOKE
+ DSYMUTIL_INVOKE = [dsymutil_path]
+ return []
+
+
+def RunSaveUnstripped(unstripped_path_prefix, full_args):
+ """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>. Copies
+ the linker output to |unstripped_path_prefix| before stripping.
+
+ Args:
+ unstripped_path_prefix: string, The path at which the unstripped output
+ should be located.
+ full_args: list of string, Full argument list for the linker driver.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ if not len(unstripped_path_prefix):
+ raise ValueError('Unspecified unstripped output file')
+
+ linker_out = _FindLinkerOutput(full_args)
+ base = os.path.basename(linker_out)
+ unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped')
+
+ shutil.copyfile(linker_out, unstripped_out)
+ return [unstripped_out]
+
+
+def RunStrip(strip_args_string, full_args):
+ """Linker driver action for -Wcrl,strip,<strip_arguments>.
+
+ Args:
+ strip_args_string: string, Comma-separated arguments for `strip`.
+ full_args: list of string, Full arguments for the linker driver.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ strip_command = ['xcrun', 'strip']
+ if len(strip_args_string) > 0:
+ strip_command += strip_args_string.split(',')
+ strip_command.append(_FindLinkerOutput(full_args))
+ subprocess.check_call(strip_command)
+ return []
+
+
+def _FindLinkerOutput(full_args):
+ """Finds the output of the linker by looking for the output flag in its
+ argument list. As this is a required linker argument, raises an error if it
+ cannot be found.
+ """
+  # The linker_driver.py script may be used to wrap either the compiler linker
+  # (uses -o to configure the output) or lipo (uses -output to configure the
+  # output). Since wrapping the compiler linker is the most likely possibility,
+  # use try/except and fall back to checking for -output if -o is not found.
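+  # For example, ['clang++', 'main.o', '-o', 'prog'] returns 'prog'.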
+ try:
+ output_flag_index = full_args.index('-o')
+ except ValueError:
+ output_flag_index = full_args.index('-output')
+ return full_args[output_flag_index + 1]
+
+
+def _RemovePath(path):
+ """Removes the file or directory at |path| if it exists."""
+ if os.path.exists(path):
+ if os.path.isdir(path):
+ shutil.rmtree(path)
+ else:
+ os.unlink(path)
+
+
+_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
+
+"""List of linker driver actions. The sort order of this list affects the
+order in which the actions are invoked. The first item in the tuple is the
+argument's -Wcrl,<sub_argument> and the second is the function to invoke.
+"""
+_LINKER_DRIVER_ACTIONS = [
+ ('dsymutilpath,', SetDsymutilPath),
+ ('dsym,', RunDsymUtil),
+ ('unstripped,', RunSaveUnstripped),
+ ('strip,', RunStrip),
+]
+
+
+if __name__ == '__main__':
+ Main(sys.argv)
+ sys.exit(0)
diff --git a/deps/v8/build/toolchain/nacl/BUILD.gn b/deps/v8/build/toolchain/nacl/BUILD.gn
new file mode 100644
index 0000000000..85e284948c
--- /dev/null
+++ b/deps/v8/build/toolchain/nacl/BUILD.gn
@@ -0,0 +1,266 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/nacl_toolchain.gni")
+
+# Add the toolchain revision as a preprocessor define so that sources are
+# rebuilt when a toolchain is updated.
+# Ideally we could use the toolchain deps feature, but currently that feature is
+# bugged and does not trigger a rebuild.
+# https://code.google.com/p/chromium/issues/detail?id=431880
+# Calls to get the toolchain revision are relatively slow, so do them all in a
+# single batch to amortize python startup, etc.
+revisions = exec_script("//native_client/build/get_toolchain_revision.py",
+ [
+ "nacl_x86_glibc",
+ "nacl_arm_glibc",
+ "pnacl_newlib",
+ ],
+ "trim list lines")
+nacl_x86_glibc_rev = revisions[0]
+nacl_arm_glibc_rev = revisions[1]
+
+pnacl_newlib_rev = revisions[2]
+
+if (host_os == "win") {
+ toolsuffix = ".exe"
+} else {
+ toolsuffix = ""
+}
+
+# The PNaCl toolchain tools are all wrapper scripts rather than binary
+# executables. On POSIX systems, nobody cares what kind of executable
+# file you are. But on Windows, scripts (.bat files) cannot be run
+# directly and need the Windows shell (cmd.exe) specified explicitly.
+if (host_os == "win") {
+ # NOTE! The //build/toolchain/gcc_*_wrapper.py scripts recognize
+ # this exact prefix string, so they must be updated if this string
+ # is changed in any way.
+ scriptprefix = "cmd /c call "
+ scriptsuffix = ".bat"
+} else {
+ scriptprefix = ""
+ scriptsuffix = ""
+}
+
+# When the compilers are run via goma or ccache rather than directly by
+# GN/Ninja, the goma/ccache wrapper handles .bat files but gets confused
+# by being given the scriptprefix.
+if (host_os == "win" && !use_goma && cc_wrapper == "") {
+ compiler_scriptprefix = scriptprefix
+} else {
+ compiler_scriptprefix = ""
+}
+
+template("pnacl_toolchain") {
+ assert(defined(invoker.executable_extension),
+ "Must define executable_extension")
+
+ nacl_toolchain(target_name) {
+ toolchain_package = "pnacl_newlib"
+ toolchain_revision = pnacl_newlib_rev
+ toolprefix =
+ rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-",
+ root_build_dir)
+
+ cc = compiler_scriptprefix + toolprefix + "clang" + scriptsuffix
+ cxx = compiler_scriptprefix + toolprefix + "clang++" + scriptsuffix
+ ar = toolprefix + "ar" + scriptsuffix
+ readelf = scriptprefix + toolprefix + "readelf" + scriptsuffix
+ nm = scriptprefix + toolprefix + "nm" + scriptsuffix
+ if (defined(invoker.strip)) {
+ strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix
+ }
+ forward_variables_from(invoker,
+ [
+ "executable_extension",
+ "is_clang_analysis_supported",
+ ])
+
+    # Note this is not the usual "ld = cxx" because "ld" invocations are
+    # never run via goma, so this needs scriptprefix.
+ ld = scriptprefix + toolprefix + "clang++" + scriptsuffix
+
+ toolchain_args = {
+ is_clang = true
+ current_cpu = "pnacl"
+ use_lld = false
+ }
+ }
+}
+
+pnacl_toolchain("newlib_pnacl") {
+ executable_extension = ".pexe"
+
+ # The pnacl-finalize tool turns a .pexe.debug file into a .pexe file.
+ # It's very similar in purpose to the traditional "strip" utility: it
+ # turns what comes out of the linker into what you actually want to
+ # distribute and run. PNaCl doesn't have a "strip"-like utility that
+ # you ever actually want to use other than pnacl-finalize, so just
+ # make pnacl-finalize the strip tool rather than adding an additional
+ # step like "postlink" to run pnacl-finalize.
+ strip = "finalize"
+}
+
+pnacl_toolchain("newlib_pnacl_nonsfi") {
+ executable_extension = ""
+ strip = "strip"
+}
+
+template("nacl_glibc_toolchain") {
+ toolchain_cpu = target_name
+ assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+ assert(defined(invoker.toolchain_package), "Must define toolchain_package")
+ assert(defined(invoker.toolchain_revision), "Must define toolchain_revision")
+ forward_variables_from(invoker,
+ [
+ "toolchain_package",
+ "toolchain_revision",
+ ])
+
+ toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+ invoker.toolchain_tuple + "-",
+ root_build_dir)
+
+ nacl_toolchain("glibc_" + toolchain_cpu) {
+ cc = toolprefix + "gcc" + toolsuffix
+ cxx = toolprefix + "g++" + toolsuffix
+ ar = toolprefix + "ar" + toolsuffix
+ ld = cxx
+ readelf = toolprefix + "readelf" + toolsuffix
+ nm = toolprefix + "nm" + toolsuffix
+ strip = toolprefix + "strip" + toolsuffix
+
+ toolchain_args = {
+ current_cpu = toolchain_cpu
+ is_clang = false
+ is_nacl_glibc = true
+ use_lld = false
+ }
+ }
+}
+
+nacl_glibc_toolchain("x86") {
+ toolchain_package = "nacl_x86_glibc"
+ toolchain_revision = nacl_x86_glibc_rev
+
+ # Rely on the :compiler_cpu_abi config adding the -m32 flag here rather
+  # than using the i686-nacl binary directly. This is because i686-nacl-gcc
+ # is a shell script wrapper around x86_64-nacl-gcc and goma has trouble with
+ # compiler executables that are shell scripts (so the i686 'compiler' is not
+ # currently in goma).
+ toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("x64") {
+ toolchain_package = "nacl_x86_glibc"
+ toolchain_revision = nacl_x86_glibc_rev
+ toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("arm") {
+ toolchain_package = "nacl_arm_glibc"
+ toolchain_revision = nacl_arm_glibc_rev
+ toolchain_tuple = "arm-nacl"
+}
+
+template("nacl_clang_toolchain") {
+ toolchain_cpu = target_name
+ assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+ toolchain_package = "pnacl_newlib"
+ toolchain_revision = pnacl_newlib_rev
+ toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+ invoker.toolchain_tuple + "-",
+ root_build_dir)
+
+ nacl_toolchain("clang_newlib_" + toolchain_cpu) {
+ cc = toolprefix + "clang" + toolsuffix
+ cxx = toolprefix + "clang++" + toolsuffix
+ ar = toolprefix + "ar" + toolsuffix
+ ld = cxx
+ readelf = toolprefix + "readelf" + toolsuffix
+ nm = toolprefix + "nm" + toolsuffix
+ strip = toolprefix + "strip" + toolsuffix
+
+ toolchain_args = {
+ current_cpu = toolchain_cpu
+ is_clang = true
+ use_lld = false
+ }
+ }
+}
+
+template("nacl_irt_toolchain") {
+ toolchain_cpu = target_name
+ assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+ toolchain_package = "pnacl_newlib"
+ toolchain_revision = pnacl_newlib_rev
+ toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+ invoker.toolchain_tuple + "-",
+ root_build_dir)
+
+ link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir)
+
+ tls_edit_label =
+ "//native_client/src/tools/tls_edit:tls_edit($host_toolchain)"
+ host_toolchain_out_dir =
+ rebase_path(get_label_info(tls_edit_label, "root_out_dir"),
+ root_build_dir)
+ tls_edit = "${host_toolchain_out_dir}/tls_edit"
+
+ nacl_toolchain("irt_" + toolchain_cpu) {
+ cc = toolprefix + "clang" + toolsuffix
+ cxx = toolprefix + "clang++" + toolsuffix
+ ar = toolprefix + "ar" + toolsuffix
+ readelf = toolprefix + "readelf" + toolsuffix
+ nm = toolprefix + "nm" + toolsuffix
+ strip = toolprefix + "strip" + toolsuffix
+
+ # Some IRT implementations (notably, Chromium's) contain C++ code,
+ # so we need to link w/ the C++ linker.
+ ld = "${python_path} ${link_irt} --tls-edit=${tls_edit} --link-cmd=${cxx} --readelf-cmd=${readelf}"
+
+ toolchain_args = {
+ current_cpu = toolchain_cpu
+ is_clang = true
+ use_lld = false
+ }
+
+ # TODO(ncbray): depend on link script
+ deps = [
+ tls_edit_label,
+ ]
+ }
+}
+
+template("nacl_clang_toolchains") {
+ assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+ nacl_clang_toolchain(target_name) {
+ toolchain_tuple = invoker.toolchain_tuple
+ }
+ nacl_irt_toolchain(target_name) {
+ toolchain_tuple = invoker.toolchain_tuple
+ }
+}
+
+nacl_clang_toolchains("x86") {
+ # Rely on :compiler_cpu_abi adding -m32. See nacl_x86_glibc above.
+ toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_clang_toolchains("x64") {
+ toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_clang_toolchains("arm") {
+ toolchain_tuple = "arm-nacl"
+}
+
+nacl_clang_toolchains("mipsel") {
+ toolchain_tuple = "mipsel-nacl"
+}
diff --git a/deps/v8/build/toolchain/nacl_toolchain.gni b/deps/v8/build/toolchain/nacl_toolchain.gni
new file mode 100644
index 0000000000..11404e1e20
--- /dev/null
+++ b/deps/v8/build/toolchain/nacl_toolchain.gni
@@ -0,0 +1,59 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# This template defines a NaCl toolchain.
+#
+# It requires the following variables specifying the executables to run:
+# - cc
+# - cxx
+# - ar
+# - ld
+
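+# The template additionally reads toolchain_revision and toolchain_args from
+# the invoker. A minimal hypothetical invocation (real callers live in
+# //build/toolchain/nacl/BUILD.gn):
+#
+#   nacl_toolchain("glibc_x64") {
+#     cc = "x86_64-nacl-gcc"
+#     cxx = "x86_64-nacl-g++"
+#     ar = "x86_64-nacl-ar"
+#     ld = cxx
+#     toolchain_revision = "..."
+#     toolchain_args = {
+#       current_cpu = "x64"
+#     }
+#   }
+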
+template("nacl_toolchain") {
+ assert(defined(invoker.cc), "nacl_toolchain() must specify a \"cc\" value")
+ assert(defined(invoker.cxx), "nacl_toolchain() must specify a \"cxx\" value")
+ assert(defined(invoker.ar), "nacl_toolchain() must specify a \"ar\" value")
+ assert(defined(invoker.ld), "nacl_toolchain() must specify a \"ld\" value")
+ gcc_toolchain(target_name) {
+ if (defined(invoker.executable_extension)) {
+ executable_extension = invoker.executable_extension
+ } else {
+ executable_extension = ".nexe"
+ }
+ rebuild_define = "NACL_TC_REV=" + invoker.toolchain_revision
+
+ forward_variables_from(invoker,
+ [
+ "ar",
+ "cc",
+ "cxx",
+ "deps",
+ "ld",
+ "link_outputs",
+ "nm",
+ "readelf",
+ "strip",
+ ])
+
+ toolchain_args = {
+ # Use all values set on the invoker's toolchain_args.
+ forward_variables_from(invoker.toolchain_args, "*")
+
+ current_os = "nacl"
+
+ # We do not support component builds with the NaCl toolchains.
+ is_component_build = false
+
+ # We do not support tcmalloc in the NaCl toolchains.
+ use_allocator = "none"
+
+ # We do not support clang code coverage in the NaCl toolchains.
+ use_clang_coverage = false
+ coverage_instrumentation_input_file = ""
+ }
+ }
+}
diff --git a/deps/v8/build/toolchain/toolchain.gni b/deps/v8/build/toolchain/toolchain.gni
new file mode 100644
index 0000000000..9a13d296fa
--- /dev/null
+++ b/deps/v8/build/toolchain/toolchain.gni
@@ -0,0 +1,102 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Toolchain-related configuration that may be needed outside the context of the
+# toolchain() rules themselves.
+
+import("//build/config/chrome_build.gni")
+import("//build_overrides/build.gni")
+
+declare_args() {
+ # If this is set to true, or if LLVM_FORCE_HEAD_REVISION is set to 1
+ # in the environment, we use the revision in the llvm repo to determine
+ # the CLANG_REVISION to use, instead of the version hard-coded into
+ # //tools/clang/scripts/update.py. This should only be used in
+ # conjunction with setting LLVM_FORCE_HEAD_REVISION in the
+ # environment when `gclient runhooks` is run as well.
+ llvm_force_head_revision = false
+
+ # Compile with Xcode version of clang instead of hermetic version shipped
+ # with the build. Used on iOS to ship official builds (as they are built
+ # with the version of clang shipped with Xcode).
+ use_xcode_clang = is_ios && is_official_build
+
+ # Used for binary size analysis.
+ generate_linker_map = is_android && is_official_build
+}
+
+if (generate_linker_map) {
+ assert(
+ is_official_build,
+ "Linker map files should only be generated when is_official_build = true")
+ assert(current_os == "android" || target_os == "linux",
+ "Linker map files should only be generated for Android and Linux")
+}
+
+# The path to the hermetic install of Xcode. Only relevant when
+# use_system_xcode = false.
+hermetic_xcode_path =
+ rebase_path("//build/${target_os}_files/Xcode.app", "", root_build_dir)
+
+declare_args() {
+ if (is_clang) {
+ # Clang compiler version. Clang files are placed at version-dependent paths.
+ clang_version = "9.0.0"
+ }
+}
+
+# Check target_os here instead of is_ios as this file is loaded for secondary
+# toolchains (the host toolchain in particular) but the argument is the same for
+# all toolchains.
+assert(!use_xcode_clang || target_os == "ios",
+ "Using Xcode's clang is only supported in iOS builds")
+
+# Extension for shared library files (including leading dot).
+if (is_mac || is_ios) {
+ shlib_extension = ".dylib"
+} else if (is_android && is_component_build) {
+ # By appending .cr, we prevent name collisions with libraries already
+ # loaded by the Android zygote.
+ shlib_extension = ".cr.so"
+} else if (is_posix || is_fuchsia) {
+ shlib_extension = ".so"
+} else if (is_win) {
+ shlib_extension = ".dll"
+} else {
+ assert(false, "Platform not supported")
+}
+
+# Prefix for shared library files.
+if (is_posix || is_fuchsia) {
+ shlib_prefix = "lib"
+} else {
+ shlib_prefix = ""
+}
+
+# Directory for shared library files.
+if (is_fuchsia) {
+ shlib_subdir = "/lib"
+} else {
+ shlib_subdir = ""
+}
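+
+# For example, in an Android component build a shared library "base" is named
+# "${shlib_prefix}base${shlib_extension}", i.e. "libbase.cr.so".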
+
+# While other "tool"s in a toolchain are specific to the target of that
+# toolchain, the "stamp" and "copy" tools are really generic to the host;
+# but each toolchain must define them separately. GN doesn't allow a
+# template instantiation inside a toolchain definition, so some boilerplate
+# has to be repeated in each toolchain to define these two tools. These
+# four variables reduce the duplication in that boilerplate.
+stamp_description = "STAMP {{output}}"
+copy_description = "COPY {{source}} {{output}}"
+if (host_os == "win") {
+ _tool_wrapper_path =
+ rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+
+ stamp_command = "cmd /c type nul > \"{{output}}\""
+ copy_command =
+ "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
+} else {
+ stamp_command = "touch {{output}}"
+ copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+}
diff --git a/deps/v8/build/toolchain/win/BUILD.gn b/deps/v8/build/toolchain/win/BUILD.gn
new file mode 100644
index 0000000000..478a98774b
--- /dev/null
+++ b/deps/v8/build/toolchain/win/BUILD.gn
@@ -0,0 +1,496 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Set up the Visual Studio state.
+#
+# setup_toolchain.py (invoked via exec_script below) takes the VS path and the
+# compiler wrapper tool as arguments. It writes "environment.x86" and
+# "environment.x64" to the build directory and returns the toolchain data.
+
+# This tool is used as a wrapper for various commands below.
+tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
+
+if (use_goma) {
+ if (host_os == "win") {
+ goma_prefix = "$goma_dir/gomacc.exe "
+ } else {
+ goma_prefix = "$goma_dir/gomacc "
+ }
+ clang_prefix = goma_prefix
+} else {
+ goma_prefix = ""
+ if (cc_wrapper != "") {
+ clang_prefix = cc_wrapper + " "
+ } else {
+ clang_prefix = ""
+ }
+}
+
+# Copy the VS runtime DLL for the default toolchain to the root build directory
+# so things will run.
+if (current_toolchain == default_toolchain) {
+ if (is_debug) {
+ configuration_name = "Debug"
+ } else {
+ configuration_name = "Release"
+ }
+ exec_script("../../vs_toolchain.py",
+ [
+ "copy_dlls",
+ rebase_path(root_build_dir),
+ configuration_name,
+ target_cpu,
+ ])
+}
+
+if (host_os == "win") {
+ clang_cl = "clang-cl.exe"
+} else {
+ clang_cl = "clang-cl"
+}
+
+# Parameters:
+# environment: File name of environment file.
+#
+# You would also define a toolchain_args variable with at least these set:
+# current_cpu: current_cpu to pass as a build arg
+# current_os: current_os to pass as a build arg
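+#
+# A hypothetical invocation (note the template also reads "cl", the compiler
+# command, from the invoker, as the concrete toolchains below show):
+#
+#   msvc_toolchain("x64") {
+#     environment = "environment.x64"
+#     cl = "cl.exe"
+#     toolchain_args = {
+#       current_cpu = "x64"
+#       current_os = "win"
+#     }
+#   }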
+template("msvc_toolchain") {
+ toolchain(target_name) {
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ assert(defined(invoker.toolchain_args))
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+
+ # This value needs to be passed through unchanged.
+ host_toolchain = host_toolchain
+ }
+
+ # Make these apply to all tools below.
+ lib_switch = ""
+ lib_dir_switch = "/LIBPATH:"
+
+ # Object files go in this directory.
+ object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+ env = invoker.environment
+
+ cl = invoker.cl
+
+ if (use_lld) {
+ if (host_os == "win") {
+ lld_link = "lld-link.exe"
+ } else {
+ lld_link = "lld-link"
+ }
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+
+ # lld-link includes a replacement for lib.exe that can produce thin
+ # archives and understands bitcode (for lto builds).
+ lib = "$prefix/$lld_link /lib"
+ link = "$prefix/$lld_link"
+ if (host_os != "win") {
+ # See comment adding --rsp-quoting to $cl above for more information.
+ link = "$link --rsp-quoting=posix"
+ }
+ } else {
+ lib = "lib.exe"
+ link = "link.exe"
+ }
+
+ # If possible, pass system includes as flags to the compiler. When that's
+ # not possible, load a full environment file (containing %INCLUDE% and
+ # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just
+ # passing in a list of include directories isn't enough.
+ if (defined(invoker.sys_include_flags)) {
+ env_wrapper = ""
+ sys_include_flags = "${invoker.sys_include_flags} " # Note trailing space.
+ } else {
+ # clang-cl doesn't need this env hoop, so omit it there.
+ assert((defined(toolchain_args.is_clang) && !toolchain_args.is_clang) ||
+ !is_clang)
+ env_wrapper = "ninja -t msvc -e $env -- " # Note trailing space.
+ sys_include_flags = ""
+ }
+
+  # ninja only supports "-t msvc" on Windows, and lld doesn't depend on
+  # mt.exe being in %PATH% on non-Windows, so the wrapper isn't needed there.
+ if (defined(invoker.sys_lib_flags)) {
+ linker_wrapper = ""
+ sys_lib_flags = "${invoker.sys_lib_flags} " # Note trailing space
+ } else if (use_lld) {
+ # Invoke ninja as wrapper instead of tool wrapper, because python
+ # invocation requires higher cpu usage compared to ninja invocation, and
+ # the python wrapper is only needed to work around link.exe problems.
+ # TODO(thakis): Remove wrapper once lld-link can merge manifests without
+ # relying on mt.exe being in %PATH% on Windows, https://crbug.com/872740
+ linker_wrapper = "ninja -t msvc -e $env -- " # Note trailing space.
+ sys_lib_flags = ""
+ } else {
+ linker_wrapper =
+ "$python_path $tool_wrapper_path link-wrapper $env False " # Note trailing space.
+ sys_lib_flags = ""
+ }
+
+ tool("cc") {
+ precompiled_header_type = "msvc"
+ pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
+
+ # Label names may have spaces in them so the pdbname must be quoted. The
+ # source and output don't need to be quoted because GN knows they're a
+ # full file name and will quote automatically when necessary.
+ depsformat = "msvc"
+ description = "CC {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.obj",
+ ]
+
+ command = "$env_wrapper$cl /nologo /showIncludes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
+ }
+
+ tool("cxx") {
+ precompiled_header_type = "msvc"
+
+ # The PDB name needs to be different between C and C++ compiled files.
+ pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
+
+ # See comment in CC tool about quoting.
+ depsformat = "msvc"
+ description = "CXX {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.obj",
+ ]
+
+ command = "$env_wrapper$cl /nologo /showIncludes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
+ }
+
+ tool("rc") {
+ command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe /nologo {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+ depsformat = "msvc"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.res",
+ ]
+ description = "RC {{output}}"
+ }
+
+ tool("asm") {
+ if (toolchain_args.current_cpu == "arm64") {
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ ml = "${clang_prefix}${prefix}/${clang_cl} --target=arm64-windows"
+ ml += " -c -o{{output}}"
+ } else {
+ if (toolchain_args.current_cpu == "x64") {
+ ml = "ml64.exe"
+ } else {
+ ml = "ml.exe"
+ }
+ ml += " /nologo /c /Fo{{output}}"
+ if (use_lld) {
+ # Wrap ml(64).exe with a script that makes its output deterministic.
+ # It's lld only because the script zaps obj Timestamp which
+ # link.exe /incremental looks at.
+ # TODO(https://crbug.com/762167): If we end up writing an llvm-ml64,
+ # make sure it has deterministic output (maybe with /Brepro or
+ # something) and remove this wrapper.
+ ml_py = rebase_path("ml.py", root_build_dir)
+ ml = "$python_path $ml_py $ml"
+ }
+ }
+ command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}"
+ description = "ASM {{output}}"
+ outputs = [
+ "$object_subdir/{{source_name_part}}.obj",
+ ]
+ }
+
+ tool("alink") {
+ rspfile = "{{output}}.rsp"
+ command = "$linker_wrapper$lib /nologo ${sys_lib_flags}{{arflags}} /OUT:{{output}} @$rspfile"
+ description = "LIB {{output}}"
+ outputs = [
+ # Ignore {{output_extension}} and always use .lib, there's no reason to
+ # allow targets to override this extension on Windows.
+ "{{output_dir}}/{{target_output_name}}.lib",
+ ]
+ default_output_extension = ".lib"
+ default_output_dir = "{{target_out_dir}}"
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{inputs_newline}}"
+ }
+
+ tool("solink") {
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # e.g. foo.dll
+ libname = "${dllname}.lib" # e.g. foo.dll.lib
+ pdbname = "${dllname}.pdb"
+ rspfile = "${dllname}.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ command = "$linker_wrapper$link /nologo ${sys_lib_flags}/IMPLIB:$libname /DLL /OUT:$dllname /PDB:$pdbname @$rspfile"
+
+ default_output_extension = ".dll"
+ default_output_dir = "{{root_out_dir}}"
+ description = "LINK(DLL) {{output}}"
+ outputs = [
+ dllname,
+ libname,
+ ]
+ link_output = libname
+ depend_output = libname
+ runtime_outputs = [ dllname ]
+ if (symbol_level != 0) {
+ outputs += [ pdbname ]
+ runtime_outputs += [ pdbname ]
+ }
+
+    # Since the above command only updates the .lib file when it changes, ask
+    # Ninja to check whether the timestamp actually changed to know if
+    # downstream dependencies should be recompiled.
+ restat = true
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+ }
+
+ tool("solink_module") {
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # e.g. foo.dll
+ pdbname = "${dllname}.pdb"
+ rspfile = "${dllname}.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ command = "$linker_wrapper$link /nologo ${sys_lib_flags}/DLL /OUT:$dllname /PDB:$pdbname @$rspfile"
+
+ default_output_extension = ".dll"
+ default_output_dir = "{{root_out_dir}}"
+ description = "LINK_MODULE(DLL) {{output}}"
+ outputs = [
+ dllname,
+ ]
+ if (symbol_level != 0) {
+ outputs += [ pdbname ]
+ }
+ runtime_outputs = outputs
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+ }
+
+ tool("link") {
+ exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ pdbname = "$exename.pdb"
+ rspfile = "$exename.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ command = "$linker_wrapper$link /nologo ${sys_lib_flags}/OUT:$exename /PDB:$pdbname @$rspfile"
+
+ default_output_extension = ".exe"
+ default_output_dir = "{{root_out_dir}}"
+ description = "LINK {{output}}"
+ outputs = [
+ exename,
+ ]
+ if (symbol_level != 0) {
+ outputs += [ pdbname ]
+ }
+ runtime_outputs = outputs
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+ }
+
+ # These two are really entirely generic, but have to be repeated in
+ # each toolchain because GN doesn't allow a template to be used here.
+ # See //build/toolchain/toolchain.gni for details.
+ tool("stamp") {
+ command = stamp_command
+ description = stamp_description
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+ tool("copy") {
+ command = copy_command
+ description = copy_description
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+
+ tool("action") {
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+ }
+}
+
+if (target_cpu == "x86" || target_cpu == "x64") {
+ win_build_host_cpu = target_cpu
+} else {
+ win_build_host_cpu = host_cpu
+}
+
+# x86, arm and arm64 build cpu toolchains for Windows (not WinUWP). Only
+# define when the build cpu is one of these architectures since we don't
+# do any cross compiles when targeting 64-bit (the build does generate
+# some 64-bit stuff from x86/arm/arm64 target builds).
+if (win_build_host_cpu != "x64") {
+ build_cpu_toolchain_data = exec_script("setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ host_os,
+ win_build_host_cpu,
+ "environment." + win_build_host_cpu,
+ ],
+ "scope")
+
+ msvc_toolchain(win_build_host_cpu) {
+ environment = "environment." + win_build_host_cpu
+ cl = "${goma_prefix}\"${build_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
+ if (host_os != "win") {
+ # For win cross build.
+ sys_lib_flags = "${build_cpu_toolchain_data.libpath_flags}"
+ }
+ toolchain_args = {
+ current_os = "win"
+ current_cpu = win_build_host_cpu
+ is_clang = false
+ }
+ }
+
+ msvc_toolchain("win_clang_" + win_build_host_cpu) {
+ environment = "environment." + win_build_host_cpu
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ cl = "${clang_prefix}$prefix/${clang_cl}"
+ sys_include_flags = "${build_cpu_toolchain_data.include_flags_imsvc}"
+ if (host_os != "win") {
+ # For win cross build.
+ sys_lib_flags = "${build_cpu_toolchain_data.libpath_flags}"
+ }
+
+ toolchain_args = {
+ current_os = "win"
+ current_cpu = win_build_host_cpu
+ is_clang = true
+ }
+ }
+}
+
+# 64-bit toolchains, including x64 and arm64.
+template("win_64bit_toolchains") {
+ assert(defined(invoker.toolchain_arch))
+ toolchain_arch = invoker.toolchain_arch
+
+ win_64bit_toolchain_data = exec_script("setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ "win",
+ toolchain_arch,
+ "environment." + toolchain_arch,
+ ],
+ "scope")
+
+ msvc_toolchain(target_name) {
+ environment = "environment." + toolchain_arch
+ cl = "${goma_prefix}\"${win_64bit_toolchain_data.vc_bin_dir}/cl.exe\""
+ if (host_os != "win") {
+ # For win cross build
+ sys_lib_flags = "${win_64bit_toolchain_data.libpath_flags}"
+ }
+
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+ is_clang = false
+ current_os = "win"
+ current_cpu = toolchain_arch
+ }
+ }
+
+ msvc_toolchain("win_clang_" + target_name) {
+ environment = "environment." + toolchain_arch
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ cl = "${clang_prefix}$prefix/${clang_cl}"
+ sys_include_flags = "${win_64bit_toolchain_data.include_flags_imsvc}"
+ if (host_os != "win") {
+ # For win cross build
+ sys_lib_flags = "${win_64bit_toolchain_data.libpath_flags}"
+ }
+
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+ is_clang = true
+ current_os = "win"
+ current_cpu = toolchain_arch
+ }
+ }
+}
+
+win_64bit_toolchains("x64") {
+ toolchain_arch = "x64"
+}
+
+if (target_cpu == "arm64") {
+ win_64bit_toolchains("arm64") {
+ toolchain_arch = "arm64"
+ }
+}
+
+# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
+# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
+# The only reason it's a separate toolchain is so that it can force
+# is_component_build to false in the toolchain_args() block, because
+# building nacl64.exe in component style does not work.
+win_64bit_toolchains("nacl_win64") {
+ toolchain_arch = "x64"
+ toolchain_args = {
+ is_component_build = false
+ }
+}
+
+# WinUWP toolchains. Only define these when targeting them.
+
+if (target_os == "winuwp") {
+ assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" ||
+ target_cpu == "arm64")
+ store_cpu_toolchain_data = exec_script("setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ target_os,
+ target_cpu,
+ "environment.store_" + target_cpu,
+ ],
+ "scope")
+
+ msvc_toolchain("uwp_" + target_cpu) {
+ environment = "environment.store_" + target_cpu
+ cl = "${goma_prefix}\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
+ toolchain_args = {
+ current_os = "winuwp"
+ current_cpu = target_cpu
+ is_clang = false
+ }
+ }
+}
diff --git a/deps/v8/build/toolchain/win/midl.gni b/deps/v8/build/toolchain/win/midl.gni
new file mode 100644
index 0000000000..b46f4cd538
--- /dev/null
+++ b/deps/v8/build/toolchain/win/midl.gni
@@ -0,0 +1,118 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler. The generated
+# source code will be compiled and linked into targets that depend on this.
+#
+# Parameters
+#
+# sources
+# List of .idl files to process.
+#
+# header_file (optional)
+# File name of generated header file. Defaults to the basename of the
+# source idl file with a .h extension.
+#
+# out_dir (optional)
+# Directory to write the generated files to. Defaults to target_gen_dir.
+#
+# dynamic_guid (optional)
+# If the GUID is not constant across builds, the GUID to use for the
+# current build.
+#
+# deps (optional)
+# visibility (optional)
+
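+# Example (hypothetical target):
+#
+#   midl("my_interfaces") {
+#     sources = [ "my_interface.idl" ]
+#   }
+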
+template("midl") {
+ action_name = "${target_name}_idl_action"
+ source_set_name = target_name
+
+ assert(defined(invoker.sources), "Source must be defined for $target_name")
+
+ if (defined(invoker.out_dir)) {
+ out_dir = invoker.out_dir
+ } else {
+ out_dir = target_gen_dir
+ }
+
+ if (defined(invoker.dynamic_guid)) {
+ dynamic_guid = invoker.dynamic_guid
+ } else {
+ dynamic_guid = "none"
+ }
+
+ if (defined(invoker.header_file)) {
+ header_file = invoker.header_file
+ } else {
+ header_file = "{{source_name_part}}.h"
+ }
+
+ dlldata_file = "{{source_name_part}}.dlldata.c"
+ interface_identifier_file = "{{source_name_part}}_i.c"
+ proxy_file = "{{source_name_part}}_p.c"
+ type_library_file = "{{source_name_part}}.tlb"
+
+ action_foreach(action_name) {
+ visibility = [ ":$source_set_name" ]
+ script = "//build/toolchain/win/midl.py"
+
+ sources = invoker.sources
+
+ # Note that .tlb is not included in the outputs as it is not always
+ # generated depending on the content of the input idl file.
+ outputs = [
+ "$out_dir/$header_file",
+ "$out_dir/$dlldata_file",
+ "$out_dir/$interface_identifier_file",
+ "$out_dir/$proxy_file",
+ ]
+
+ if (current_cpu == "x86") {
+ win_tool_arch = "environment.x86"
+ idl_target_platform = "win32"
+ } else if (current_cpu == "x64") {
+ win_tool_arch = "environment.x64"
+ idl_target_platform = "x64"
+ } else if (current_cpu == "arm64") {
+ win_tool_arch = "environment.arm64"
+ idl_target_platform = "arm64"
+ } else {
+ assert(false, "Need environment for this arch")
+ }
+
+ args = [
+ win_tool_arch,
+ rebase_path(out_dir, root_build_dir),
+ dynamic_guid,
+ type_library_file,
+ header_file,
+ dlldata_file,
+ interface_identifier_file,
+ proxy_file,
+ "{{source}}",
+ "/char",
+ "signed",
+ "/env",
+ idl_target_platform,
+ "/Oicf",
+ ]
+
+ forward_variables_from(invoker, [ "deps" ])
+ }
+
+ source_set(target_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+
+ # We only compile the IID files from the IDL tool rather than all outputs.
+ sources = process_file_template(invoker.sources,
+ [ "$out_dir/$interface_identifier_file" ])
+
+ public_deps = [
+ ":$action_name",
+ ]
+ }
+}
diff --git a/deps/v8/build/toolchain/win/midl.py b/deps/v8/build/toolchain/win/midl.py
new file mode 100644
index 0000000000..09fec0b8cf
--- /dev/null
+++ b/deps/v8/build/toolchain/win/midl.py
@@ -0,0 +1,238 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import array
+import difflib
+import distutils.dir_util
+import filecmp
+import operator
+import os
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+import uuid
+
+
+def ZapTimestamp(filename):
+ contents = open(filename, 'rb').read()
+ # midl.exe writes timestamp 2147483647 (2^31 - 1) as creation date into its
+ # outputs, but using the local timezone. To make the output timezone-
+ # independent, replace that date with a fixed string of the same length.
+ # Also blank out the minor version number.
+ if filename.endswith('.tlb'):
+ # See https://chromium-review.googlesource.com/c/chromium/src/+/693223 for
+ # a fairly complete description of the .tlb binary format.
+ # TLB files start with a 54 byte header. Offset 0x20 stores how many types
+ # are defined in the file, and the header is followed by that many uint32s.
+ # After that, 15 section headers appear. Each section header is 16 bytes,
+ # starting with offset and length uint32s.
+ # Section 12 in the file contains custom() data. custom() data has a type
+ # (int, string, etc). Each custom data chunk starts with a uint16_t
+ # describing its type. Type 8 is string data, consisting of a uint32_t
+ # len, followed by that many data bytes, followed by 'W' bytes to pad to a
+ # 4 byte boundary. Type 0x13 is uint32 data, followed by 4 data bytes,
+ # followed by two 'W' to pad to a 4 byte boundary.
+ # The custom block always starts with one string containing "Created by
+ # MIDL version 8...", followed by one uint32 containing 0x7fffffff,
+ # followed by another uint32 containing the MIDL compiler version (e.g.
+ # 0x0801026e for v8.1.622 -- 0x26e == 622). These 3 fields take 0x54 bytes.
+ # There might be more custom data after that, but these 3 blocks are always
+ # there for file-level metadata.
+ # All data is little-endian in the file.
+ assert contents[0:8] == 'MSFT\x02\x00\x01\x00'
+ ntypes, = struct.unpack_from('<I', contents, 0x20)
+ custom_off, custom_len = struct.unpack_from(
+ '<II', contents, 0x54 + 4*ntypes + 11*16)
+ assert custom_len >= 0x54
+ # First: Type string (0x8), followed by 0x3e characters.
+ assert contents[custom_off:custom_off+6] == '\x08\x00\x3e\x00\x00\x00'
+ assert re.match(
+ 'Created by MIDL version 8\.\d\d\.\d{4} at ... Jan 1. ..:..:.. 2038\n',
+ contents[custom_off+6:custom_off+6+0x3e])
+ # Second: Type uint32 (0x13) storing 0x7fffffff (followed by WW / 0x57 pad)
+ assert contents[custom_off+6+0x3e:custom_off+6+0x3e+8] == \
+ '\x13\x00\xff\xff\xff\x7f\x57\x57'
+ # Third: Type uint32 (0x13) storing MIDL compiler version.
+ assert contents[custom_off+6+0x3e+8:custom_off+6+0x3e+8+2] == '\x13\x00'
+ # Replace "Created by" string with fixed string, and fixed MIDL version with
+ # 8.1.622 always.
+ contents = (contents[0:custom_off+6] +
+ 'Created by MIDL version 8.xx.xxxx at a redacted point in time\n' +
+ # uint32 (0x13) val 0x7fffffff, WW, uint32 (0x13), val 0x0801026e, WW
+ '\x13\x00\xff\xff\xff\x7f\x57\x57\x13\x00\x6e\x02\x01\x08\x57\x57' +
+ contents[custom_off + 0x54:])
+ else:
+ contents = re.sub(
+ 'File created by MIDL compiler version 8\.\d\d\.\d{4} \*/\r\n'
+ '/\* at ... Jan 1. ..:..:.. 2038',
+ 'File created by MIDL compiler version 8.xx.xxxx */\r\n'
+ '/* at a redacted point in time',
+ contents)
+ contents = re.sub(
+ ' Oicf, W1, Zp8, env=(.....) \(32b run\), '
+ 'target_arch=(AMD64|X86) 8\.\d\d\.\d{4}',
+ ' Oicf, W1, Zp8, env=\\1 (32b run), target_arch=\\2 8.xx.xxxx',
+ contents)
+ # TODO(thakis): If we need more hacks than these, try to verify checked-in
+ # outputs when we're using the hermetic toolchain.
+ # midl.exe older than 8.1.622 omit '//' after #endif, fix that:
+ contents = contents.replace('#endif !_MIDL_USE_GUIDDEF_',
+ '#endif // !_MIDL_USE_GUIDDEF_')
+ # midl.exe puts the midl version into code in one place. To have
+ # predictable output, lie about the midl version if it's not 8.1.622.
+ # This is unfortunate, but remember that there's beauty too in imperfection.
+ contents = contents.replace('0x801026c, /* MIDL Version 8.1.620 */',
+ '0x801026e, /* MIDL Version 8.1.622 */')
+ open(filename, 'wb').write(contents)
+
+
+def overwrite_cls_guid_h(h_file, dynamic_guid):
+ contents = open(h_file, 'rb').read()
+ contents = re.sub('class DECLSPEC_UUID\("[^"]*"\)',
+ 'class DECLSPEC_UUID("%s")' % str(dynamic_guid), contents)
+ open(h_file, 'wb').write(contents)
+
+
+def overwrite_cls_guid_iid(iid_file, dynamic_guid):
+ contents = open(iid_file, 'rb').read()
+ hexuuid = '0x%08x,0x%04x,0x%04x,' % dynamic_guid.fields[0:3]
+ hexuuid += ','.join('0x%02x' % ord(b) for b in dynamic_guid.bytes[8:])
+ contents = re.sub(r'MIDL_DEFINE_GUID\(CLSID, ([^,]*),[^)]*\)',
+ r'MIDL_DEFINE_GUID(CLSID, \1,%s)' % hexuuid, contents)
+ open(iid_file, 'wb').write(contents)
+
+
+def overwrite_cls_guid_tlb(tlb_file, dynamic_guid):
+ # See ZapTimestamp() for a short overview of the .tlb format. The 1st
+ # section contains type descriptions, and the first type should be our
+ # coclass. It points to the type's GUID in section 6, the GUID section.
+ contents = open(tlb_file, 'rb').read()
+ assert contents[0:8] == 'MSFT\x02\x00\x01\x00'
+ ntypes, = struct.unpack_from('<I', contents, 0x20)
+ type_off, type_len = struct.unpack_from('<II', contents, 0x54 + 4*ntypes)
+ assert ord(contents[type_off]) == 0x25, "expected coclass"
+ guidind = struct.unpack_from('<I', contents, type_off + 0x2c)[0]
+ guid_off, guid_len = struct.unpack_from(
+ '<II', contents, 0x54 + 4*ntypes + 5*16)
+ assert guidind + 14 <= guid_len
+ contents = array.array('c', contents)
+ struct.pack_into('<IHH8s', contents, guid_off + guidind,
+ *(dynamic_guid.fields[0:3] + (dynamic_guid.bytes[8:],)))
+ # The GUID is correct now, but there's also a GUID hashtable in section 5.
+ # Need to recreate that too. Since the hash table uses chaining, it's
+ # easiest to recompute it from scratch rather than trying to patch it up.
+ hashtab = [0xffffffff] * (0x80 / 4)
+ for guidind in range(guid_off, guid_off + guid_len, 24):
+ guidbytes, typeoff, nextguid = struct.unpack_from(
+ '<16sII', contents, guidind)
+ words = struct.unpack('<8H', guidbytes)
+ # midl seems to use the following simple hash function for GUIDs:
+ guidhash = reduce(operator.xor, [w for w in words]) % (0x80 / 4)
+ nextguid = hashtab[guidhash]
+ struct.pack_into('<I', contents, guidind + 0x14, nextguid)
+ hashtab[guidhash] = guidind - guid_off
+ hash_off, hash_len = struct.unpack_from(
+ '<II', contents, 0x54 + 4*ntypes + 4*16)
+ for i, hashval in enumerate(hashtab):
+ struct.pack_into('<I', contents, hash_off + 4*i, hashval)
+ open(tlb_file, 'wb').write(contents)
+
+
+def overwrite_cls_guid(h_file, iid_file, tlb_file, dynamic_guid):
+  # Fix up the GUID in the .h, _i.c, and .tlb files. This currently assumes
+  # that there's only one coclass in the idl file, and that it's the type
+  # with the dynamic GUID.
+ overwrite_cls_guid_h(h_file, dynamic_guid)
+ overwrite_cls_guid_iid(iid_file, dynamic_guid)
+ overwrite_cls_guid_tlb(tlb_file, dynamic_guid)
+
+
+def main(arch, outdir, dynamic_guid, tlb, h, dlldata, iid, proxy, idl, *flags):
+ # Copy checked-in outputs to final location.
+ THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+ source = os.path.join(THIS_DIR, '..', '..', '..',
+ 'third_party', 'win_build_output', outdir.replace('gen/', 'midl/'))
+ if os.path.isdir(os.path.join(source, os.path.basename(idl))):
+ source = os.path.join(source, os.path.basename(idl))
+ source = os.path.join(source, arch.split('.')[1]) # Append 'x86' or 'x64'.
+ source = os.path.normpath(source)
+ distutils.dir_util.copy_tree(source, outdir, preserve_times=False)
+ if dynamic_guid != 'none':
+ overwrite_cls_guid(os.path.join(outdir, h),
+ os.path.join(outdir, iid),
+ os.path.join(outdir, tlb),
+ uuid.UUID(dynamic_guid))
+
+ # On non-Windows, that's all we can do.
+ if sys.platform != 'win32':
+ return 0
+
+ # On Windows, run midl.exe on the input and check that its outputs are
+ # identical to the checked-in outputs (after possibly replacing their main
+ # class guid).
+ tmp_dir = tempfile.mkdtemp()
+ delete_tmp_dir = True
+
+ # Read the environment block from the file. This is stored in the format used
+ # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
+ # trailing vs. separator.
+ env_pairs = open(arch).read()[:-2].split('\0')
+ env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+ args = ['midl', '/nologo'] + list(flags) + [
+ '/out', tmp_dir,
+ '/tlb', tlb,
+ '/h', h,
+ '/dlldata', dlldata,
+ '/iid', iid,
+ '/proxy', proxy,
+ idl]
+ try:
+ popen = subprocess.Popen(args, shell=True, env=env_dict,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+    # Filter junk out of stdout and print what remains. The output we want
+    # to filter is pairs of lines that look like this:
+ # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+ # objidl.idl
+ lines = out.splitlines()
+ prefixes = ('Processing ', '64 bit Processing ')
+ processing = set(os.path.basename(x)
+ for x in lines if x.startswith(prefixes))
+ for line in lines:
+ if not line.startswith(prefixes) and line not in processing:
+ print line
+ if popen.returncode != 0:
+ return popen.returncode
+
+ for f in os.listdir(tmp_dir):
+ ZapTimestamp(os.path.join(tmp_dir, f))
+
+ # Now compare the output in tmp_dir to the copied-over outputs.
+ diff = filecmp.dircmp(tmp_dir, outdir)
+ if diff.diff_files:
+ print 'midl.exe output different from files in %s, see %s' \
+ % (outdir, tmp_dir)
+ for f in diff.diff_files:
+ if f.endswith('.tlb'): continue
+ fromfile = os.path.join(outdir, f)
+ tofile = os.path.join(tmp_dir, f)
+ print ''.join(difflib.unified_diff(open(fromfile, 'U').readlines(),
+ open(tofile, 'U').readlines(),
+ fromfile, tofile))
+ delete_tmp_dir = False
+ print 'To rebaseline:'
+ print ' copy /y %s\* %s' % (tmp_dir, source)
+ sys.exit(1)
+ return 0
+ finally:
+ if os.path.exists(tmp_dir) and delete_tmp_dir:
+ shutil.rmtree(tmp_dir)
+
+
+if __name__ == '__main__':
+ sys.exit(main(*sys.argv[1:]))
diff --git a/deps/v8/build/toolchain/win/ml.py b/deps/v8/build/toolchain/win/ml.py
new file mode 100755
index 0000000000..877c584c57
--- /dev/null
+++ b/deps/v8/build/toolchain/win/ml.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps ml.exe or ml64.exe and postprocesses the output to be deterministic.
+Sets timestamp in .obj file to 0, hence incompatible with link.exe /incremental.
+
+Use by prefixing the ml(64).exe invocation with this script:
+ python ml.py ml.exe [args...]"""
+
+import array
+import collections
+import struct
+import subprocess
+import sys
+
+
+class Struct(object):
+ """A thin wrapper around the struct module that returns a namedtuple"""
+ def __init__(self, name, *args):
+ """Pass the name of the return type, and then an interleaved list of
+ format strings as used by the struct module and of field names."""
+ self.fmt = '<' + ''.join(args[0::2])
+ self.type = collections.namedtuple(name, args[1::2])
+
+ def pack_into(self, buffer, offset, data):
+ return struct.pack_into(self.fmt, buffer, offset, *data)
+
+ def unpack_from(self, buffer, offset=0):
+ return self.type(*struct.unpack_from(self.fmt, buffer, offset))
+
+ def size(self):
+ return struct.calcsize(self.fmt)
+
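+# Example usage of Struct (an illustrative sketch, not in the original):
+#   COORD = Struct('COORD', 'H', 'X', 'H', 'Y')
+#   c = COORD.unpack_from(buf)                # -> COORD(X=..., Y=...)
+#   COORD.pack_into(buf, 0, c._replace(X=0))  # write back with X zeroed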
+
+def Subtract(nt, **kwargs):
+ """Subtract(nt, f=2) returns a new namedtuple with 2 subtracted from nt.f"""
+ return nt._replace(**{k: getattr(nt, k) - v for k, v in kwargs.iteritems()})
+
+
+def MakeDeterministic(objdata):
+ # Takes data produced by ml(64).exe (without any special flags) and
+ # 1. Sets the timestamp to 0
+ # 2. Strips the .debug$S section (which contains an unwanted absolute path)
+
+ # This makes several assumptions about ml's output:
+ # - Section data is in the same order as the corresponding section headers:
+  #   section headers preceding the .debug$S section header have their data
+  #   preceding the .debug$S section data; likewise for section headers
+ # following the .debug$S section.
+ # - The .debug$S section contains only the absolute path to the obj file and
+ # nothing else, in particular there's only a single entry in the symbol
+  #   table referring to the .debug$S section.
+ # - There are no COFF line number entries.
+ # - There's no IMAGE_SYM_CLASS_CLR_TOKEN symbol.
+ # These seem to hold in practice; if they stop holding this script needs to
+ # become smarter.
+
+ objdata = array.array('c', objdata) # Writable, e.g. via struct.pack_into.
+
+ # Read coff header.
+ COFFHEADER = Struct('COFFHEADER',
+ 'H', 'Machine',
+ 'H', 'NumberOfSections',
+ 'I', 'TimeDateStamp',
+ 'I', 'PointerToSymbolTable',
+ 'I', 'NumberOfSymbols',
+
+ 'H', 'SizeOfOptionalHeader',
+ 'H', 'Characteristics')
+ coff_header = COFFHEADER.unpack_from(objdata)
+ assert coff_header.SizeOfOptionalHeader == 0 # Only set for binaries.
+
+ # Read section headers following coff header.
+ SECTIONHEADER = Struct('SECTIONHEADER',
+ '8s', 'Name',
+ 'I', 'VirtualSize',
+ 'I', 'VirtualAddress',
+
+ 'I', 'SizeOfRawData',
+ 'I', 'PointerToRawData',
+ 'I', 'PointerToRelocations',
+ 'I', 'PointerToLineNumbers',
+
+ 'H', 'NumberOfRelocations',
+ 'H', 'NumberOfLineNumbers',
+ 'I', 'Characteristics')
+ section_headers = []
+ debug_section_index = -1
+ for i in range(0, coff_header.NumberOfSections):
+ section_header = SECTIONHEADER.unpack_from(
+ objdata, offset=COFFHEADER.size() + i * SECTIONHEADER.size())
+    assert not section_header.Name.startswith('/') # Support short names only.
+ section_headers.append(section_header)
+
+ if section_header.Name == '.debug$S':
+ assert debug_section_index == -1
+ debug_section_index = i
+ assert debug_section_index != -1
+
+ data_start = COFFHEADER.size() + len(section_headers) * SECTIONHEADER.size()
+
+ # Verify the .debug$S section looks like we expect.
+ assert section_headers[debug_section_index].Name == '.debug$S'
+ assert section_headers[debug_section_index].VirtualSize == 0
+ assert section_headers[debug_section_index].VirtualAddress == 0
+ debug_size = section_headers[debug_section_index].SizeOfRawData
+ debug_offset = section_headers[debug_section_index].PointerToRawData
+ assert section_headers[debug_section_index].PointerToRelocations == 0
+ assert section_headers[debug_section_index].PointerToLineNumbers == 0
+ assert section_headers[debug_section_index].NumberOfRelocations == 0
+ assert section_headers[debug_section_index].NumberOfLineNumbers == 0
+
+  # Make sure sections in front of .debug$S have their data preceding it.
+ for header in section_headers[:debug_section_index]:
+ assert header.PointerToRawData < debug_offset
+ assert header.PointerToRelocations < debug_offset
+ assert header.PointerToLineNumbers < debug_offset
+
+  # Make sure sections after .debug$S have their data following it.
+ for header in section_headers[debug_section_index + 1:]:
+ # Make sure the .debug$S data is at the very end of section data:
+ assert header.PointerToRawData > debug_offset
+ assert header.PointerToRelocations == 0
+ assert header.PointerToLineNumbers == 0
+
+ # Make sure the first non-empty section's data starts right after the section
+ # headers.
+ for section_header in section_headers:
+ if section_header.PointerToRawData == 0:
+ assert section_header.PointerToRelocations == 0
+ assert section_header.PointerToLineNumbers == 0
+ continue
+ assert section_header.PointerToRawData == data_start
+ break
+
+ # Make sure the symbol table (and hence, string table) appear after the last
+ # section:
+ assert (coff_header.PointerToSymbolTable >=
+ section_headers[-1].PointerToRawData + section_headers[-1].SizeOfRawData)
+
+ # The symbol table contains a symbol for the no-longer-present .debug$S
+ # section. If we leave it there, lld-link will complain:
+ #
+ # lld-link: error: .debug$S should not refer to non-existent section 5
+ #
+ # so we need to remove that symbol table entry as well. This shifts symbol
+ # entries around and we need to update symbol table indices in:
+ # - relocations
+ # - line number records (never present)
+  # - aux symbol entries (never present in ml output)
+ SYM = Struct('SYM',
+ '8s', 'Name',
+ 'I', 'Value',
+ 'h', 'SectionNumber', # Note: Signed!
+ 'H', 'Type',
+
+ 'B', 'StorageClass',
+ 'B', 'NumberOfAuxSymbols')
+ i = 0
+ debug_sym = -1
+ while i < coff_header.NumberOfSymbols:
+ sym_offset = coff_header.PointerToSymbolTable + i * SYM.size()
+ sym = SYM.unpack_from(objdata, sym_offset)
+
+ # 107 is IMAGE_SYM_CLASS_CLR_TOKEN, which has aux entry "CLR Token
+ # Definition", which contains a symbol index. Check it's never present.
+ assert sym.StorageClass != 107
+
+ # Note: sym.SectionNumber is 1-based, debug_section_index is 0-based.
+ if sym.SectionNumber - 1 == debug_section_index:
+ assert debug_sym == -1, 'more than one .debug$S symbol found'
+ debug_sym = i
+ # Make sure the .debug$S symbol looks like we expect.
+ # In particular, it should have exactly one aux symbol.
+ assert sym.Name == '.debug$S'
+ assert sym.Value == 0
+ assert sym.Type == 0
+ assert sym.StorageClass == 3
+ assert sym.NumberOfAuxSymbols == 1
+ elif sym.SectionNumber > debug_section_index:
+ sym = Subtract(sym, SectionNumber=1)
+ SYM.pack_into(objdata, sym_offset, sym)
+ i += 1 + sym.NumberOfAuxSymbols
+ assert debug_sym != -1, '.debug$S symbol not found'
+
+ # Note: Usually the .debug$S section is the last, but for files saying
+ # `includelib foo.lib`, like safe_terminate_process.asm in 32-bit builds,
+ # this isn't true: .drectve is after .debug$S.
+
+ # Update symbol table indices in relocations.
+ # There are a few processor types that have one or two relocation types
+ # where SymbolTableIndex has a different meaning, but not for x86.
+ REL = Struct('REL',
+ 'I', 'VirtualAddress',
+ 'I', 'SymbolTableIndex',
+ 'H', 'Type')
+ for header in section_headers[0:debug_section_index]:
+ for j in range(0, header.NumberOfRelocations):
+ rel_offset = header.PointerToRelocations + j * REL.size()
+ rel = REL.unpack_from(objdata, rel_offset)
+ assert rel.SymbolTableIndex != debug_sym
+ if rel.SymbolTableIndex > debug_sym:
+ rel = Subtract(rel, SymbolTableIndex=2)
+ REL.pack_into(objdata, rel_offset, rel)
+
+ # Update symbol table indices in line numbers -- just check they don't exist.
+ for header in section_headers:
+ assert header.NumberOfLineNumbers == 0
+
+  # Now that all indices are updated, remove the symbol table entry referring
+  # to .debug$S and its aux entry.
+ del objdata[coff_header.PointerToSymbolTable + debug_sym * SYM.size():
+ coff_header.PointerToSymbolTable + (debug_sym + 2) * SYM.size()]
+
+  # Now we know that it's safe to write out the input data, with just the
+  # timestamp overwritten to 0, the .debug$S section header cut out (and the
+  # offsets of all other section headers decremented by the size of that
+  # one section header), and the .debug$S section's data cut out. The symbol
+  # table offset needs to be reduced by one section header and the size of
+  # the missing section.
+  # (The COFF spec only requires on-disk sections to be aligned in image
+  # files; for obj files it's not required. If that wasn't the case, deleting
+  # slices of data would not generally be safe.)
+
+ # Update section offsets and remove .debug$S section data.
+ for i in range(0, debug_section_index):
+ header = section_headers[i]
+ if header.SizeOfRawData:
+ header = Subtract(header, PointerToRawData=SECTIONHEADER.size())
+ if header.NumberOfRelocations:
+ header = Subtract(header, PointerToRelocations=SECTIONHEADER.size())
+ if header.NumberOfLineNumbers:
+ header = Subtract(header, PointerToLineNumbers=SECTIONHEADER.size())
+ SECTIONHEADER.pack_into(
+ objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+ for i in range(debug_section_index + 1, len(section_headers)):
+ header = section_headers[i]
+ shift = SECTIONHEADER.size() + debug_size
+ if header.SizeOfRawData:
+ header = Subtract(header, PointerToRawData=shift)
+ if header.NumberOfRelocations:
+ header = Subtract(header, PointerToRelocations=shift)
+ if header.NumberOfLineNumbers:
+ header = Subtract(header, PointerToLineNumbers=shift)
+ SECTIONHEADER.pack_into(
+ objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+
+ del objdata[debug_offset:debug_offset + debug_size]
+
+ # Finally, remove .debug$S section header and update coff header.
+ coff_header = coff_header._replace(TimeDateStamp=0)
+ coff_header = Subtract(coff_header,
+ NumberOfSections=1,
+ PointerToSymbolTable=SECTIONHEADER.size() + debug_size,
+ NumberOfSymbols=2)
+ COFFHEADER.pack_into(objdata, 0, coff_header)
+
+ del objdata[
+ COFFHEADER.size() + debug_section_index * SECTIONHEADER.size():
+ COFFHEADER.size() + (debug_section_index + 1) * SECTIONHEADER.size()]
+
+ # All done!
+ return objdata.tostring()
+
+
+def main():
+ ml_result = subprocess.call(sys.argv[1:])
+ if ml_result != 0:
+ return ml_result
+
+ objfile = None
+ for i in range(1, len(sys.argv)):
+ if sys.argv[i].startswith('/Fo'):
+ objfile = sys.argv[i][len('/Fo'):]
+ assert objfile, 'failed to find ml output'
+
+ with open(objfile, 'rb') as f:
+ objdata = f.read()
+ objdata = MakeDeterministic(objdata)
+ with open(objfile, 'wb') as f:
+ f.write(objdata)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/toolchain/win/rc/.gitignore b/deps/v8/build/toolchain/win/rc/.gitignore
new file mode 100644
index 0000000000..e8fc4d3e1f
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/.gitignore
@@ -0,0 +1,3 @@
+linux64/rc
+mac/rc
+win/rc.exe
diff --git a/deps/v8/build/toolchain/win/rc/README.md b/deps/v8/build/toolchain/win/rc/README.md
new file mode 100644
index 0000000000..e6d38f9709
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/README.md
@@ -0,0 +1,30 @@
+# rc
+
+This contains a cross-platform reimplementation of rc.exe.
+
+This exists mainly to compile .rc files on non-Windows hosts for cross builds.
+However, it also runs on Windows for two reasons:
+
+1. To compare the output of Microsoft's rc.exe and the reimplementation and to
+ check that they produce bitwise identical output.
+2. The reimplementation supports printing resource files in /showIncludes
+   output, which helps get build dependencies right.
+
+The resource compiler consists of two parts:
+
+1. A Python script, rc.py, that serves as the driver. It does Unicode
+   conversions, runs the input through the preprocessor, and then calls the
+   actual resource compiler; an example invocation is shown below.
+2. The resource compiler, a C++ binary obtained via sha1 files from Google
+   Storage. The binary's code currently lives at
+ https://github.com/nico/hack/tree/master/res, even though work is (slowly)
+ underway to upstream it into LLVM.
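+
+As a rough illustration (the flags mirror rc.py's own usage text; the file
+names are hypothetical), a driver invocation might look like:
+
+```
+python rc.py -I../.. -DNDEBUG /showIncludes /fochrome.res chrome.rc
+```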
+
+To update the rc binary, run `upload_rc_binaries.sh` in this directory, on a
+Mac.
+
+rc isn't built from source as part of the regular chrome build because
+it's needed by a gn toolchain tool, and those currently cannot have deps.
+Alternatively, gn could be taught about deps on tools, or rc invocations
+could become a template (like e.g. yasm invocations, which can have deps)
+rather than a tool; then the prebuilt binaries wouldn't be needed.
diff --git a/deps/v8/build/toolchain/win/rc/linux64/rc.sha1 b/deps/v8/build/toolchain/win/rc/linux64/rc.sha1
new file mode 100644
index 0000000000..ad14ca46a9
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/linux64/rc.sha1
@@ -0,0 +1 @@
+2d0c766039264dc2514d005a42f074af4838a446 \ No newline at end of file
diff --git a/deps/v8/build/toolchain/win/rc/mac/rc.sha1 b/deps/v8/build/toolchain/win/rc/mac/rc.sha1
new file mode 100644
index 0000000000..dbd6302a35
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/mac/rc.sha1
@@ -0,0 +1 @@
+4c25c3bcb6608109bb52028d008835895cf72629 \ No newline at end of file
diff --git a/deps/v8/build/toolchain/win/rc/rc.py b/deps/v8/build/toolchain/win/rc/rc.py
new file mode 100755
index 0000000000..2eff7d2fa3
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/rc.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""usage: rc.py [options] input.res
+A resource compiler for .rc files.
+
+options:
+-h, --help Print this message.
+-I<dir> Add include path.
+-D<sym> Define a macro for the preprocessor.
+/fo<out> Set path of output .res file.
+/nologo Ignored (rc.py doesn't print a logo by default).
+/showIncludes Print referenced header and resource files."""
+
+from __future__ import print_function
+from collections import namedtuple
+import codecs
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = \
+ os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(THIS_DIR))))
+
+
+def ParseFlags():
+ """Parses flags off sys.argv and returns the parsed flags."""
+ # Can't use optparse / argparse because of /fo flag :-/
+ includes = []
+ defines = []
+ output = None
+ input = None
+ show_includes = False
+ # Parse.
+ for flag in sys.argv[1:]:
+ if flag == '-h' or flag == '--help':
+ print(__doc__)
+ sys.exit(0)
+ if flag.startswith('-I'):
+ includes.append(flag)
+ elif flag.startswith('-D'):
+ defines.append(flag)
+ elif flag.startswith('/fo'):
+ if output:
+ print('rc.py: error: multiple /fo flags', '/fo' + output, flag,
+ file=sys.stderr)
+ sys.exit(1)
+ output = flag[3:]
+ elif flag == '/nologo':
+ pass
+ elif flag == '/showIncludes':
+ show_includes = True
+ elif (flag.startswith('-') or
+ (flag.startswith('/') and not os.path.exists(flag))):
+ print('rc.py: error: unknown flag', flag, file=sys.stderr)
+ print(__doc__, file=sys.stderr)
+ sys.exit(1)
+ else:
+ if input:
+ print('rc.py: error: multiple inputs:', input, flag, file=sys.stderr)
+ sys.exit(1)
+ input = flag
+ # Validate and set default values.
+ if not input:
+ print('rc.py: error: no input file', file=sys.stderr)
+ sys.exit(1)
+ if not output:
+ output = os.path.splitext(input)[0] + '.res'
+ Flags = namedtuple('Flags', ['includes', 'defines', 'output', 'input',
+ 'show_includes'])
+ return Flags(includes=includes, defines=defines, output=output, input=input,
+ show_includes=show_includes)
+
+
+def ReadInput(input):
+ """"Reads input and returns it. For UTF-16LEBOM input, converts to UTF-8."""
+ # Microsoft's rc.exe only supports unicode in the form of UTF-16LE with a BOM.
+ # Our rc binary sniffs for UTF-16LE. If that's not found, if /utf-8 is
+ # passed, the input is treated as UTF-8. If /utf-8 is not passed and the
+ # input is not UTF-16LE, then our rc errors out on characters outside of
+ # 7-bit ASCII. Since the driver always converts UTF-16LE to UTF-8 here (for
+ # the preprocessor, which doesn't support UTF-16LE), our rc will either see
+ # UTF-8 with the /utf-8 flag (for UTF-16LE input), or ASCII input.
+ # This is compatible with Microsoft rc.exe. If we wanted, we could expose
+ # a /utf-8 flag for the driver for UTF-8 .rc inputs too.
+ # TODO(thakis): Microsoft's rc.exe supports BOM-less UTF-16LE. We currently
+ # don't, but for chrome it currently doesn't matter.
+ is_utf8 = False
+ try:
+ with open(input, 'rb') as rc_file:
+ rc_file_data = rc_file.read()
+ if rc_file_data.startswith(codecs.BOM_UTF16_LE):
+ rc_file_data = rc_file_data[2:].decode('utf-16le').encode('utf-8')
+ is_utf8 = True
+ except IOError:
+ print('rc.py: failed to open', input, file=sys.stderr)
+ sys.exit(1)
+ except UnicodeDecodeError:
+ print('rc.py: failed to decode UTF-16 despite BOM', input, file=sys.stderr)
+ sys.exit(1)
+ return rc_file_data, is_utf8
+
+
+def Preprocess(rc_file_data, flags):
+ """Runs the input file through the preprocessor."""
+ clang = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
+ 'Release+Asserts', 'bin', 'clang-cl')
+ # Let preprocessor write to a temp file so that it doesn't interfere
+ # with /showIncludes output on stdout.
+ if sys.platform == 'win32':
+ clang += '.exe'
+ temp_handle, temp_file = tempfile.mkstemp(suffix='.i')
+  # Closing temp_handle immediately defeats the purpose of mkstemp(), but I
+  # can't figure out how else to let clang write to the temp file on Windows.
+ os.close(temp_handle)
+ clang_cmd = [clang, '/P', '/DRC_INVOKED', '/TC', '-', '/Fi' + temp_file]
+ if os.path.dirname(flags.input):
+ # This must precede flags.includes.
+ clang_cmd.append('-I' + os.path.dirname(flags.input))
+ if flags.show_includes:
+ clang_cmd.append('/showIncludes')
+ clang_cmd += flags.includes + flags.defines
+ p = subprocess.Popen(clang_cmd, stdin=subprocess.PIPE)
+ p.communicate(input=rc_file_data)
+ if p.returncode != 0:
+ sys.exit(p.returncode)
+ preprocessed_output = open(temp_file, 'rb').read()
+ os.remove(temp_file)
+
+ # rc.exe has a wacko preprocessor:
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381033(v=vs.85).aspx
+ # """RC treats files with the .c and .h extensions in a special manner. It
+ # assumes that a file with one of these extensions does not contain
+ # resources. If a file has the .c or .h file name extension, RC ignores all
+ # lines in the file except the preprocessor directives."""
+ # Thankfully, the Microsoft headers are mostly good about putting everything
+ # in the system headers behind `if !defined(RC_INVOKED)`, so regular
+ # preprocessing with RC_INVOKED defined works.
+ return preprocessed_output
+
+
+def RunRc(preprocessed_output, is_utf8, flags):
+ if sys.platform.startswith('linux'):
+ rc = os.path.join(THIS_DIR, 'linux64', 'rc')
+ elif sys.platform == 'darwin':
+ rc = os.path.join(THIS_DIR, 'mac', 'rc')
+ elif sys.platform == 'win32':
+ rc = os.path.join(THIS_DIR, 'win', 'rc.exe')
+ else:
+ print('rc.py: error: unsupported platform', sys.platform, file=sys.stderr)
+ sys.exit(1)
+ rc_cmd = [rc]
+ # Make sure rc-relative resources can be found:
+ if os.path.dirname(flags.input):
+ rc_cmd.append('/cd' + os.path.dirname(flags.input))
+ rc_cmd.append('/fo' + flags.output)
+ if is_utf8:
+ rc_cmd.append('/utf-8')
+ # TODO(thakis): rc currently always prints full paths for /showIncludes,
+ # but clang-cl /P doesn't. Which one is right?
+ if flags.show_includes:
+ rc_cmd.append('/showIncludes')
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both the
+  # preprocessor and rc.
+ rc_cmd += flags.includes
+ p = subprocess.Popen(rc_cmd, stdin=subprocess.PIPE)
+ p.communicate(input=preprocessed_output)
+ return p.returncode
+
+
+def main():
+ # This driver has to do these things:
+ # 1. Parse flags.
+ # 2. Convert the input from UTF-16LE to UTF-8 if needed.
+ # 3. Pass the input through a preprocessor (and clean up the preprocessor's
+ # output in minor ways).
+ # 4. Call rc for the heavy lifting.
+ flags = ParseFlags()
+ rc_file_data, is_utf8 = ReadInput(flags.input)
+ preprocessed_output = Preprocess(rc_file_data, flags)
+ return RunRc(preprocessed_output, is_utf8, flags)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/toolchain/win/rc/upload_rc_binaries.sh b/deps/v8/build/toolchain/win/rc/upload_rc_binaries.sh
new file mode 100755
index 0000000000..ec4df4cbce
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/upload_rc_binaries.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+set -eu
+
+# Builds new rc binaries at head and uploads them to google storage.
+# The new .sha1 files will be in the tree after this has run.
+
+if [[ "$OSTYPE" != "darwin"* ]]; then
+ echo "this script must run on a mac"
+ exit 1
+fi
+
+DIR="$(cd "$(dirname "${0}" )" && pwd)"
+SRC_DIR="$DIR/../../../.."
+
+# Make sure Linux and Windows sysroots are installed, for distrib.py.
+$SRC_DIR/build/linux/sysroot_scripts/install-sysroot.py --arch amd64
+$SRC_DIR/build/vs_toolchain.py update --force
+
+# Make a temporary directory.
+WORK_DIR=$(mktemp -d)
+if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
+ echo "could not create temp dir"
+ exit 1
+fi
+function cleanup {
+ rm -rf "$WORK_DIR"
+}
+trap cleanup EXIT
+
+# Check out rc and build it in the temporary directory. Copy binaries over.
+pushd "$WORK_DIR" > /dev/null
+git clone -q https://github.com/nico/hack
+cd hack/res
+./distrib.py "$SRC_DIR"
+popd > /dev/null
+cp "$WORK_DIR/hack/res/rc-linux64" "$DIR/linux64/rc"
+cp "$WORK_DIR/hack/res/rc-mac" "$DIR/mac/rc"
+cp "$WORK_DIR/hack/res/rc-win.exe" "$DIR/win/rc.exe"
+
+# Upload binaries to cloud storage.
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/linux64/rc"
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/mac/rc"
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/win/rc.exe"
diff --git a/deps/v8/build/toolchain/win/rc/win/rc.exe.sha1 b/deps/v8/build/toolchain/win/rc/win/rc.exe.sha1
new file mode 100644
index 0000000000..3fdbfc0c20
--- /dev/null
+++ b/deps/v8/build/toolchain/win/rc/win/rc.exe.sha1
@@ -0,0 +1 @@
+ba51d69039ffb88310b72b6568efa9f0de148f8f \ No newline at end of file
diff --git a/deps/v8/build/toolchain/win/setup_toolchain.py b/deps/v8/build/toolchain/win/setup_toolchain.py
new file mode 100644
index 0000000000..ef8aeda564
--- /dev/null
+++ b/deps/v8/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,291 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the visual studio install location and the location of the
+# win tool. The script assumes that the root build directory is the current dir
+# and the files will be written to the current directory.
+
+from __future__ import print_function
+
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+import gn_helpers
+
+SCRIPT_DIR = os.path.dirname(__file__)
+
+def _ExtractImportantEnvironment(output_of_set):
+ """Extracts environment variables required for the toolchain to run from
+ a textual dump output by the cmd.exe 'set' command."""
+ envvars_to_save = (
+ 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+ 'include',
+ 'lib',
+ 'libpath',
+ 'path',
+ 'pathext',
+ 'systemroot',
+ 'temp',
+ 'tmp',
+ )
+ env = {}
+ # This occasionally happens and leads to misleading SYSTEMROOT error messages
+ # if not caught here.
+ if output_of_set.count('=') == 0:
+ raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
+ for line in output_of_set.splitlines():
+ for envvar in envvars_to_save:
+ if re.match(envvar + '=', line.lower()):
+ var, setting = line.split('=', 1)
+ if envvar == 'path':
+ # Our own rules and actions in Chromium rely on python being in the
+ # path. Add the path to this python here so that if it's not in the
+ # path when ninja is run later, python will still be found.
+ setting = os.path.dirname(sys.executable) + os.pathsep + setting
+ env[var.upper()] = setting
+ break
+ if sys.platform in ('win32', 'cygwin'):
+ for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+ if required not in env:
+ raise Exception('Environment variable "%s" '
+ 'required to be set to valid path' % required)
+ return env
+
+
+def _DetectVisualStudioPath():
+ """Return path to the GYP_MSVS_VERSION of Visual Studio.
+ """
+
+ # Use the code in build/vs_toolchain.py to avoid duplicating code.
+ chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..'))
+ sys.path.append(os.path.join(chromium_dir, 'build'))
+ import vs_toolchain
+ return vs_toolchain.DetectVisualStudioPath()
+
+
+def _LoadEnvFromBat(args):
+ """Given a bat command, runs it and returns env vars set by it."""
+ args = args[:]
+ args.extend(('&&', 'set'))
+ popen = subprocess.Popen(
+ args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ variables, _ = popen.communicate()
+ if popen.returncode != 0:
+ raise Exception('"%s" failed with error %d' % (args, popen.returncode))
+ return variables.decode(errors='ignore')
+
+
+def _LoadToolchainEnv(cpu, sdk_dir, target_store):
+ """Returns a dictionary with environment variables that must be set while
+ running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe)."""
+ # Check if we are running in the SDK command line environment and use
+ # the setup script from the SDK if so. |cpu| should be either
+ # 'x86' or 'x64' or 'arm' or 'arm64'.
+ assert cpu in ('x86', 'x64', 'arm', 'arm64')
+ if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
+ # Load environment from json file.
+ env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu))
+ env = json.load(open(env))['env']
+ for k in env:
+ entries = [os.path.join(*([os.path.join(sdk_dir, 'bin')] + e))
+ for e in env[k]]
+ # clang-cl wants INCLUDE to be ;-separated even on non-Windows,
+ # lld-link wants LIB to be ;-separated even on non-Windows. Path gets :.
+ # The separator for INCLUDE here must match the one used in main() below.
+ sep = os.pathsep if k == 'PATH' else ';'
+ env[k] = sep.join(entries)
+ # PATH is a bit of a special case, it's in addition to the current PATH.
+ env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH']
+ # Augment with the current env to pick up TEMP and friends.
+ for k in os.environ:
+ if k not in env:
+ env[k] = os.environ[k]
+
+ varlines = []
+ for k in sorted(env.keys()):
+ varlines.append('%s=%s' % (str(k), str(env[k])))
+ variables = '\n'.join(varlines)
+
+ # Check that the json file contained the same environment as the .cmd file.
+ if sys.platform in ('win32', 'cygwin'):
+ script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd'))
+ arg = '/' + cpu
+ json_env = _ExtractImportantEnvironment(variables)
+ cmd_env = _ExtractImportantEnvironment(_LoadEnvFromBat([script, arg]))
+ assert _LowercaseDict(json_env) == _LowercaseDict(cmd_env)
+ else:
+ if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ:
+ os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath()
+ # We only support x64-hosted tools.
+ script_path = os.path.normpath(os.path.join(
+ os.environ['GYP_MSVS_OVERRIDE_PATH'],
+ 'VC/vcvarsall.bat'))
+ if not os.path.exists(script_path):
+ # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
+ # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
+ # variable.
+ if 'VSINSTALLDIR' in os.environ:
+ del os.environ['VSINSTALLDIR']
+ other_path = os.path.normpath(os.path.join(
+ os.environ['GYP_MSVS_OVERRIDE_PATH'],
+ 'VC/Auxiliary/Build/vcvarsall.bat'))
+ if not os.path.exists(other_path):
+ raise Exception('%s is missing - make sure VC++ tools are installed.' %
+ script_path)
+ script_path = other_path
+ cpu_arg = "amd64"
+ if (cpu != 'x64'):
+ # x64 is default target CPU thus any other CPU requires a target set
+ cpu_arg += '_' + cpu
+ args = [script_path, cpu_arg]
+ # Store target must come before any SDK version declaration
+ if (target_store):
+      args.append('store')
+ variables = _LoadEnvFromBat(args)
+ return _ExtractImportantEnvironment(variables)
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+ """Format as an 'environment block' directly suitable for CreateProcess.
+ Briefly this is a list of key=value\0, terminated by an additional \0. See
+ CreateProcess documentation for more details."""
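+  # For example (illustrative): {'A': '1', 'B': '2'} -> 'A=1\0B=2\0\0'
+  # (pair order follows dict iteration order).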
+ block = ''
+ nul = '\0'
+ for key, value in envvar_dict.items():
+ block += key + '=' + value + nul
+ block += nul
+ return block
+
+
+def _LowercaseDict(d):
+ """Returns a copy of `d` with both key and values lowercased.
+
+ Args:
+ d: dict to lowercase (e.g. {'A': 'BcD'}).
+
+ Returns:
+ A dict with both keys and values lowercased (e.g.: {'a': 'bcd'}).
+ """
+ return {k.lower(): d[k].lower() for k in d}
+
+
+def main():
+ if len(sys.argv) != 7:
+ print('Usage setup_toolchain.py '
+ '<visual studio path> <win sdk path> '
+ '<runtime dirs> <target_os> <target_cpu> '
+ '<environment block name|none>')
+ sys.exit(2)
+ win_sdk_path = sys.argv[2]
+ runtime_dirs = sys.argv[3]
+ target_os = sys.argv[4]
+ target_cpu = sys.argv[5]
+ environment_block_name = sys.argv[6]
+ if (environment_block_name == 'none'):
+ environment_block_name = ''
+
+ if (target_os == 'winuwp'):
+ target_store = True
+ else:
+ target_store = False
+
+ cpus = ('x86', 'x64', 'arm', 'arm64')
+ assert target_cpu in cpus
+ vc_bin_dir = ''
+ vc_lib_path = ''
+ vc_lib_atlmfc_path = ''
+ vc_lib_um_path = ''
+ include = ''
+ lib = ''
+
+ # TODO(scottmg|goma): Do we need an equivalent of
+ # ninja_use_custom_environment_files?
+
+ for cpu in cpus:
+ if cpu == target_cpu:
+ # Extract environment variables for subprocesses.
+ env = _LoadToolchainEnv(cpu, win_sdk_path, target_store)
+ env['PATH'] = runtime_dirs + os.pathsep + env['PATH']
+
+ for path in env['PATH'].split(os.pathsep):
+ if os.path.exists(os.path.join(path, 'cl.exe')):
+ vc_bin_dir = os.path.realpath(path)
+ break
+
+ for path in env['LIB'].split(';'):
+ if os.path.exists(os.path.join(path, 'msvcrt.lib')):
+ vc_lib_path = os.path.realpath(path)
+ break
+
+ for path in env['LIB'].split(';'):
+ if os.path.exists(os.path.join(path, 'atls.lib')):
+ vc_lib_atlmfc_path = os.path.realpath(path)
+ break
+
+ for path in env['LIB'].split(';'):
+ if os.path.exists(os.path.join(path, 'User32.Lib')):
+ vc_lib_um_path = os.path.realpath(path)
+ break
+
+ # The separator for INCLUDE here must match the one used in
+ # _LoadToolchainEnv() above.
+ include = [p.replace('"', r'\"') for p in env['INCLUDE'].split(';') if p]
+
+      # Make include path relative to builddir when cwd and sdk are on the
+      # same drive.
+ try:
+ include = list(map(os.path.relpath, include))
+ except ValueError:
+ pass
+
+ lib = [p.replace('"', r'\"') for p in env['LIB'].split(';') if p]
+      # Make lib path relative to builddir when cwd and sdk are on the same
+      # drive.
+ try:
+        lib = list(map(os.path.relpath, lib))
+ except ValueError:
+ pass
+
+ def q(s): # Quote s if it contains spaces or other weird characters.
+ return s if re.match(r'^[a-zA-Z0-9._/\\:-]*$', s) else '"' + s + '"'
+ include_I = ' '.join([q('/I' + i) for i in include])
+ include_imsvc = ' '.join([q('-imsvc' + i) for i in include])
+ libpath_flags = ' '.join([q('-libpath:' + i) for i in lib])
+
+ if (environment_block_name != ''):
+ env_block = _FormatAsEnvironmentBlock(env)
+ with open(environment_block_name, 'w') as f:
+ f.write(env_block)
+
+ assert vc_bin_dir
+ print('vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir))
+ assert include_I
+ print('include_flags_I = ' + gn_helpers.ToGNString(include_I))
+ assert include_imsvc
+ print('include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc))
+ assert vc_lib_path
+ print('vc_lib_path = ' + gn_helpers.ToGNString(vc_lib_path))
+  if (not target_store):
+    # Path is assumed not to exist for desktop applications.
+    assert vc_lib_atlmfc_path
+  # An atlmfc library path may be introduced for store builds in the future,
+  # so output the result if one exists.
+  if (vc_lib_atlmfc_path != ''):
+ print('vc_lib_atlmfc_path = ' + gn_helpers.ToGNString(vc_lib_atlmfc_path))
+ assert vc_lib_um_path
+ print('vc_lib_um_path = ' + gn_helpers.ToGNString(vc_lib_um_path))
+ print('paths = ' + gn_helpers.ToGNString(env['PATH']))
+ assert libpath_flags
+ print('libpath_flags = ' + gn_helpers.ToGNString(libpath_flags))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/toolchain/win/tool_wrapper.py b/deps/v8/build/toolchain/win/tool_wrapper.py
new file mode 100644
index 0000000000..926086670d
--- /dev/null
+++ b/deps/v8/build/toolchain/win/tool_wrapper.py
@@ -0,0 +1,245 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+This file is copied to the build directory as part of toolchain setup and
+is used to set up calls to tools used by the build that need wrappers.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import string
+import sys
+
+# tool_wrapper.py doesn't get invoked through python.bat so the Python bin
+# directory doesn't get added to the path. The Python module search logic
+# handles this fine and finds win32file.pyd. However the Windows module
+# search logic then looks for pywintypes27.dll and other DLLs in the path and
+# if it finds versions with a different bitness first then win32file.pyd will
+# fail to load with a cryptic error:
+# ImportError: DLL load failed: %1 is not a valid Win32 application.
+if sys.platform == 'win32':
+ os.environ['PATH'] = os.path.dirname(sys.executable) + \
+ os.pathsep + os.environ['PATH']
+ import win32file # pylint: disable=import-error
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
+
+def main(args):
+ exit_code = WinTool().Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class WinTool(object):
+ """This class performs all the Windows tooling steps. The methods can either
+ be executed directly, or dispatched from an argument list."""
+
+ def _UseSeparateMspdbsrv(self, env, args):
+ """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
+ shared one."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ if args[0] != 'link.exe':
+ return
+
+ # Use the output filename passed to the linker to generate an endpoint name
+ # for mspdbsrv.exe.
+ endpoint_name = None
+ for arg in args:
+ m = _LINK_EXE_OUT_ARG.match(arg)
+ if m:
+ endpoint_name = re.sub(r'\W+', '',
+ '%s_%d' % (m.group('out'), os.getpid()))
+ break
+
+ if endpoint_name is None:
+ return
+
+ # Adds the appropriate environment variable. This will be read by link.exe
+ # to know which instance of mspdbsrv.exe it should connect to (if it's
+ # not set then the default endpoint is used).
+ env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+ return name_string.title().replace('-', '')
+
+ def _GetEnv(self, arch):
+ """Gets the saved environment from a file for a given architecture."""
+ # The environment is saved as an "environment block" (see CreateProcess
+ # and msvs_emulation for details). We convert to a dict here.
+ # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
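+    # e.g. (illustrative): 'A=1\0B=2\0\0' -> {'A': '1', 'B': '2'}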
+ pairs = open(arch).read()[:-2].split('\0')
+ kvs = [item.split('=', 1) for item in pairs]
+ return dict(kvs)
+
+ def ExecDeleteFile(self, path):
+ """Simple file delete command."""
+ if os.path.exists(path):
+ os.unlink(path)
+
+ def ExecRecursiveMirror(self, source, dest):
+ """Emulation of rm -rf out && cp -af in out."""
+ if os.path.exists(dest):
+ if os.path.isdir(dest):
+ def _on_error(fn, path, dummy_excinfo):
+ # The operation failed, possibly because the file is set to
+ # read-only. If that's why, make it writable and try the op again.
+ if not os.access(path, os.W_OK):
+ os.chmod(path, stat.S_IWRITE)
+ fn(path)
+ shutil.rmtree(dest, onerror=_on_error)
+ else:
+ if not os.access(dest, os.W_OK):
+ # Attempt to make the file writable before deleting it.
+ os.chmod(dest, stat.S_IWRITE)
+ os.unlink(dest)
+
+ if os.path.isdir(source):
+ shutil.copytree(source, dest)
+ else:
+ shutil.copy2(source, dest)
+ # Try to diagnose crbug.com/741603
+ if not os.path.exists(dest):
+ raise Exception("Copying of %s to %s failed" % (source, dest))
+
+ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+ """Filter diagnostic output from link that looks like:
+ ' Creating library ui.dll.lib and object ui.dll.exp'
+ This happens when there are exports from the dll or exe.
+ """
+ env = self._GetEnv(arch)
+ if use_separate_mspdbsrv == 'True':
+ self._UseSeparateMspdbsrv(env, args)
+ if sys.platform == 'win32':
+ args = list(args) # *args is a tuple by default, which is read-only.
+ args[0] = args[0].replace('/', '\\')
+ # https://docs.python.org/2/library/subprocess.html:
+ # "On Unix with shell=True [...] if args is a sequence, the first item
+ # specifies the command string, and any additional items will be treated as
+ # additional arguments to the shell itself. That is to say, Popen does the
+ # equivalent of:
+ # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+  # For that reason, and since going through the shell doesn't seem necessary
+  # on non-Windows, don't do that there.
+ pe_name = None
+ for arg in args:
+ m = _LINK_EXE_OUT_ARG.match(arg)
+ if m:
+ pe_name = m.group('out')
+ link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ # Read output one line at a time as it shows up to avoid OOM failures when
+ # GBs of output is produced.
+ for line in link.stdout:
+ if (not line.startswith(' Creating library ') and
+ not line.startswith('Generating code') and
+ not line.startswith('Finished generating code')):
+ print(line)
+ result = link.wait()
+ if result == 0 and sys.platform == 'win32':
+ # Flush the file buffers to try to work around a Windows 10 kernel bug,
+ # https://crbug.com/644525
+ output_handle = win32file.CreateFile(pe_name, win32file.GENERIC_WRITE,
+ 0, None, win32file.OPEN_EXISTING, 0, 0)
+ win32file.FlushFileBuffers(output_handle)
+ output_handle.Close()
+ return result
+
+ def ExecAsmWrapper(self, arch, *args):
+ """Filter logo banner from invocations of asm.exe."""
+ env = self._GetEnv(arch)
+ if sys.platform == 'win32':
+ # Windows ARM64 uses clang-cl as assembler which has '/' as path
+ # separator, convert it to '\\' when running on Windows.
+ args = list(args) # *args is a tuple by default, which is read-only
+ args[0] = args[0].replace('/', '\\')
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ for line in out.splitlines():
+ if not line.startswith(' Assembling: '):
+ print(line)
+ return popen.returncode
+
+ def ExecRcWrapper(self, arch, *args):
+ """Converts .rc files to .res files."""
+ env = self._GetEnv(arch)
+
+ # We run two resource compilers:
+ # 1. A custom one at build/toolchain/win/rc/rc.py which can run on
+ # non-Windows, and which has /showIncludes support so we can track
+ # dependencies (e.g. on .ico files) of .rc files.
+ # 2. On Windows, regular Microsoft rc.exe, to make sure rc.py produces
+ # bitwise identical output.
+
+ # 1. Run our rc.py.
+ # Also pass /showIncludes to track dependencies of .rc files.
+ args = list(args)
+ rcpy_args = args[:]
+ rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')]
+ rcpy_res_output = rcpy_args[-2]
+ assert rcpy_res_output.startswith('/fo')
+ assert rcpy_res_output.endswith('.res')
+ rc_res_output = rcpy_res_output + '_ms_rc'
+ args[-2] = rc_res_output
+ rcpy_args.append('/showIncludes')
+ rc_exe_exit_code = subprocess.call(rcpy_args, env=env)
+ if rc_exe_exit_code == 0:
+ # Since tool("rc") can't have deps, add deps on this script and on rc.py
+ # and its deps here, so that rc edges become dirty if rc.py changes.
+ print('Note: including file: ../../build/toolchain/win/tool_wrapper.py')
+ print('Note: including file: ../../build/toolchain/win/rc/rc.py')
+ print(
+ 'Note: including file: ../../build/toolchain/win/rc/linux64/rc.sha1')
+ print('Note: including file: ../../build/toolchain/win/rc/mac/rc.sha1')
+ print(
+ 'Note: including file: ../../build/toolchain/win/rc/win/rc.exe.sha1')
+
+ # 2. Run Microsoft rc.exe.
+ if sys.platform == 'win32' and rc_exe_exit_code == 0:
+ rc_exe_exit_code = subprocess.call(args, shell=True, env=env)
+ # Assert Microsoft rc.exe and rc.py produced identical .res files.
+ if rc_exe_exit_code == 0:
+ import filecmp
+ # Strip "/fo" prefix.
+ assert filecmp.cmp(rc_res_output[3:], rcpy_res_output[3:])
+ return rc_exe_exit_code
+
+ def ExecActionWrapper(self, arch, rspfile, *dirname):
+ """Runs an action command line from a response file using the environment
+ for |arch|. If |dirname| is supplied, use that as the working directory."""
+ env = self._GetEnv(arch)
+ # TODO(scottmg): This is a temporary hack to get some specific variables
+ # through to actions that are set after GN-time. http://crbug.com/333738.
+ for k, v in os.environ.items():
+ if k not in env:
+ env[k] = v
+ args = open(rspfile).read()
+ dirname = dirname[0] if dirname else None
+ return subprocess.call(args, shell=True, env=env, cwd=dirname)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/toolchain/wrapper_utils.py b/deps/v8/build/toolchain/wrapper_utils.py
new file mode 100644
index 0000000000..5949a3727c
--- /dev/null
+++ b/deps/v8/build/toolchain/wrapper_utils.py
@@ -0,0 +1,93 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions for gcc_toolchain.gni wrappers."""
+
+import gzip
+import os
+import re
+import subprocess
+import shlex
+import shutil
+import sys
+import threading
+
+_BAT_PREFIX = 'cmd /c call '
+
+
+def _GzipThenDelete(src_path, dest_path):
+ # Results for Android map file with GCC on a z620:
+ # Uncompressed: 207MB
+ # gzip -9: 16.4MB, takes 8.7 seconds.
+ # gzip -1: 21.8MB, takes 2.0 seconds.
+ # Piping directly from the linker via -print-map (or via -Map with a fifo)
+ # adds a whopping 30-45 seconds!
+ with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path, 'wb', 1) as f_out:
+ shutil.copyfileobj(f_in, f_out)
+ os.unlink(src_path)
+
+
+def CommandToRun(command):
+ """Generates commands compatible with Windows.
+
+ When running on a Windows host and using a toolchain whose tools are
+ actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+ executables, the |command| to run has to be prefixed with this magic.
+ The GN toolchain definitions take care of that for when GN/Ninja is
+ running the tool directly. When that command is passed in to this
+ script, it appears as a unitary string but needs to be split up so that
+ just 'cmd' is the actual command given to Python's subprocess module.
+
+ Args:
+ command: List containing the UNIX style |command|.
+
+ Returns:
+ A list containing the Windows version of the |command|.
+ """
+ if command[0].startswith(_BAT_PREFIX):
+ command = command[0].split(None, 3) + command[1:]
+ return command
+
+
+def RunLinkWithOptionalMapFile(command, env=None, map_file=None):
+ """Runs the given command, adding in -Wl,-Map when |map_file| is given.
+
+ Also takes care of gzipping when |map_file| ends with .gz.
+
+ Args:
+ command: List of arguments comprising the command.
+ env: Environment variables.
+ map_file: Path to output map_file.
+
+ Returns:
+ The exit code of running |command|.
+ """
+ tmp_map_path = None
+ if map_file and map_file.endswith('.gz'):
+ tmp_map_path = map_file + '.tmp'
+ command.append('-Wl,-Map,' + tmp_map_path)
+ elif map_file:
+ command.append('-Wl,-Map,' + map_file)
+
+ result = subprocess.call(command, env=env)
+
+ if tmp_map_path and result == 0:
+ threading.Thread(
+ target=lambda: _GzipThenDelete(tmp_map_path, map_file)).start()
+ elif tmp_map_path and os.path.exists(tmp_map_path):
+ os.unlink(tmp_map_path)
+
+ return result
+
+
+def CaptureCommandStderr(command, env=None):
+ """Returns the stderr of a command.
+
+ Args:
+ command: A list containing the command and arguments.
+ env: Environment variables for the new process.
+ """
+ child = subprocess.Popen(command, stderr=subprocess.PIPE, env=env)
+ _, stderr = child.communicate()
+ return child.returncode, stderr
diff --git a/deps/v8/build/tree_truth.sh b/deps/v8/build/tree_truth.sh
new file mode 100755
index 0000000000..617092dc8a
--- /dev/null
+++ b/deps/v8/build/tree_truth.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
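+#
+# Illustrative usage (arguments are hypothetical):
+#   tree_truth.sh /path/to/chrome/src src third_party/skia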
+
+# Return the sha1 of the given tag. If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+ oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+ if [ $? -eq 0 ] ; then
+ echo $oneline
+ fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+ ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+ ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+ ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+ local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+ ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+ local tag_sha1=$(tt_sha1_for_tag $1 $2)
+ local head_sha1=$(tt_sha1_for_head $1)
+ local display_name=$(echo $3 | sed 's#/#_#g')
+ if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+ return
+ fi
+ if [ "${tag_sha1}" = "" ] ; then
+ echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+ echo "NOTE: git tag was not found so we have no baseline."
+ echo "Here are some recent commits, but they may not be new for this build."
+ ( cd $1 && git log -n 10 --stat | cat)
+ else
+ echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+ ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+ fi
+}
+
+# Clean out the tree truth tags in all repos. For testing.
+tt_clean_all() {
+ for project in $@; do
+ tt_delete_tag $CHROME_SRC/../$project tree_truth
+ done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+ for project in $@; do
+ local full_path=$CHROME_SRC/../$project
+ tt_list_commits $full_path tree_truth $project
+ tt_tag_head $full_path tree_truth
+ done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+ echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+ for project in $@; do
+ echo $project:
+ local full_path=$CHROME_SRC/../$project
+ (cd $full_path && git log -n 10 --format=" %H %s %an, %ad" | cat)
+ echo "================================================================="
+ done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
diff --git a/deps/v8/build/update-linux-sandbox.sh b/deps/v8/build/update-linux-sandbox.sh
new file mode 100755
index 0000000000..d24cf2602d
--- /dev/null
+++ b/deps/v8/build/update-linux-sandbox.sh
@@ -0,0 +1,82 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+ echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+ exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+ echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+ exit 1
+fi
+
+installsandbox() {
+ echo "(using sudo so you may be asked for your password)"
+ sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+ "${CHROME_SANDBOX_INST_PATH}" &&
+ sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+ sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+ return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+ echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+ echo "If you are building in Release mode"
+ exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+ echo "Could not find ${CHROME_SANDBOX_BUILD_PATH}"
+ echo -n "BUILDTYPE is $BUILDTYPE, use \"BUILDTYPE=<value> ${0}\" to override "
+ echo "after you build the chrome_sandbox target"
+ exit 1
+fi
+
+if readelf -d "${CHROME_SANDBOX_BUILD_PATH}" | \
+ grep "(RPATH)" > /dev/null 2>&1; then
+ echo "Build requires is_component_build=false in ${CHROME_OUT_DIR}/args.gn."
+ exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+ echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+ echo "installing it now."
+ installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+ echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+ exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+ echo "Your installed setuid sandbox is too old, installing it now."
+ if ! installsandbox; then
+ echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+ exit 1
+ fi
+else
+ echo "Your setuid sandbox is up to date"
+ if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+ echo -n "Make sure you have \"export "
+ echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+ echo "somewhere in your .bashrc"
+ echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+ fi
+fi
diff --git a/deps/v8/build/util/BUILD.gn b/deps/v8/build/util/BUILD.gn
new file mode 100644
index 0000000000..54c23c91c4
--- /dev/null
+++ b/deps/v8/build/util/BUILD.gn
@@ -0,0 +1,51 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/lastchange.gni")
+
+action("webkit_version") {
+ script = "version.py"
+
+ template_file = "webkit_version.h.in"
+ inputs = [
+ lastchange_file,
+ template_file,
+ ]
+
+ output_file = "$target_gen_dir/webkit_version.h"
+ outputs = [
+ output_file,
+ ]
+
+ args = [
+ # LASTCHANGE contains "<build hash>-<ref>". The user agent only wants the
+ # "<build hash>" bit, so chop off everything after it.
+ "-e",
+ "LASTCHANGE=LASTCHANGE[:LASTCHANGE.find('-')]",
+ "-f",
+ rebase_path(lastchange_file, root_build_dir),
+ rebase_path(template_file, root_build_dir),
+ rebase_path(output_file, root_build_dir),
+ ]
+}
+
+action("chrome_version_json") {
+ script = "version.py"
+ _chrome_version_path = "//chrome/VERSION"
+ inputs = [
+ _chrome_version_path,
+ ]
+ _output_file = "$root_gen_dir/CHROME_VERSION.json"
+ outputs = [
+ _output_file,
+ ]
+ args = [
+ "--file",
+ rebase_path(_chrome_version_path, root_build_dir),
+ "--template",
+ "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
+ "--output",
+ rebase_path(_output_file, root_build_dir),
+ ]
+}
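
Both actions above drive version.py, which reads KEY=VALUE files (LASTCHANGE,
//chrome/VERSION) and substitutes @KEY@ placeholders in a template; the -e flag
additionally evaluates a Python expression, which is how the "-<ref>" suffix is
chopped off LASTCHANGE. A simplified sketch of the substitution step (not the
real version.py, just an illustration of the placeholder format):

    import re

    def substitute(template, values):
        # Replace each @KEY@ placeholder with its value from a KEY=VALUE file.
        return re.sub(r'@(\w+)@', lambda m: values[m.group(1)], template)

    values = {'MAJOR': '74', 'MINOR': '0', 'BUILD': '3720', 'PATCH': '0'}
    print(substitute('@MAJOR@.@MINOR@.@BUILD@.@PATCH@', values))
    # -> 74.0.3720.0
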
diff --git a/deps/v8/build/util/LASTCHANGE.dummy b/deps/v8/build/util/LASTCHANGE.dummy
new file mode 100644
index 0000000000..21bb3c33c7
--- /dev/null
+++ b/deps/v8/build/util/LASTCHANGE.dummy
@@ -0,0 +1 @@
+LASTCHANGE=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
diff --git a/deps/v8/build/util/PRESUBMIT.py b/deps/v8/build/util/PRESUBMIT.py
new file mode 100644
index 0000000000..271afbbb62
--- /dev/null
+++ b/deps/v8/build/util/PRESUBMIT.py
@@ -0,0 +1,58 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit for build/util."""
+import re
+
+
+def _GetBlacklist(input_api):
+ blacklist = []
+ affected_files = input_api.change.AffectedFiles()
+ version_script_change = next(
+ (f for f in affected_files
+ if re.search('\\/version\\.py$|\\/version_test\\.py$', f.LocalPath())),
+ None)
+
+ if version_script_change is None:
+ blacklist.append('version_test\\.py$')
+
+ android_chrome_version_script_change = next(
+ (f for f in affected_files if re.search(
+ '\\/android_chrome_version\\.py$|'
+ '\\/android_chrome_version_test\\.py$', f.LocalPath())), None)
+
+ if android_chrome_version_script_change is None:
+ blacklist.append('android_chrome_version_test\\.py$')
+
+ return blacklist
+
+
+def _GetPythonUnitTests(input_api, output_api):
+ # No need to test if files are unchanged
+ blacklist = _GetBlacklist(input_api)
+
+ return input_api.canned_checks.GetUnitTestsRecursively(
+ input_api,
+ output_api,
+ input_api.PresubmitLocalPath(),
+ whitelist=['.*_test\\.py$'],
+ blacklist=blacklist)
+
+
+def CommonChecks(input_api, output_api):
+ """Presubmit checks run on both upload and commit.
+ """
+ checks = []
+ checks.extend(_GetPythonUnitTests(input_api, output_api))
+ return input_api.RunTests(checks, False)
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ """Presubmit checks on CL upload."""
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ """Presubmit checks on commit."""
+ return CommonChecks(input_api, output_api)
diff --git a/deps/v8/build/util/android_chrome_version.py b/deps/v8/build/util/android_chrome_version.py
new file mode 100644
index 0000000000..5628f1a845
--- /dev/null
+++ b/deps/v8/build/util/android_chrome_version.py
@@ -0,0 +1,173 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Different build variants of chrome for android have different version codes.
+Reason: for targets that have the same package name (e.g. chrome, chrome
+modern, monochrome, trichrome), Play Store considers them the same app
+and will push the supported app with the highest version code to devices.
+(note that the Play Store does not support hosting two different apps with the
+same version code and package name)
+
+Each key in this dict represents a unique version code that will be used for
+one or more android chrome apks.
+
+Webview channels must have unique version codes for a couple of reasons:
+a) Play Store does not support having the same version code for different
+ versions of a package. Without unique codes, promoting a beta apk to stable
+ would require first removing the beta version.
+b) Firebase project support (used by official builders) requires unique
+ [version code + package name].
+ We cannot add new webview package names for new channels because webview
+ packages are whitelisted by Android as webview providers.
+
+WEBVIEW_STABLE, WEBVIEW_BETA, WEBVIEW_DEV are all used for standalone webview,
+whereas the others are used for various chrome apks.
+
+Note that a final digit of '3' for webview is reserved for Trichrome Webview.
+The same versionCode is used for both Trichrome Chrome and Trichrome Webview.
+"""
+ANDROID_CHROME_APK_VERSION_CODE_DIFFS = {
+ 'CHROME': 0,
+ 'CHROME_MODERN': 1,
+ 'MONOCHROME': 2,
+ 'TRICHROME': 3,
+ 'NOTOUCH_CHROME': 4,
+ 'WEBVIEW_STABLE': 0,
+ 'WEBVIEW_BETA': 1,
+ 'WEBVIEW_DEV': 2,
+}
+
+"""The architecture preference is encoded into the version_code for devices
+that support multiple architectures. (exploiting play store logic that pushes
+apk with highest version code)
+
+Detail:
+Many Android devices support multiple architectures, and can run applications
+built for any of them; the Play Store considers all of the supported
+architectures compatible and does not, itself, have any preference for which
+is "better". The common cases here:
+
+- All production arm64 devices can also run arm
+- All production x64 devices can also run x86
+- Pretty much all production x86/x64 devices can also run arm (via a binary
+ translator)
+
+Since the Play Store has no particular preferences, you have to encode your own
+preferences into the ordering of the version codes. There are a few relevant
+considerations here:
+
+- For any android app, it's theoretically preferable to ship a 64-bit version to
+ 64-bit devices if it exists, because the 64-bit architectures are supposed to
+ be "better" than their 32-bit predecessors (unfortunately this is not always
+ true due to the effect on memory usage, but we currently deal with this by
+ simply not shipping a 64-bit version *at all* on the configurations where we
+ want the 32-bit version to be used).
+- For any android app, it's definitely preferable to ship an x86 version, if
+  one exists, to x86 devices rather than an arm version, because running things
+  through the binary translator is a performance hit.
+- For WebView, Monochrome, and Trichrome specifically, they are a special class
+ of APK called "multiarch" which means that they actually need to *use* more
+ than one architecture at runtime (rather than simply being compatible with
+ more than one). The 64-bit builds of these multiarch APKs contain both 32-bit
+  and 64-bit code, so that Webview is available for both ABIs. On a 64-bit
+  device, a multiarch APK *must* be a version that supports both 32-bit and
+  64-bit, otherwise it won't work properly. So, the 64-bit version needs a
+  higher versionCode, as otherwise a 64-bit device would
+ prefer the 32-bit version that does not include any 64-bit code, and fail.
+- The relative order of mips isn't important, but it needs to be *distinct*
+  from the other architectures because all builds need unique version codes.
+"""
+ARCH_VERSION_CODE_DIFF = {
+ 'arm': 0,
+ 'x86': 10,
+ 'mipsel': 20,
+ 'arm64': 30,
+ 'x64': 60
+}
+ARCH_CHOICES = ARCH_VERSION_CODE_DIFF.keys()
+
+""" "Next" builds get +5 last version code digit.
+
+We choose 5 because it won't conflict with values in
+ANDROID_CHROME_APK_VERSION_CODE_DIFFS
+
+We also increment the BUILD (branch) number to ensure that the version code is
+higher for the next build than any build with the same BUILD value (even if the
+other builds have a higher PATCH value). This is needed for release logistics
+when working with unreleased Android versions: upgrading android will install
+the chrome build (the "next" build) that uses the new android sdk.
+"""
+NEXT_BUILD_VERSION_CODE_DIFF = 100005
+
+"""For 64-bit architectures, some packages have multiple targets with version
+codes that differ by the second-to-last digit (the architecture digit). This is
+for various combinations of 32-bit vs 64-bit chrome and webview. The
+default/traditional configuration is 32-bit chrome with 64-bit webview, but we
+are adding:
++ 64-bit chrome with 32-bit webview
++ 64-bit combined Chrome and Webview (only one library)
++ (maybe someday 32-bit chrome with 32-bit webview)
+
+The naming scheme followed here is <chrome>_<webview>,
+e.g. 64_32 is 64-bit chrome with 32-bit webview.
+"""
+ARCH64_APK_VARIANTS = {
+ '64_32': {
+ 'PACKAGES': frozenset(['MONOCHROME', 'TRICHROME']),
+ 'MODIFIER': 10
+ },
+ '64': {
+ 'PACKAGES': frozenset(['MONOCHROME', 'TRICHROME']),
+ 'MODIFIER': 20
+ }
+}
+
+
+def GenerateVersionCodes(version_values, arch, is_next_build):
+ """Get dict of version codes for chrome-for-android-related targets
+
+ e.g.
+ {
+ 'CHROME_VERSION_CODE': '378100010',
+ 'MONOCHROME_VERSION_CODE': '378100013',
+ ...
+ }
+
+ versionCode values are built like this:
+ {full BUILD int}{3 digits for PATCH}{1 digit for architecture}{final digit}.
+
+ MAJOR and MINOR values are not used for generating versionCode.
+ - MINOR is always 0. It was used for something long ago in Chrome's history
+ but has not been used since, and has never been nonzero on Android.
+ - MAJOR is cosmetic and controlled by the release managers. MAJOR and BUILD
+ always have reasonable sort ordering: for two version codes A and B, it's
+ always the case that (A.MAJOR < B.MAJOR) implies (A.BUILD < B.BUILD), and
+ that (A.MAJOR > B.MAJOR) implies (A.BUILD > B.BUILD). This property is just
+ maintained by the humans who set MAJOR.
+
+ Thus, this method is responsible for the final two digits of versionCode.
+ """
+
+ base_version_code = '%s%03d00' % (version_values['BUILD'],
+ int(version_values['PATCH']))
+ new_version_code = int(base_version_code)
+
+ new_version_code += ARCH_VERSION_CODE_DIFF[arch]
+ if is_next_build:
+ new_version_code += NEXT_BUILD_VERSION_CODE_DIFF
+
+ version_codes = {}
+ for apk, diff in ANDROID_CHROME_APK_VERSION_CODE_DIFFS.iteritems():
+ version_code_name = apk + '_VERSION_CODE'
+ version_code_val = new_version_code + diff
+ version_codes[version_code_name] = str(version_code_val)
+
+ if arch == 'arm64' or arch == 'x64':
+ for variant, config in ARCH64_APK_VARIANTS.iteritems():
+ if apk in config['PACKAGES']:
+ variant_name = apk + '_' + variant + '_VERSION_CODE'
+ variant_val = version_code_val + config['MODIFIER']
+ version_codes[variant_name] = str(variant_val)
+
+ return version_codes
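
A worked example of the versionCode layout described above, assuming BUILD=3720
and PATCH=0 (the same values the tests below use): the base is the BUILD digits
followed by three PATCH digits and two trailing zeros; the architecture and
package digits are then added, and 64-bit variants add their MODIFIER on top.

    # Re-deriving two version codes by hand; the expected values match the
    # arm64 variant expectations in the tests below.
    BUILD, PATCH = 3720, 0
    base = int('%d%03d00' % (BUILD, PATCH))    # 372000000
    monochrome_arm64 = base + 30 + 2           # arch diff (arm64) + package diff
    assert monochrome_arm64 == 372000032
    assert monochrome_arm64 + 10 == 372000042  # the MONOCHROME_64_32 variant
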
diff --git a/deps/v8/build/util/android_chrome_version_test.py b/deps/v8/build/util/android_chrome_version_test.py
new file mode 100644
index 0000000000..5e743d34ba
--- /dev/null
+++ b/deps/v8/build/util/android_chrome_version_test.py
@@ -0,0 +1,293 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from android_chrome_version import GenerateVersionCodes
+
+
+class _VersionTest(unittest.TestCase):
+ """Unittests for the android_chrome_version module.
+ """
+
+ EXAMPLE_VERSION_VALUES = {
+ 'MAJOR': '74',
+ 'MINOR': '0',
+ 'BUILD': '3720',
+ 'PATCH': '0',
+ }
+
+ def testGenerateVersionCodesAndroidChrome(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ chrome_version_code = output['CHROME_VERSION_CODE']
+
+ self.assertEqual(chrome_version_code, '372000000')
+
+ def testGenerateVersionCodesAndroidChromeModern(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ chrome_modern_version_code = output['CHROME_MODERN_VERSION_CODE']
+
+ self.assertEqual(chrome_modern_version_code, '372000001')
+
+ def testGenerateVersionCodesAndroidMonochrome(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+
+ self.assertEqual(monochrome_version_code, '372000002')
+
+ def testGenerateVersionCodesAndroidTrichrome(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ trichrome_version_code = output['TRICHROME_VERSION_CODE']
+
+ self.assertEqual(trichrome_version_code, '372000003')
+
+ def testGenerateVersionCodesAndroidNoTouch(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ notouch_chrome_version_code = output['NOTOUCH_CHROME_VERSION_CODE']
+
+ self.assertEqual(notouch_chrome_version_code, '372000004')
+
+ def testGenerateVersionCodesAndroidWebviewStable(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+
+ self.assertEqual(webview_stable_version_code, '372000000')
+
+ def testGenerateVersionCodesAndroidWebviewBeta(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+ self.assertEqual(webview_beta_version_code, '372000001')
+
+ def testGenerateVersionCodesAndroidWebviewDev(self):
+ """Assert it gives correct values for standard/example inputs"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE']
+
+ self.assertEqual(webview_dev_version_code, '372000002')
+
+ def testGenerateVersionCodesAndroidNextBuild(self):
+ """Assert it handles "next" builds correctly"""
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=True)
+
+ # Get just a sample of values
+ chrome_version_code = output['CHROME_VERSION_CODE']
+ monochrome_version_code = output['MONOCHROME_VERSION_CODE']
+ webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+ webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+ self.assertEqual(chrome_version_code, '372100005')
+ self.assertEqual(monochrome_version_code, '372100007')
+ self.assertEqual(webview_stable_version_code, '372100005')
+ self.assertEqual(webview_beta_version_code, '372100006')
+
+ def testGenerateVersionCodesAndroidArchArm(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+ arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+ self.assertEqual(arch_chrome_version_code, '372000000')
+
+ def testGenerateVersionCodesAndroidArchX86(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='x86', is_next_build=False)
+ arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+ self.assertEqual(arch_chrome_version_code, '372000010')
+
+ def testGenerateVersionCodesAndroidArchMips(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='mipsel', is_next_build=False)
+ arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+ self.assertEqual(arch_chrome_version_code, '372000020')
+
+ def testGenerateVersionCodesAndroidArchArm64(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False)
+ arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+ self.assertEqual(arch_chrome_version_code, '372000030')
+
+ def testGenerateVersionCodesAndroidArchArm64Variants(self):
+ """Assert it handles 64-bit-specific additional version codes correctly.
+
+ Some additional version codes are generated for 64-bit architectures.
+ See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False)
+ arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE']
+ arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE']
+ arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
+ arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+
+ self.assertEqual(arch_monochrome_64_32_version_code, '372000042')
+ self.assertEqual(arch_monochrome_64_version_code, '372000052')
+ self.assertEqual(arch_trichrome_64_32_version_code, '372000043')
+ self.assertEqual(arch_trichrome_64_version_code, '372000053')
+
+ def testGenerateVersionCodesAndroidArchX64(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False)
+ arch_chrome_version_code = output['CHROME_VERSION_CODE']
+
+ self.assertEqual(arch_chrome_version_code, '372000060')
+
+ def testGenerateVersionCodesAndroidArchX64Variants(self):
+ """Assert it handles 64-bit-specific additional version codes correctly.
+
+ Some additional version codes are generated for 64-bit architectures.
+ See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False)
+ arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE']
+ arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE']
+ arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE']
+ arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE']
+
+ self.assertEqual(arch_monochrome_64_32_version_code, '372000072')
+ self.assertEqual(arch_monochrome_64_version_code, '372000082')
+ self.assertEqual(arch_trichrome_64_32_version_code, '372000073')
+ self.assertEqual(arch_trichrome_64_version_code, '372000083')
+
+ def testGenerateVersionCodesAndroidArchOrderArm(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+
+ Test arm-related values.
+ """
+ arm_output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+ arm64_output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm64', is_next_build=False)
+
+ arm_chrome_version_code = arm_output['CHROME_VERSION_CODE']
+ arm64_chrome_version_code = arm64_output['CHROME_VERSION_CODE']
+
+ self.assertLess(arm_chrome_version_code, arm64_chrome_version_code)
+
+ def testGenerateVersionCodesAndroidArchOrderX86(self):
+ """Assert it handles different architectures correctly.
+
+ Version codes for different builds need to be distinct and maintain a
+ certain ordering.
+ See docstring on android_chrome_version.ARCH_VERSION_CODE_DIFF for
+ reasoning.
+
+ Test x86-related values.
+ """
+ x86_output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='x86', is_next_build=False)
+ x64_output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='x64', is_next_build=False)
+
+ x86_chrome_version_code = x86_output['CHROME_VERSION_CODE']
+ x64_chrome_version_code = x64_output['CHROME_VERSION_CODE']
+
+ self.assertLess(x86_chrome_version_code, x64_chrome_version_code)
+
+ def testGenerateVersionCodesAndroidWebviewChannelOrderBeta(self):
+ """Assert webview beta channel is higher than stable.
+
+    The channel-specific version codes for standalone webview need to follow
+ the order stable < beta < dev.
+
+    This ensures that if a user opts into the beta track, they will always have the
+ beta apk, including any finch experiments targeted at beta users, even when
+ beta and stable channels are otherwise on the same version.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE']
+ webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+
+ self.assertGreater(webview_beta_version_code, webview_stable_version_code)
+
+ def testGenerateVersionCodesAndroidWebviewChannelOrderDev(self):
+ """Assert webview dev channel is higher than beta.
+
+    The channel-specific version codes for standalone webview need to follow
+ the order stable < beta < dev.
+
+    This ensures that if a user opts into the dev track, they will always have the
+ dev apk, including any finch experiments targeted at dev users, even when
+ dev and beta channels are otherwise on the same version.
+ """
+ output = GenerateVersionCodes(
+ self.EXAMPLE_VERSION_VALUES, arch='arm', is_next_build=False)
+
+ webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE']
+ webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE']
+
+ self.assertGreater(webview_dev_version_code, webview_beta_version_code)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/util/branding.gni b/deps/v8/build/util/branding.gni
new file mode 100644
index 0000000000..2a229f1056
--- /dev/null
+++ b/deps/v8/build/util/branding.gni
@@ -0,0 +1,46 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This exposes the Chrome branding as GN variables for use in build files.
+#
+# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
+# However, it is far better to write an action to generate a file at
+# build-time with the information you need. This allows better dependency
+# checking and GN will run faster.
+#
+# These values should only be used if you REALLY need to depend on them at
+# build-time, for example, in the computation of output file names.
+
+import("//build/config/chrome_build.gni")
+
+_branding_dictionary_template =
+ "full_name = \"@PRODUCT_FULLNAME@\" " +
+ "short_name = \"@PRODUCT_SHORTNAME@\" " +
+ "bundle_id = \"@MAC_BUNDLE_ID@\" " +
+ "creator_code = \"@MAC_CREATOR_CODE@\" " +
+ "installer_full_name = \"@PRODUCT_INSTALLER_FULLNAME@\" " +
+ "installer_short_name = \"@PRODUCT_INSTALLER_SHORTNAME@\" " +
+ "team_id = \"@MAC_TEAM_ID@\" "
+
+_branding_file = "//chrome/app/theme/$branding_path_component/BRANDING"
+_result = exec_script("version.py",
+ [
+ "-f",
+ rebase_path(_branding_file, root_build_dir),
+ "-t",
+ _branding_dictionary_template,
+ ],
+ "scope",
+ [ _branding_file ])
+
+chrome_product_full_name = _result.full_name
+chrome_product_short_name = _result.short_name
+chrome_product_installer_full_name = _result.installer_full_name
+chrome_product_installer_short_name = _result.installer_short_name
+
+if (is_mac) {
+ chrome_mac_bundle_id = _result.bundle_id
+ chrome_mac_creator_code = _result.creator_code
+ chrome_mac_team_id = _result.team_id
+}
diff --git a/deps/v8/build/util/generate_wrapper.gni b/deps/v8/build/util/generate_wrapper.gni
new file mode 100644
index 0000000000..74d94330da
--- /dev/null
+++ b/deps/v8/build/util/generate_wrapper.gni
@@ -0,0 +1,98 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Wraps a target and any of its arguments into an executable script.
+#
+# Many executable targets have build-time-constant arguments. This
+# template allows those to be wrapped into a single, user- or bot-friendly
+# script at build time.
+#
+# Paths to be wrapped should be relative to root_build_dir and should be
+# wrapped in "@WrappedPath(...)"; see Example below.
+#
+# Variables:
+# generator_script: Path to the script to use to perform the wrapping.
+# Defaults to //build/util/generate_wrapper.py. Generally should only
+# be set by other templates.
+# wrapper_script: Output path.
+# executable: Path to the executable to wrap. Can be a script or a
+# build product. Paths can be relative to the containing gn file
+# or source-absolute.
+# executable_args: List of arguments to write into the wrapper.
+#
+# Example wrapping a checked-in script:
+# generate_wrapper("sample_wrapper") {
+# executable = "//for/bar/sample.py"
+# wrapper_script = "$root_build_dir/bin/run_sample"
+#
+# _sample_argument_path = "//sample/$target_cpu/lib/sample_lib.so"
+# _rebased_sample_argument_path = rebase_path(
+# _sample_argument_path,
+# root_build_dir)
+# executable_args = [
+# "--sample-lib", "@WrappedPath(${_rebased_sample_argument_path})",
+# ]
+# }
+#
+# Example wrapping a build product:
+# generate_wrapper("sample_wrapper") {
+# executable = "$root_build_dir/sample_build_product"
+# wrapper_script = "$root_build_dir/bin/run_sample_build_product"
+# }
+template("generate_wrapper") {
+ _generator_script = "//build/util/generate_wrapper.py"
+ if (defined(invoker.generator_script)) {
+ _generator_script = invoker.generator_script
+ }
+ _executable_to_wrap = invoker.executable
+ _wrapper_script = invoker.wrapper_script
+ if (is_win) {
+ _wrapper_script += ".bat"
+ }
+ if (defined(invoker.executable_args)) {
+ _wrapped_arguments = invoker.executable_args
+ } else {
+ _wrapped_arguments = []
+ }
+
+ action(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "data_deps",
+ "deps",
+ "sources",
+ "testonly",
+ ])
+ script = _generator_script
+ if (!defined(data)) {
+ data = []
+ }
+ data += [ _wrapper_script ]
+ outputs = [
+ _wrapper_script,
+ ]
+
+ _rebased_executable_to_wrap =
+ rebase_path(_executable_to_wrap, root_build_dir)
+ _rebased_wrapper_script = rebase_path(_wrapper_script, root_build_dir)
+ if (is_win) {
+ _script_language = "batch"
+ } else {
+ _script_language = "bash"
+ }
+ args = [
+ "--executable",
+ "@WrappedPath(${_rebased_executable_to_wrap})",
+ "--wrapper-script",
+ _rebased_wrapper_script,
+ "--output-directory",
+ rebase_path(root_build_dir, root_build_dir),
+ "--script-language",
+ _script_language,
+ "--",
+ ]
+ args += _wrapped_arguments
+ }
+}
diff --git a/deps/v8/build/util/generate_wrapper.py b/deps/v8/build/util/generate_wrapper.py
new file mode 100755
index 0000000000..5373e1ea2e
--- /dev/null
+++ b/deps/v8/build/util/generate_wrapper.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env vpython
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps an executable and any provided arguments into an executable script."""
+
+import argparse
+import os
+import sys
+import textwrap
+
+
+# The bash template passes the python script into vpython via stdin.
+# The interpreter doesn't know about the script, so we have bash
+# inject the script location.
+BASH_TEMPLATE = textwrap.dedent(
+ """\
+ #!/usr/bin/env vpython
+ _SCRIPT_LOCATION = __file__
+ {script}
+ """)
+
+
+# The batch template reruns the batch script with vpython, with the -x
+# flag instructing the interpreter to ignore the first line. The interpreter
+# knows about the (batch) script in this case, so it can get the file location
+# directly.
+BATCH_TEMPLATE = textwrap.dedent(
+ """\
+ @SETLOCAL ENABLEDELAYEDEXPANSION \
+ & vpython.bat -x "%~f0" %* \
+ & EXIT /B !ERRORLEVEL!
+ _SCRIPT_LOCATION = __file__
+ {script}
+ """)
+
+
+SCRIPT_TEMPLATES = {
+ 'bash': BASH_TEMPLATE,
+ 'batch': BATCH_TEMPLATE,
+}
+
+
+PY_TEMPLATE = textwrap.dedent(
+ """\
+ import os
+ import re
+ import subprocess
+ import sys
+
+ _WRAPPED_PATH_RE = re.compile(r'@WrappedPath\(([^)]+)\)')
+ _PATH_TO_OUTPUT_DIR = '{path_to_output_dir}'
+ _SCRIPT_DIR = os.path.dirname(os.path.realpath(_SCRIPT_LOCATION))
+
+
+ def ExpandWrappedPath(arg):
+ m = _WRAPPED_PATH_RE.match(arg)
+ if m:
+ return os.path.join(
+ os.path.relpath(_SCRIPT_DIR), _PATH_TO_OUTPUT_DIR, m.group(1))
+ return arg
+
+
+ def ExpandWrappedPaths(args):
+ for i, arg in enumerate(args):
+ args[i] = ExpandWrappedPath(arg)
+ return args
+
+
+ def main(raw_args):
+ executable_path = ExpandWrappedPath('{executable_path}')
+ executable_args = ExpandWrappedPaths({executable_args})
+
+ return subprocess.call([executable_path] + executable_args + raw_args)
+
+
+ if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
+ """)
+
+
+def Wrap(args):
+ """Writes a wrapped script according to the provided arguments.
+
+ Arguments:
+ args: an argparse.Namespace object containing command-line arguments
+ as parsed by a parser returned by CreateArgumentParser.
+ """
+ path_to_output_dir = os.path.relpath(
+ args.output_directory,
+ os.path.dirname(args.wrapper_script))
+
+ with open(args.wrapper_script, 'w') as wrapper_script:
+ py_contents = PY_TEMPLATE.format(
+ path_to_output_dir=path_to_output_dir,
+ executable_path=str(args.executable),
+ executable_args=str(args.executable_args))
+ template = SCRIPT_TEMPLATES[args.script_language]
+ wrapper_script.write(template.format(
+ script=py_contents))
+ os.chmod(args.wrapper_script, 0750)
+
+ return 0
+
+
+def CreateArgumentParser():
+ """Creates an argparse.ArgumentParser instance."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--executable',
+ help='Executable to wrap.')
+ parser.add_argument(
+ '--wrapper-script',
+ help='Path to which the wrapper script will be written.')
+ parser.add_argument(
+ '--output-directory',
+ help='Path to the output directory.')
+ parser.add_argument(
+ '--script-language',
+ choices=SCRIPT_TEMPLATES.keys(),
+    help='Language in which the wrapper script will be written.')
+ parser.add_argument(
+ 'executable_args', nargs='*',
+ help='Arguments to wrap into the executable.')
+ return parser
+
+
+def main(raw_args):
+ parser = CreateArgumentParser()
+ args = parser.parse_args(raw_args)
+ return Wrap(args)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
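
The key trick in the generated wrapper is that @WrappedPath(...) arguments are
resolved relative to the wrapper's own location plus the recorded relative path
back to the output directory, so a build directory can be moved as a unit
without breaking its wrappers. A standalone sketch of that expansion (mirroring
ExpandWrappedPath above; the example paths are hypothetical):

    import os
    import re

    _WRAPPED_PATH_RE = re.compile(r'@WrappedPath\(([^)]+)\)')

    def expand(arg, script_dir, path_to_output_dir):
        # Rewrite '@WrappedPath(rel)' to '<script_dir>/<path_to_output_dir>/rel'.
        m = _WRAPPED_PATH_RE.match(arg)
        if m:
            return os.path.join(script_dir, path_to_output_dir, m.group(1))
        return arg

    # Hypothetical layout: the wrapper lives in out/Debug/bin, one level below
    # the output directory, so path_to_output_dir is '..'.
    print(expand('@WrappedPath(obj/sample_lib.so)', 'out/Debug/bin', '..'))
    # -> out/Debug/bin/../obj/sample_lib.so
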
diff --git a/deps/v8/build/util/java_action.gni b/deps/v8/build/util/java_action.gni
new file mode 100644
index 0000000000..646d5a4e7e
--- /dev/null
+++ b/deps/v8/build/util/java_action.gni
@@ -0,0 +1,103 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+jarrunner = "//build/util/java_action.py"
+
+# Declare a target that runs a java command a single time.
+#
+# This target type allows you to run a java command a single time to produce
+# one or more output files. If you want to run a java command for each of a
+# set of input files, see "java_action_foreach".
+#
+# See "gn help action" for more information on how to use this target. This
+# template is based on the "action" and supports the same variables.
+template("java_action") {
+ assert(defined(invoker.script),
+ "Need script in $target_name listing the .jar file to run.")
+ assert(defined(invoker.outputs),
+ "Need outputs in $target_name listing the generated outputs.")
+
+ jarscript = invoker.script
+ action(target_name) {
+ script = jarrunner
+
+ inputs = [
+ jarscript,
+ ]
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+
+ args = [
+ "-jar",
+ rebase_path(jarscript, root_build_dir),
+ ]
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+
+ forward_variables_from(invoker,
+ [
+ "console",
+ "data",
+ "data_deps",
+ "depfile",
+ "deps",
+ "outputs",
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+ }
+}
+
+# Declare a target that runs a java command over a set of files.
+#
+# This target type allows you to run a java command once-per-file over a set of
+# sources. If you want to run a java command once that takes many files as
+# input, see "java_action".
+#
+# See "gn help action_foreach" for more information on how to use this target.
+# This template is based on the "action_foreach" supports the same variables.
+template("java_action_foreach") {
+ assert(defined(invoker.script),
+ "Need script in $target_name listing the .jar file to run.")
+ assert(defined(invoker.outputs),
+ "Need outputs in $target_name listing the generated outputs.")
+ assert(defined(invoker.sources),
+ "Need sources in $target_name listing the target inputs.")
+
+ jarscript = invoker.script
+ action_foreach(target_name) {
+ script = jarrunner
+
+ inputs = [
+ jarscript,
+ ]
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+
+ args = [
+ "-jar",
+ rebase_path(jarscript, root_build_dir),
+ ]
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+
+ forward_variables_from(invoker,
+ [
+ "console",
+ "data",
+ "data_deps",
+ "depfile",
+ "deps",
+ "outputs",
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+ }
+}
diff --git a/deps/v8/build/util/java_action.py b/deps/v8/build/util/java_action.py
new file mode 100755
index 0000000000..ed9bb601de
--- /dev/null
+++ b/deps/v8/build/util/java_action.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper script to run java command as action with gn."""
+
+import os
+import subprocess
+import sys
+
+EXIT_SUCCESS = 0
+EXIT_FAILURE = 1
+
+
+def IsExecutable(path):
+ """Returns whether file at |path| exists and is executable.
+
+ Args:
+ path: absolute or relative path to test.
+
+ Returns:
+    True if the file at |path| exists and is executable, False otherwise.
+ """
+ return os.path.isfile(path) and os.access(path, os.X_OK)
+
+
+def FindCommand(command):
+ """Looks up for |command| in PATH.
+
+ Args:
+    command: name of the command to look up; if command is a relative or
+ absolute path (i.e. contains some path separator) then only that
+ path will be tested.
+
+ Returns:
+ Full path to command or None if the command was not found.
+
+ On Windows, this respects the PATHEXT environment variable when the
+ command name does not have an extension.
+ """
+ fpath, _ = os.path.split(command)
+ if fpath:
+ if IsExecutable(command):
+ return command
+
+ if sys.platform == 'win32':
+ # On Windows, if the command does not have an extension, cmd.exe will
+ # try all extensions from PATHEXT when resolving the full path.
+ command, ext = os.path.splitext(command)
+ if not ext:
+ exts = os.environ['PATHEXT'].split(os.path.pathsep)
+ else:
+ exts = [ext]
+ else:
+ exts = ['']
+
+  for directory in os.environ['PATH'].split(os.path.pathsep):
+    for ext in exts:
+      candidate = os.path.join(directory, command) + ext
+      if IsExecutable(candidate):
+        return candidate
+
+ return None
+
+
+def main():
+ java_path = FindCommand('java')
+ if not java_path:
+ sys.stderr.write('java: command not found\n')
+ sys.exit(EXIT_FAILURE)
+
+ args = sys.argv[1:]
+ if len(args) < 2 or args[0] != '-jar':
+ sys.stderr.write('usage: %s -jar JARPATH [java_args]...\n' % sys.argv[0])
+ sys.exit(EXIT_FAILURE)
+
+ return subprocess.check_call([java_path] + args)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
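
On Windows, FindCommand reproduces cmd.exe's lookup rules: a bare command name
is tried with every PATHEXT extension in every PATH entry, in order. A small
illustration of the resulting candidate order (the environment values here are
hypothetical):

    # Mirrors the nested loop in FindCommand above: PATH entries vary slowest,
    # PATHEXT extensions vary fastest.
    path_dirs = ['C:\\tools', 'C:\\jdk\\bin']
    pathext = ['.COM', '.EXE', '.BAT']
    candidates = [d + '\\java' + e for d in path_dirs for e in pathext]
    # Tries C:\tools\java.COM, C:\tools\java.EXE, C:\tools\java.BAT,
    # then C:\jdk\bin\java.COM, and so on, returning the first executable hit.
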
diff --git a/deps/v8/build/util/lastchange.gni b/deps/v8/build/util/lastchange.gni
new file mode 100644
index 0000000000..a13295900d
--- /dev/null
+++ b/deps/v8/build/util/lastchange.gni
@@ -0,0 +1,16 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is used to inject fixed dummy commit for commit independent
+# reproducible binaries.
+
+declare_args() {
+ use_dummy_lastchange = false
+}
+
+if (use_dummy_lastchange) {
+ lastchange_file = "//build/util/LASTCHANGE.dummy"
+} else {
+ lastchange_file = "//build/util/LASTCHANGE"
+}
diff --git a/deps/v8/build/util/lastchange.py b/deps/v8/build/util/lastchange.py
new file mode 100755
index 0000000000..6d704b7afa
--- /dev/null
+++ b/deps/v8/build/util/lastchange.py
@@ -0,0 +1,318 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+from __future__ import print_function
+
+import argparse
+import collections
+import logging
+import os
+import subprocess
+import sys
+
+VersionInfo = collections.namedtuple("VersionInfo",
+ ("revision_id", "revision", "timestamp"))
+
+class GitError(Exception):
+ pass
+
+# This function exists for compatibility with logic outside this
+# repository that uses this file as a library.
+# TODO(eliribble) remove this function after it has been ported into
+# the repositories that depend on it
+def RunGitCommand(directory, command):
+ """
+ Launches git subcommand.
+
+ Errors are swallowed.
+
+ Returns:
+ A process object or None.
+ """
+ command = ['git'] + command
+ # Force shell usage under cygwin. This is a workaround for
+ # mysterious loss of cwd while invoking cygwin's git.
+ # We can't just pass shell=True to Popen, as under win32 this will
+ # cause CMD to be used, while we explicitly want a cygwin shell.
+ if sys.platform == 'cygwin':
+ command = ['sh', '-c', ' '.join(command)]
+ try:
+ proc = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=directory,
+ shell=(sys.platform=='win32'))
+ return proc
+ except OSError as e:
+ logging.error('Command %r failed: %s' % (' '.join(command), e))
+ return None
+
+
+def _RunGitCommand(directory, command):
+ """Launches git subcommand.
+
+ Returns:
+ The stripped stdout of the git command.
+ Raises:
+ GitError on failure, including a nonzero return code.
+ """
+ command = ['git'] + command
+ # Force shell usage under cygwin. This is a workaround for
+ # mysterious loss of cwd while invoking cygwin's git.
+ # We can't just pass shell=True to Popen, as under win32 this will
+ # cause CMD to be used, while we explicitly want a cygwin shell.
+ if sys.platform == 'cygwin':
+ command = ['sh', '-c', ' '.join(command)]
+ try:
+ logging.info("Executing '%s' in %s", ' '.join(command), directory)
+ proc = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=directory,
+ shell=(sys.platform=='win32'))
+ stdout, stderr = proc.communicate()
+ stdout = stdout.strip()
+ logging.debug("returncode: %d", proc.returncode)
+ logging.debug("stdout: %s", stdout)
+ logging.debug("stderr: %s", stderr)
+ if proc.returncode != 0 or not stdout:
+ raise GitError((
+ "Git command '{}' in {} failed: "
+ "rc={}, stdout='{}' stderr='{}'").format(
+ " ".join(command), directory, proc.returncode, stdout, stderr))
+ return stdout
+ except OSError as e:
+ raise GitError("Git command 'git {}' in {} failed: {}".format(
+ " ".join(command), directory, e))
+
+
+def GetMergeBase(directory, ref):
+ """
+ Return the merge-base of HEAD and ref.
+
+ Args:
+ directory: The directory containing the .git directory.
+ ref: The ref to use to find the merge base.
+ Returns:
+ The git commit SHA of the merge-base as a string.
+ """
+ logging.debug("Calculating merge base between HEAD and %s in %s",
+ ref, directory)
+ command = ['merge-base', 'HEAD', ref]
+ return _RunGitCommand(directory, command)
+
+
+def FetchGitRevision(directory, commit_filter, start_commit="HEAD"):
+ """
+ Fetch the Git hash (and Cr-Commit-Position if any) for a given directory.
+
+ Args:
+ directory: The directory containing the .git directory.
+ commit_filter: A filter to supply to grep to filter commits
+ start_commit: A commit identifier. The result of this function
+ will be limited to only consider commits before the provided
+ commit.
+ Returns:
+ A VersionInfo object. On error all values will be 0.
+ """
+ hash_ = ''
+
+ git_args = ['log', '-1', '--format=%H %ct']
+ if commit_filter is not None:
+ git_args.append('--grep=' + commit_filter)
+
+ git_args.append(start_commit)
+
+ output = _RunGitCommand(directory, git_args)
+ hash_, commit_timestamp = output.split()
+ if not hash_:
+ return VersionInfo('0', '0', 0)
+
+ revision = hash_
+ output = _RunGitCommand(directory, ['cat-file', 'commit', hash_])
+ for line in reversed(output.splitlines()):
+ if line.startswith('Cr-Commit-Position:'):
+ pos = line.rsplit()[-1].strip()
+ logging.debug("Found Cr-Commit-Position '%s'", pos)
+ revision = "{}-{}".format(hash_, pos)
+ break
+ return VersionInfo(hash_, revision, int(commit_timestamp))
+
+
+def GetHeaderGuard(path):
+ """
+ Returns the header #define guard for the given file path.
+ This treats everything after the last instance of "src/" as being a
+ relevant part of the guard. If there is no "src/", then the entire path
+ is used.
+ """
+ src_index = path.rfind('src/')
+ if src_index != -1:
+ guard = path[src_index + 4:]
+ else:
+ guard = path
+ guard = guard.upper()
+ return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
+
+def GetHeaderContents(path, define, version):
+ """
+  Returns the contents that the header file should have in order to indicate
+  the given revision.
+ """
+ header_guard = GetHeaderGuard(path)
+
+ header_contents = """/* Generated by lastchange.py, do not edit.*/
+
+#ifndef %(header_guard)s
+#define %(header_guard)s
+
+#define %(define)s "%(version)s"
+
+#endif // %(header_guard)s
+"""
+ header_contents = header_contents % { 'header_guard': header_guard,
+ 'define': define,
+ 'version': version }
+ return header_contents
+
+
+def GetGitTopDirectory(source_dir):
+ """Get the top git directory - the directory that contains the .git directory.
+
+ Args:
+ source_dir: The directory to search.
+ Returns:
+ The output of "git rev-parse --show-toplevel" as a string
+ """
+ return _RunGitCommand(source_dir, ['rev-parse', '--show-toplevel'])
+
+
+def WriteIfChanged(file_name, contents):
+ """
+ Writes the specified contents to the specified file_name
+  iff the contents are different from the current contents.
+  Returns whether new data was written.
+ """
+ try:
+ old_contents = open(file_name, 'r').read()
+ except EnvironmentError:
+ pass
+ else:
+ if contents == old_contents:
+ return False
+ os.unlink(file_name)
+ open(file_name, 'w').write(contents)
+ return True
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ parser = argparse.ArgumentParser(usage="lastchange.py [options]")
+ parser.add_argument("-m", "--version-macro",
+ help=("Name of C #define when using --header. Defaults to "
+ "LAST_CHANGE."))
+ parser.add_argument("-o", "--output", metavar="FILE",
+ help=("Write last change to FILE. "
+ "Can be combined with --header to write both files."))
+ parser.add_argument("--header", metavar="FILE",
+ help=("Write last change to FILE as a C/C++ header. "
+ "Can be combined with --output to write both files."))
+ parser.add_argument("--merge-base-ref",
+ default=None,
+ help=("Only consider changes since the merge "
+ "base between HEAD and the provided ref"))
+ parser.add_argument("--revision-id-only", action='store_true',
+ help=("Output the revision as a VCS revision ID only (in "
+ "Git, a 40-character commit hash, excluding the "
+ "Cr-Commit-Position)."))
+ parser.add_argument("--print-only", action="store_true",
+ help=("Just print the revision string. Overrides any "
+ "file-output-related options."))
+ parser.add_argument("-s", "--source-dir", metavar="DIR",
+ help="Use repository in the given directory.")
+ parser.add_argument("--filter", metavar="REGEX",
+ help=("Only use log entries where the commit message "
+ "matches the supplied filter regex. Defaults to "
+ "'^Change-Id:' to suppress local commits."),
+ default='^Change-Id:')
+
+ args, extras = parser.parse_known_args(argv[1:])
+
+ logging.basicConfig(level=logging.WARNING)
+
+ out_file = args.output
+ header = args.header
+  commit_filter = args.filter
+
+  if extras and out_file is None:
+    out_file = extras.pop(0)
+ if extras:
+ sys.stderr.write('Unexpected arguments: %r\n\n' % extras)
+ parser.print_help()
+ sys.exit(2)
+
+ source_dir = args.source_dir or os.path.dirname(os.path.abspath(__file__))
+ try:
+ git_top_dir = GetGitTopDirectory(source_dir)
+ except GitError as e:
+ logging.error("Failed to get git top directory from '%s': %s",
+ source_dir, e)
+ return 2
+
+ if args.merge_base_ref:
+ try:
+ merge_base_sha = GetMergeBase(git_top_dir, args.merge_base_ref)
+ except GitError as e:
+ logging.error("You requested a --merge-base-ref value of '%s' but no "
+ "merge base could be found between it and HEAD. Git "
+ "reports: %s", args.merge_base_ref, e)
+ return 3
+ else:
+ merge_base_sha = 'HEAD'
+
+ try:
+ version_info = FetchGitRevision(git_top_dir, commit_filter, merge_base_sha)
+ except GitError as e:
+ logging.error("Failed to get version info: %s", e)
+ logging.info(("Falling back to a version of 0.0.0 to allow script to "
+ "finish. This is normal if you are bootstrapping a new environment "
+ "or do not have a git repository for any other reason. If not, this "
+ "could represent a serious error."))
+ version_info = VersionInfo('0', '0', 0)
+
+ revision_string = version_info.revision
+ if args.revision_id_only:
+ revision_string = version_info.revision_id
+
+ if args.print_only:
+ print(revision_string)
+ else:
+ contents = "LASTCHANGE=%s\n" % revision_string
+ if not out_file and not args.header:
+ sys.stdout.write(contents)
+ else:
+ if out_file:
+ committime_file = out_file + '.committime'
+ out_changed = WriteIfChanged(out_file, contents)
+ if out_changed or not os.path.exists(committime_file):
+ with open(committime_file, 'w') as timefile:
+ timefile.write(str(version_info.timestamp))
+ if header:
+ WriteIfChanged(header,
+ GetHeaderContents(header, args.version_macro,
+ revision_string))
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
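
The LASTCHANGE file this script writes has the form "LASTCHANGE=<hash>" or
"LASTCHANGE=<hash>-<Cr-Commit-Position>"; LASTCHANGE.dummy earlier in this
change shows the hyphenated form, and the webkit_version action above keeps
only the hash portion. A small sketch of that split, using the dummy file's
contents:

    # Matches the format in LASTCHANGE.dummy: "<hash>-<position>".
    line = 'LASTCHANGE=' + '0' * 40 + '-' + '0' * 40
    value = line.split('=', 1)[1]
    build_hash = value[:value.find('-')]  # what webkit_version's -e expression keeps
    assert len(build_hash) == 40
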
diff --git a/deps/v8/build/util/lib/common/PRESUBMIT.py b/deps/v8/build/util/lib/common/PRESUBMIT.py
new file mode 100644
index 0000000000..fca962f1ca
--- /dev/null
+++ b/deps/v8/build/util/lib/common/PRESUBMIT.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+def _RunTests(input_api, output_api):
+ return (input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=[r'.+_test.py$']))
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return _RunTests(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return _RunTests(input_api, output_api)
diff --git a/deps/v8/build/util/lib/common/__init__.py b/deps/v8/build/util/lib/common/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/util/lib/common/__init__.py
diff --git a/deps/v8/build/util/lib/common/chrome_test_server_spawner.py b/deps/v8/build/util/lib/common/chrome_test_server_spawner.py
new file mode 100644
index 0000000000..b9844aa391
--- /dev/null
+++ b/deps/v8/build/util/lib/common/chrome_test_server_spawner.py
@@ -0,0 +1,480 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+import urlparse
+
+
+SERVER_TYPES = {
+ 'http': '',
+ 'ftp': '-f',
+ 'sync': '', # Sync uses its own script, and doesn't take a server type arg.
+ 'tcpecho': '--tcp-echo',
+ 'udpecho': '--udp-echo',
+ 'ws': '--websocket',
+}
+
+
+_DIR_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+ os.pardir))
+
+
+_logger = logging.getLogger(__name__)
+
+
+# Paths that are needed to import necessary modules when launching a testserver.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
+ % (os.path.join(_DIR_SOURCE_ROOT, 'third_party'),
+ os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
+ os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib', 'src'),
+ os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
+ os.path.join(_DIR_SOURCE_ROOT, 'components', 'sync', 'tools',
+ 'testserver')))
+
+
+# The timeout (in seconds) of starting up the Python test server.
+_TEST_SERVER_STARTUP_TIMEOUT = 10
+
+
+def _GetServerTypeCommandLine(server_type):
+ """Returns the command-line by the given server type.
+
+ Args:
+ server_type: the server type to be used (e.g. 'http').
+
+ Returns:
+ A string containing the command-line argument.
+ """
+ if server_type not in SERVER_TYPES:
+ raise NotImplementedError('Unknown server type: %s' % server_type)
+ if server_type == 'udpecho':
+ raise Exception('Please do not run UDP echo tests because we do not have '
+ 'a UDP forwarder tool.')
+ return SERVER_TYPES[server_type]
+
+
+class PortForwarder:
+ def Map(self, port_pairs):
+ pass
+
+ def GetDevicePortForHostPort(self, host_port):
+ """Returns the device port that corresponds to a given host port."""
+ return host_port
+
+ def WaitHostPortAvailable(self, port):
+ """Returns True if |port| is available."""
+ return True
+
+ def WaitPortNotAvailable(self, port):
+ """Returns True if |port| is not available."""
+ return True
+
+ def WaitDevicePortReady(self, port):
+ """Returns whether the provided port is used."""
+ return True
+
+ def Unmap(self, device_port):
+ """Unmaps specified port"""
+ pass
+
+
+class TestServerThread(threading.Thread):
+ """A thread to run the test server in a separate process."""
+
+ def __init__(self, ready_event, arguments, port_forwarder):
+ """Initialize TestServerThread with the following argument.
+
+ Args:
+ ready_event: event which will be set when the test server is ready.
+ arguments: dictionary of arguments to run the test server.
+ device: An instance of DeviceUtils.
+ tool: instance of runtime error detection tool.
+ """
+ threading.Thread.__init__(self)
+ self.wait_event = threading.Event()
+ self.stop_event = threading.Event()
+ self.ready_event = ready_event
+ self.ready_event.clear()
+ self.arguments = arguments
+ self.port_forwarder = port_forwarder
+ self.test_server_process = None
+ self.is_ready = False
+ self.host_port = self.arguments['port']
+ self.host_ocsp_port = 0
+ assert isinstance(self.host_port, int)
+ # The forwarder device port now is dynamically allocated.
+ self.forwarder_device_port = 0
+ self.forwarder_ocsp_device_port = 0
+ # Anonymous pipe in order to get port info from test server.
+ self.pipe_in = None
+ self.pipe_out = None
+ self.process = None
+ self.command_line = []
+
+ def _WaitToStartAndGetPortFromTestServer(self):
+ """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server over the pipe given
+    by self.pipe_out, and the result is written to |self.host_port|.
+
+ Returns:
+ Whether the port used by the test server was successfully fetched.
+ """
+ assert self.host_port == 0 and self.pipe_out and self.pipe_in
+ (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+ _TEST_SERVER_STARTUP_TIMEOUT)
+ if len(in_fds) == 0:
+      _logger.error('Timed out waiting for the Python test server to start.')
+ return False
+ # First read the data length as an unsigned 4-byte value. This
+ # is _not_ using network byte ordering since the Python test server packs
+ # size as native byte order and all Chromium platforms so far are
+ # configured to use little-endian.
+ # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+ # use a unified byte order (either big-endian or little-endian).
+ data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+ if data_length:
+ (data_length,) = struct.unpack('=L', data_length)
+ assert data_length
+ if not data_length:
+ _logger.error('Failed to get length of server data.')
+ return False
+ server_data_json = os.read(self.pipe_in, data_length)
+ if not server_data_json:
+ _logger.error('Failed to get server data.')
+ return False
+ _logger.info('Got port json data: %s', server_data_json)
+
+ parsed_server_data = None
+ try:
+ parsed_server_data = json.loads(server_data_json)
+ except ValueError:
+ pass
+
+ if not isinstance(parsed_server_data, dict):
+ _logger.error('Failed to parse server_data: %s' % server_data_json)
+ return False
+
+ if not isinstance(parsed_server_data.get('port'), int):
+ _logger.error('Failed to get port information from the server data.')
+ return False
+
+ self.host_port = parsed_server_data['port']
+ self.host_ocsp_port = parsed_server_data.get('ocsp_port', 0)
+
+ return self.port_forwarder.WaitPortNotAvailable(self.host_port)
+
+ def _GenerateCommandLineArguments(self):
+ """Generates the command line to run the test server.
+
+ Note that all options are processed by following the definitions in
+ testserver.py.
+ """
+ if self.command_line:
+ return
+
+ args_copy = dict(self.arguments)
+
+ # Translate the server type.
+ type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
+ if type_cmd:
+ self.command_line.append(type_cmd)
+
+ # Use a pipe to get the port given by the instance of Python test server
+ # if the test does not specify the port.
+ assert self.host_port == args_copy['port']
+ if self.host_port == 0:
+ (self.pipe_in, self.pipe_out) = os.pipe()
+ self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+
+ # Pass the remaining arguments as-is.
+ for key, values in args_copy.iteritems():
+ if not isinstance(values, list):
+ values = [values]
+ for value in values:
+ if value is None:
+ self.command_line.append('--%s' % key)
+ else:
+ self.command_line.append('--%s=%s' % (key, value))
+
+ def _CloseUnnecessaryFDsForTestServerProcess(self):
+ # This is required to avoid subtle deadlocks that could be caused by the
+ # test server child process inheriting undesirable file descriptors such as
+ # file lock file descriptors.
+ for fd in xrange(0, 1024):
+ if fd != self.pipe_out:
+ try:
+ os.close(fd)
+ except:
+ pass
+
+ def run(self):
+ _logger.info('Start running the thread!')
+ self.wait_event.clear()
+ self._GenerateCommandLineArguments()
+ command = _DIR_SOURCE_ROOT
+ if self.arguments['server-type'] == 'sync':
+ command = [os.path.join(command, 'components', 'sync', 'tools',
+ 'testserver',
+ 'sync_testserver.py')] + self.command_line
+ else:
+ command = [os.path.join(command, 'net', 'tools', 'testserver',
+ 'testserver.py')] + self.command_line
+ _logger.info('Running: %s', command)
+
+ # Disable PYTHONUNBUFFERED because it has a bad interaction with the
+ # testserver. Remove once this interaction is fixed.
+ unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
+
+ # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative
+ # paths in the arguments are resolved correctly.
+ self.process = subprocess.Popen(
+ command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
+ cwd=_DIR_SOURCE_ROOT)
+ if unbuf:
+ os.environ['PYTHONUNBUFFERED'] = unbuf
+ if self.process:
+ if self.pipe_out:
+ self.is_ready = self._WaitToStartAndGetPortFromTestServer()
+ else:
+ self.is_ready = self.port_forwarder.WaitPortNotAvailable(self.host_port)
+
+ if self.is_ready:
+ port_map = [(0, self.host_port)]
+ if self.host_ocsp_port:
+ port_map.extend([(0, self.host_ocsp_port)])
+ self.port_forwarder.Map(port_map)
+
+ self.forwarder_device_port = \
+ self.port_forwarder.GetDevicePortForHostPort(self.host_port)
+ if self.host_ocsp_port:
+ self.forwarder_ocsp_device_port = \
+ self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port)
+
+ # Check whether the forwarder is ready on the device.
+ self.is_ready = self.forwarder_device_port and \
+ self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port)
+
+ # Wake up the request handler thread.
+ self.ready_event.set()
+ # Keep thread running until Stop() gets called.
+ self.stop_event.wait()
+ if self.process.poll() is None:
+ self.process.kill()
+ self.port_forwarder.Unmap(self.forwarder_device_port)
+ self.process = None
+ self.is_ready = False
+ if self.pipe_out:
+ os.close(self.pipe_in)
+ os.close(self.pipe_out)
+ self.pipe_in = None
+ self.pipe_out = None
+ _logger.info('Test server has died.')
+ self.wait_event.set()
+
+ def Stop(self):
+ """Blocks until the loop has finished.
+
+ Note that this must be called in another thread.
+ """
+ if not self.process:
+ return
+ self.stop_event.set()
+ self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ """A handler used to process http GET/POST request."""
+
+ def _SendResponse(self, response_code, response_reason, additional_headers,
+ contents):
+ """Generates a response sent to the client from the provided parameters.
+
+ Args:
+ response_code: numeric status code of the response.
+ response_reason: reason phrase describing the response.
+ additional_headers: dict of additional headers. Each key is the name of
+ the header, each value is the content of the header.
+ contents: string contents to send to the client.
+ """
+ self.send_response(response_code, response_reason)
+ self.send_header('Content-Type', 'text/html')
+ # Specify the content-length: without it, the http(s) response will not
+ # be completed properly (and the browser keeps expecting data).
+ self.send_header('Content-Length', len(contents))
+ for header_name in additional_headers:
+ self.send_header(header_name, additional_headers[header_name])
+ self.end_headers()
+ self.wfile.write(contents)
+ self.wfile.flush()
+
+ def _StartTestServer(self):
+ """Starts the test server thread."""
+ _logger.info('Handling request to spawn a test server.')
+ content_type = self.headers.getheader('content-type')
+ if content_type != 'application/json':
+ raise Exception('Bad content-type for start request.')
+ content_length = self.headers.getheader('content-length')
+ if not content_length:
+ content_length = 0
+ try:
+ content_length = int(content_length)
+ except (TypeError, ValueError):
+ raise Exception('Bad content-length for start request.')
+ _logger.info(content_length)
+ test_server_argument_json = self.rfile.read(content_length)
+ _logger.info(test_server_argument_json)
+
+ if len(self.server.test_servers) >= self.server.max_instances:
+ self._SendResponse(400, 'Invalid request', {},
+ 'Too many test servers running')
+ return
+
+ ready_event = threading.Event()
+ new_server = TestServerThread(ready_event,
+ json.loads(test_server_argument_json),
+ self.server.port_forwarder)
+ new_server.setDaemon(True)
+ new_server.start()
+ ready_event.wait()
+ if new_server.is_ready:
+ response = {'port': new_server.forwarder_device_port,
+ 'message': 'started'};
+ if new_server.forwarder_ocsp_device_port:
+ response['ocsp_port'] = new_server.forwarder_ocsp_device_port
+ self._SendResponse(200, 'OK', {}, json.dumps(response))
+ _logger.info('Test server is running on port %d forwarded to %d.' %
+ (new_server.forwarder_device_port, new_server.host_port))
+ port = new_server.forwarder_device_port
+ assert not self.server.test_servers.has_key(port)
+ self.server.test_servers[port] = new_server
+ else:
+ new_server.Stop()
+ self._SendResponse(500, 'Test Server Error.', {}, '')
+ _logger.info('Encountered a problem while starting a test server.')
+
+ def _KillTestServer(self, params):
+ """Stops the test server instance."""
+ try:
+ port = int(params['port'][0])
+ except (ValueError, KeyError):
+ port = None
+ if port is None or port <= 0:
+ self._SendResponse(400, 'Invalid request.', {}, 'port must be specified')
+ return
+
+ if not self.server.test_servers.has_key(port):
+ self._SendResponse(400, 'Invalid request.', {},
+ "testserver isn't running on port %d" % port)
+ return
+
+ server = self.server.test_servers.pop(port)
+
+ _logger.info('Handling request to kill a test server on port: %d.', port)
+ server.Stop()
+
+ # Make sure the test server has actually stopped before sending the response.
+ if self.server.port_forwarder.WaitHostPortAvailable(port):
+ self._SendResponse(200, 'OK', {}, 'killed')
+ _logger.info('Test server on port %d was killed.', port)
+ else:
+ self._SendResponse(500, 'Test Server Error.', {}, '')
+ _logger.info('Encountered a problem while killing the test server.')
+
+ def log_message(self, format, *args):
+ # Suppress the default HTTP logging behavior unless the logging level is
+ # INFO or lower.
+ if _logger.getEffectiveLevel() <= logging.INFO:
+ BaseHTTPServer.BaseHTTPRequestHandler.log_message(self, format, *args)
+
+ def do_POST(self):
+ parsed_path = urlparse.urlparse(self.path)
+ action = parsed_path.path
+ _logger.info('Action for POST method is: %s.', action)
+ if action == '/start':
+ self._StartTestServer()
+ else:
+ self._SendResponse(400, 'Unknown request.', {}, '')
+ _logger.info('Encountered unknown request: %s.', action)
+
+ def do_GET(self):
+ parsed_path = urlparse.urlparse(self.path)
+ action = parsed_path.path
+ params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+ _logger.info('Action for GET method is: %s.', action)
+ for param in params:
+ _logger.info('%s=%s', param, params[param][0])
+ if action == '/kill':
+ self._KillTestServer(params)
+ elif action == '/ping':
+ # The ping handler is used to check whether the spawner server is ready
+ # to serve requests. We don't need to check the status of the test
+ # server when handling a ping request.
+ self._SendResponse(200, 'OK', {}, 'ready')
+ _logger.info('Handled ping request and sent response.')
+ else:
+ self._SendResponse(400, 'Unknown request', {}, '')
+ _logger.info('Encountered unknown request: %s.', action)
+
+
+class SpawningServer(object):
+ """The class used to start/stop a http server."""
+
+ def __init__(self, test_server_spawner_port, port_forwarder, max_instances):
+ self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+ SpawningServerRequestHandler)
+ self.server_port = self.server.server_port
+ _logger.info('Started test server spawner on port: %d.', self.server_port)
+
+ self.server.port_forwarder = port_forwarder
+ self.server.test_servers = {}
+ self.server.max_instances = max_instances
+
+ def _Listen(self):
+ _logger.info('Starting test server spawner.')
+ self.server.serve_forever()
+
+ def Start(self):
+ """Starts the test server spawner."""
+ listener_thread = threading.Thread(target=self._Listen)
+ listener_thread.setDaemon(True)
+ listener_thread.start()
+
+ def Stop(self):
+ """Stops the test server spawner.
+
+ Also cleans the server state.
+ """
+ self.CleanupState()
+ self.server.shutdown()
+
+ def CleanupState(self):
+ """Cleans up the spawning server state.
+
+ This should be called if the test server spawner is reused,
+ to avoid sharing the test server instance.
+ """
+ if self.server.test_servers:
+ _logger.warning('Not all test servers were stopped.')
+ for port in self.server.test_servers:
+ _logger.warning('Stopping test server on port %d' % port)
+ self.server.test_servers[port].Stop()
+ self.server.test_servers = {}
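Taken together, the handler and server above define a small HTTP control protocol: POST /start with a JSON body spawns a test server, GET /kill?port=N stops it, and GET /ping probes readiness. A minimal sketch of a host-side client driving that protocol (the spawner address and the 'http' server type are illustrative assumptions, not part of this file):

    import httplib
    import json

    SPAWNER_ADDRESS = ('localhost', 8001)  # hypothetical spawner location

    conn = httplib.HTTPConnection(*SPAWNER_ADDRESS)

    # Probe readiness; the handler replies 'ready' with status 200.
    conn.request('GET', '/ping')
    assert conn.getresponse().read() == 'ready'

    # Spawn a test server; the keys mirror options defined by testserver.py.
    conn.request('POST', '/start',
                 json.dumps({'server-type': 'http', 'port': 0}),
                 {'Content-Type': 'application/json'})
    start_reply = json.loads(conn.getresponse().read())

    # Tear the server down again through the /kill action.
    conn.request('GET', '/kill?port=%d' % start_reply['port'])
    print conn.getresponse().read()  # prints 'killed' on success
    conn.close()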
diff --git a/deps/v8/build/util/lib/common/perf_result_data_type.py b/deps/v8/build/util/lib/common/perf_result_data_type.py
new file mode 100644
index 0000000000..67b550a46c
--- /dev/null
+++ b/deps/v8/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+ INFORMATIONAL]
+
+
+def IsValidType(datatype):
+ return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+ return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
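A quick sketch of how these predicates are meant to be used (illustrative only, not part of the file):

    import perf_result_data_type as result_type

    assert result_type.IsValidType(result_type.HISTOGRAM)
    assert result_type.IsHistogram(result_type.UNIMPORTANT_HISTOGRAM)
    assert not result_type.IsHistogram(result_type.DEFAULT)
    assert not result_type.IsValidType('made-up-type')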
diff --git a/deps/v8/build/util/lib/common/perf_tests_results_helper.py b/deps/v8/build/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 0000000000..59bb5e439d
--- /dev/null
+++ b/deps/v8/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,200 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import math
+import re
+import sys
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+ perf_result_data_type.DEFAULT: '*RESULT ',
+ perf_result_data_type.INFORMATIONAL: '',
+ perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+ perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+ """Escapes |s| for use in a perf result."""
+ return re.sub(r'[:|=/#&,]', '_', s)
+
+
+def FlattenList(values):
+ """Returns a simple list without sub-lists."""
+ ret = []
+ for entry in values:
+ if isinstance(entry, list):
+ ret.extend(FlattenList(entry))
+ else:
+ ret.append(entry)
+ return ret
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+ histogram = json.loads(histogram_json)
+ # Handle empty histograms gracefully.
+ if not 'buckets' in histogram:
+ return 0.0, 0.0
+ count = 0
+ sum_of_logs = 0
+ for bucket in histogram['buckets']:
+ if 'high' in bucket:
+ bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+ else:
+ bucket['mean'] = bucket['low']
+ if bucket['mean'] > 0:
+ sum_of_logs += math.log(bucket['mean']) * bucket['count']
+ count += bucket['count']
+
+ if count == 0:
+ return 0.0, 0.0
+
+ sum_of_squares = 0
+ geom_mean = math.exp(sum_of_logs / count)
+ for bucket in histogram['buckets']:
+ if bucket['mean'] > 0:
+ sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+ return geom_mean, math.sqrt(sum_of_squares / count)
+
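To make the computation above concrete, a small worked example run in the context of this module (histogram contents invented for illustration): a bucket [1, 3) with count 2 has mean 2 and a bucket starting at 8 with count 2 has mean 8, so the geometric mean is exp((2*ln(2) + 2*ln(8)) / 4) = 4 and the standard deviation is sqrt(((2-4)^2*2 + (8-4)^2*2) / 4) = sqrt(10):

    example = json.dumps({'buckets': [
        {'low': 1, 'high': 3, 'count': 2},  # mean is (1 + 3) / 2 = 2.0
        {'low': 8, 'count': 2},             # no 'high', so mean is 'low' = 8.0
    ]})
    geom_mean, std_dev = GeomMeanAndStdDevFromHistogram(example)
    # geom_mean == 4.0, std_dev == sqrt(10) ~= 3.162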
+
+def _ValueToString(v):
+ # Special case for floats so we don't print using scientific notation.
+ if isinstance(v, float):
+ return '%f' % v
+ else:
+ return str(v)
+
+
+def _MeanAndStdDevFromList(values):
+ avg = None
+ sd = None
+ if len(values) > 1:
+ try:
+ value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+ avg = sum([float(v) for v in values]) / len(values)
+ sqdiffs = [(float(v) - avg) ** 2 for v in values]
+ variance = sum(sqdiffs) / (len(values) - 1)
+ sd = math.sqrt(variance)
+ except ValueError:
+ value = ', '.join(values)
+ else:
+ value = values[0]
+ return value, avg, sd
+
+
+def PrintPages(page_list):
+ """Prints list of pages to stdout in the format required by perf tests."""
+ print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+ result_type=perf_result_data_type.DEFAULT,
+ print_to_stdout=True):
+ """Prints numerical data to stdout in the format required by perf tests.
+
+ The string args may be empty but they must not contain any colons (:) or
+ equals signs (=).
+ This is parsed by the buildbot using:
+ http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+ Args:
+ measurement: A description of the quantity being measured, e.g. "vm_peak".
+ On the dashboard, this maps to a particular graph. Mandatory.
+ trace: A description of the particular data point, e.g. "reference".
+ On the dashboard, this maps to a particular "line" in the graph.
+ Mandatory.
+ values: A list of numeric measured values. An N-dimensional list will be
+ flattened and treated as a simple list.
+ units: A description of the units of measure, e.g. "bytes".
+ result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+ print_to_stdout: If True, also prints the output to stdout (the formatted
+ result string is returned either way).
+
+ Returns:
+ String of the formatted perf result.
+ """
+ assert perf_result_data_type.IsValidType(result_type), \
+ 'result type: %s is invalid' % result_type
+
+ trace_name = _EscapePerfResult(trace)
+
+ if (result_type == perf_result_data_type.UNIMPORTANT or
+ result_type == perf_result_data_type.DEFAULT or
+ result_type == perf_result_data_type.INFORMATIONAL):
+ assert isinstance(values, list)
+ assert '/' not in measurement
+ flattened_values = FlattenList(values)
+ assert len(flattened_values)
+ value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+ output = '%s%s: %s%s%s %s' % (
+ RESULT_TYPES[result_type],
+ _EscapePerfResult(measurement),
+ trace_name,
+ # Do not show the equal sign if the trace is empty. This usually happens
+ # when the measurement alone is clear enough to describe the result.
+ '= ' if trace_name else '',
+ value,
+ units)
+ else:
+ assert perf_result_data_type.IsHistogram(result_type)
+ assert isinstance(values, list)
+ # The histograms can only be printed individually; there's no computation
+ # across different histograms.
+ assert len(values) == 1
+ value = values[0]
+ output = '%s%s: %s= %s %s' % (
+ RESULT_TYPES[result_type],
+ _EscapePerfResult(measurement),
+ trace_name,
+ value,
+ units)
+ avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+ if avg:
+ output += '\nAvg %s: %f%s' % (measurement, avg, units)
+ if sd:
+ output += '\nSd %s: %f%s' % (measurement, sd, units)
+ if print_to_stdout:
+ print output
+ sys.stdout.flush()
+ return output
+
+
+def ReportPerfResult(chart_data, graph_title, trace_title, value, units,
+ improvement_direction='down', important=True):
+ """Outputs test results in correct format.
+
+ If chart_data is None, it outputs data in old format. If chart_data is a
+ dictionary, formats in chartjson format. If any other format defaults to
+ old format.
+
+ Args:
+ chart_data: A dictionary corresponding to perf results in the chartjson
+ format.
+ graph_title: A string containing the name of the chart to add the result
+ to.
+ trace_title: A string containing the name of the trace within the chart
+ to add the result to.
+ value: The value of the result being reported.
+ units: The units of the value being reported.
+ improvement_direction: A string denoting whether higher or lower is
+ better for the result. Either 'up' or 'down'.
+ important: A boolean denoting whether the result is important or not.
+ """
+ if chart_data and isinstance(chart_data, dict):
+ chart_data['charts'].setdefault(graph_title, {})
+ chart_data['charts'][graph_title][trace_title] = {
+ 'type': 'scalar',
+ 'value': value,
+ 'units': units,
+ 'improvement_direction': improvement_direction,
+ 'important': important
+ }
+ else:
+ PrintPerfResult(graph_title, trace_title, [value], units)
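A sketch of how the two entry points above fit together (all names and numbers invented for illustration):

    import perf_tests_results_helper

    # Old-style output; prints a line such as
    #   *RESULT warm_load_time: reference= [273.000000,283.000000,253.000000] ms
    # followed by Avg/Sd lines computed from the samples, and returns the string.
    perf_tests_results_helper.PrintPerfResult(
        'warm_load_time', 'reference', [273.0, 283.0, 253.0], 'ms')

    # chartjson-style output: the result is recorded in the dict instead.
    chart_data = {'charts': {}}
    perf_tests_results_helper.ReportPerfResult(
        chart_data, 'warm_load_time', 'reference', 269.7, 'ms')
    # chart_data['charts']['warm_load_time']['reference']['value'] == 269.7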
diff --git a/deps/v8/build/util/lib/common/unittest_util.py b/deps/v8/build/util/lib/common/unittest_util.py
new file mode 100644
index 0000000000..9683ab717a
--- /dev/null
+++ b/deps/v8/build/util/lib/common/unittest_util.py
@@ -0,0 +1,155 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for dealing with the python unittest module."""
+
+import fnmatch
+import re
+import sys
+import unittest
+
+
+class _TextTestResult(unittest._TextTestResult):
+ """A test result class that can print formatted text results to a stream.
+
+ Results are printed in conformance with the gtest output format, e.g.:
+ [ RUN ] autofill.AutofillTest.testAutofillInvalid: "test desc."
+ [ OK ] autofill.AutofillTest.testAutofillInvalid
+ [ RUN ] autofill.AutofillTest.testFillProfile: "test desc."
+ [ OK ] autofill.AutofillTest.testFillProfile
+ [ RUN ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
+ [ OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+ """
+ def __init__(self, stream, descriptions, verbosity):
+ unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+ self._fails = set()
+
+ def _GetTestURI(self, test):
+ return '%s.%s.%s' % (test.__class__.__module__,
+ test.__class__.__name__,
+ test._testMethodName)
+
+ def getDescription(self, test):
+ return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+
+ def startTest(self, test):
+ unittest.TestResult.startTest(self, test)
+ self.stream.writeln('[ RUN ] %s' % self.getDescription(test))
+
+ def addSuccess(self, test):
+ unittest.TestResult.addSuccess(self, test)
+ self.stream.writeln('[ OK ] %s' % self._GetTestURI(test))
+
+ def addError(self, test, err):
+ unittest.TestResult.addError(self, test, err)
+ self.stream.writeln('[ ERROR ] %s' % self._GetTestURI(test))
+ self._fails.add(self._GetTestURI(test))
+
+ def addFailure(self, test, err):
+ unittest.TestResult.addFailure(self, test, err)
+ self.stream.writeln('[ FAILED ] %s' % self._GetTestURI(test))
+ self._fails.add(self._GetTestURI(test))
+
+ def getRetestFilter(self):
+ return ':'.join(self._fails)
+
+
+class TextTestRunner(unittest.TextTestRunner):
+ """Test Runner for displaying test results in textual format.
+
+ Results are displayed in conformance with google test output.
+ """
+
+ def __init__(self, verbosity=1):
+ unittest.TextTestRunner.__init__(self, stream=sys.stderr,
+ verbosity=verbosity)
+
+ def _makeResult(self):
+ return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+
+def GetTestsFromSuite(suite):
+ """Returns all the tests from a given test suite."""
+ tests = []
+ for x in suite:
+ if isinstance(x, unittest.TestSuite):
+ tests += GetTestsFromSuite(x)
+ else:
+ tests += [x]
+ return tests
+
+
+def GetTestNamesFromSuite(suite):
+ """Returns a list of every test name in the given suite."""
+ return [GetTestName(test) for test in GetTestsFromSuite(suite)]
+
+
+def GetTestName(test):
+ """Gets the test name of the given unittest test."""
+ return '.'.join([test.__class__.__module__,
+ test.__class__.__name__,
+ test._testMethodName])
+
+
+def FilterTestSuite(suite, gtest_filter):
+ """Returns a new filtered tests suite based on the given gtest filter.
+
+ See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+ for gtest_filter specification.
+ """
+ return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
+
+
+def FilterTests(all_tests, gtest_filter):
+ """Filter a list of tests based on the given gtest filter.
+
+ Args:
+ all_tests: List of tests (unittest.TestSuite)
+ gtest_filter: Filter to apply.
+
+ Returns:
+ Filtered subset of the given list of tests.
+ """
+ test_names = [GetTestName(test) for test in all_tests]
+ filtered_names = FilterTestNames(test_names, gtest_filter)
+ return [test for test in all_tests if GetTestName(test) in filtered_names]
+
+
+def FilterTestNames(all_tests, gtest_filter):
+ """Filter a list of test names based on the given gtest filter.
+
+ See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+ for gtest_filter specification.
+
+ Args:
+ all_tests: List of test names.
+ gtest_filter: Filter to apply.
+
+ Returns:
+ Filtered subset of the given list of test names.
+ """
+ pattern_groups = gtest_filter.split('-')
+ positive_patterns = ['*']
+ if pattern_groups[0]:
+ positive_patterns = pattern_groups[0].split(':')
+ negative_patterns = []
+ if len(pattern_groups) > 1:
+ negative_patterns = pattern_groups[1].split(':')
+
+ neg_pats = None
+ if negative_patterns:
+ neg_pats = re.compile('|'.join(fnmatch.translate(p) for p in
+ negative_patterns))
+
+ tests = []
+ test_set = set()
+ for pattern in positive_patterns:
+ pattern_tests = [
+ test for test in all_tests
+ if (fnmatch.fnmatch(test, pattern)
+ and not (neg_pats and neg_pats.match(test))
+ and test not in test_set)]
+ tests.extend(pattern_tests)
+ test_set.update(pattern_tests)
+ return tests
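A minimal sketch of the helpers above working together (the test case is invented for illustration):

    import unittest
    import unittest_util

    class MathTest(unittest.TestCase):
      def testAdd(self):
        self.assertEqual(1 + 1, 2)
      def testSub(self):
        self.assertEqual(2 - 1, 1)

    suite = unittest.defaultTestLoader.loadTestsFromTestCase(MathTest)
    # Keep only testAdd; a '-' would start negative patterns, e.g. '*-*.testSub'.
    filtered = unittest_util.FilterTestSuite(suite, '*.testAdd')
    unittest_util.TextTestRunner(verbosity=1).run(filtered)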
diff --git a/deps/v8/build/util/lib/common/unittest_util_test.py b/deps/v8/build/util/lib/common/unittest_util_test.py
new file mode 100755
index 0000000000..1514c9b6d4
--- /dev/null
+++ b/deps/v8/build/util/lib/common/unittest_util_test.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import logging
+import sys
+import unittest
+import unittest_util
+
+
+class FilterTestNamesTest(unittest.TestCase):
+
+ possible_list = ["Foo.One",
+ "Foo.Two",
+ "Foo.Three",
+ "Bar.One",
+ "Bar.Two",
+ "Bar.Three",
+ "Quux.One",
+ "Quux.Two",
+ "Quux.Three"]
+
+ def testMatchAll(self):
+ x = unittest_util.FilterTestNames(self.possible_list, "*")
+ self.assertEquals(x, self.possible_list)
+
+ def testMatchPartial(self):
+ x = unittest_util.FilterTestNames(self.possible_list, "Foo.*")
+ self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"])
+
+ def testMatchFull(self):
+ x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two")
+ self.assertEquals(x, ["Foo.Two"])
+
+ def testMatchTwo(self):
+ x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*")
+ self.assertEquals(x, ["Bar.One",
+ "Bar.Two",
+ "Bar.Three",
+ "Foo.One",
+ "Foo.Two",
+ "Foo.Three"])
+
+ def testMatchWithNegative(self):
+ x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three")
+ self.assertEquals(x, ["Bar.One",
+ "Bar.Two",
+ "Foo.One",
+ "Foo.Two"])
+
+ def testMatchOverlapping(self):
+ x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two")
+ self.assertEquals(x, ["Bar.One",
+ "Bar.Two",
+ "Bar.Three",
+ "Foo.Two",
+ "Quux.Two"])
+
+
+if __name__ == '__main__':
+ logging.getLogger().setLevel(logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/util/lib/common/util.py b/deps/v8/build/util/lib/common/util.py
new file mode 100644
index 0000000000..a415b1f534
--- /dev/null
+++ b/deps/v8/build/util/lib/common/util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic utilities for all python scripts."""
+
+import atexit
+import httplib
+import os
+import signal
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urlparse
+
+
+def GetPlatformName():
+ """Return a string to be used in paths for the platform."""
+ if IsWindows():
+ return 'win'
+ if IsMac():
+ return 'mac'
+ if IsLinux():
+ return 'linux'
+ raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def IsWindows():
+ return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+ return sys.platform.startswith('linux')
+
+
+def IsMac():
+ return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+ """Deletes a directory recursively, which must exist."""
+ # Don't use shutil.rmtree because it can't delete read-only files on Win.
+ for root, dirs, files in os.walk(path, topdown=False):
+ for name in files:
+ filename = os.path.join(root, name)
+ os.chmod(filename, stat.S_IWRITE)
+ os.remove(filename)
+ for name in dirs:
+ os.rmdir(os.path.join(root, name))
+ os.rmdir(path)
+
+
+def Delete(path):
+ """Deletes the given file or directory (recursively), which must exist."""
+ if os.path.isdir(path):
+ _DeleteDir(path)
+ else:
+ os.remove(path)
+
+
+def MaybeDelete(path):
+ """Deletes the given file or directory (recurisvely), if it exists."""
+ if os.path.exists(path):
+ Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+ """Creates a temporary directory and returns an absolute path to it.
+
+ The temporary directory is automatically deleted when the python interpreter
+ exits normally.
+
+ Args:
+ parent_dir: the directory to create the temp dir in. If None, the system
+ temp dir is used.
+
+ Returns:
+ The absolute path to the temporary directory.
+ """
+ path = tempfile.mkdtemp(dir=parent_dir)
+ atexit.register(MaybeDelete, path)
+ return path
+
+
+def Unzip(zip_path, output_dir):
+ """Unzips the given zip file using a system installed unzip tool.
+
+ Args:
+ zip_path: zip file to unzip.
+ output_dir: directory to unzip the contents of the zip file. The directory
+ must exist.
+
+ Raises:
+ RuntimeError if the unzip operation fails.
+ """
+ if IsWindows():
+ unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+ else:
+ unzip_cmd = ['unzip', '-o']
+ unzip_cmd += [zip_path]
+ if RunCommand(unzip_cmd, output_dir) != 0:
+ raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+ """Terminate the given pid."""
+ if IsWindows():
+ subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+ else:
+ os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+ """Runs the given command and returns the exit code.
+
+ Args:
+ cmd: list of command arguments.
+ cwd: working directory to execute the command, or None if the current
+ working directory should be used.
+
+ Returns:
+ The exit code of the command.
+ """
+ process = subprocess.Popen(cmd, cwd=cwd)
+ process.wait()
+ return process.returncode
+
+
+def DoesUrlExist(url):
+ """Determines whether a resource exists at the given URL.
+
+ Args:
+ url: URL to be verified.
+
+ Returns:
+ True if url exists, otherwise False.
+ """
+ parsed = urlparse.urlparse(url)
+ try:
+ conn = httplib.HTTPConnection(parsed.netloc)
+ conn.request('HEAD', parsed.path)
+ response = conn.getresponse()
+ except (socket.gaierror, socket.error):
+ return False
+ finally:
+ conn.close()
+ # Follow both permanent (301) and temporary (302) redirects.
+ if response.status == 302 or response.status == 301:
+ return DoesUrlExist(response.getheader('location'))
+ return response.status == 200
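A short sketch exercising a few of the helpers above (the command run is an arbitrary example):

    import sys
    import util

    workdir = util.MakeTempDir()  # removed automatically at interpreter exit
    exit_code = util.RunCommand(
        [sys.executable, '-c', 'print("hello from a child process")'],
        cwd=workdir)
    assert exit_code == 0
    print util.GetPlatformName()  # 'win', 'mac' or 'linux'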
diff --git a/deps/v8/build/util/process_version.gni b/deps/v8/build/util/process_version.gni
new file mode 100644
index 0000000000..e27346e6f0
--- /dev/null
+++ b/deps/v8/build/util/process_version.gni
@@ -0,0 +1,126 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the version processing script over the given template file to produce
+# an output file. This is used for generating various forms of files that
+# incorporate the product name and version.
+#
+# Unlike GYP, this will actually compile the resulting file, so you don't need
+# to add it separately to the sources, just depend on the target.
+#
+# In GYP this is a rule that runs once per ".ver" file. In GN this just
+# processes one file per invocation of the template so you may have to have
+# multiple targets.
+#
+# Parameters:
+# sources (optional):
+# List of file names to read. When converting a GYP target, this should
+# list the 'source' (see above) as well as any extra_variable_files.
+# The files will be passed to version.py in the order specified here.
+#
+# output:
+# File name of file to write. In GYP this is unspecified and it will
+# make up a file name for you based on the input name, and tack on
+# "_version.rc" to the end. But in GN you need to specify the full name.
+#
+# template_file (optional):
+# Template file to use (not a list). Most Windows users that want to use
+# this to process a .rc template should use process_version_rc_template(),
+# defined in //chrome/process_version_rc_template.gni, instead.
+#
+# extra_args (optional):
+# Extra arguments to pass to version.py. Any "-f <filename>" args should
+# use sources instead.
+#
+# process_only (optional, defaults to false):
+# Set to generate only one action that processes the version file and
+# doesn't attempt to link the result into a source set. This is for if
+# you are processing the version as data only.
+#
+# visibility (optional)
+#
+# Example:
+# process_version("myversion") {
+# sources = [
+# "//chrome/VERSION"
+# "myfile.h.in"
+# ]
+# output = "$target_gen_dir/myfile.h"
+# extra_args = [ "-e", "FOO=42" ]
+# }
+template("process_version") {
+ assert(defined(invoker.output), "Output must be defined for $target_name")
+
+ process_only = defined(invoker.process_only) && invoker.process_only
+
+ if (process_only) {
+ action_name = target_name
+ } else {
+ action_name = target_name + "_action"
+ source_set_name = target_name
+ }
+
+ action(action_name) {
+ script = "//build/util/version.py"
+
+ inputs = []
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+ if (defined(invoker.template_file)) {
+ inputs += [ invoker.template_file ]
+ }
+
+ outputs = [
+ invoker.output,
+ ]
+
+ args = []
+
+ if (is_official_build) {
+ args += [ "--official" ]
+ }
+
+ if (defined(invoker.sources)) {
+ inputs += invoker.sources
+ foreach(i, invoker.sources) {
+ args += [
+ "-f",
+ rebase_path(i, root_build_dir),
+ ]
+ }
+ }
+
+ if (defined(invoker.extra_args)) {
+ args += invoker.extra_args
+ }
+ args += [
+ "-o",
+ rebase_path(invoker.output, root_build_dir),
+ ]
+ if (defined(invoker.template_file)) {
+ args += [ rebase_path(invoker.template_file, root_build_dir) ]
+ }
+
+ forward_variables_from(invoker, [ "deps" ])
+
+ if (process_only) {
+ # When processing only, visibility gets applied to this target.
+ forward_variables_from(invoker, [ "visibility" ])
+ } else {
+ # When linking the result, only the source set can depend on the action.
+ visibility = [ ":$source_set_name" ]
+ }
+ }
+
+ if (!process_only) {
+ source_set(source_set_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+ sources = get_target_outputs(":$action_name")
+ public_deps = [
+ ":$action_name",
+ ]
+ }
+ }
+}
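The documentation block above already shows the linking form; as a complement, a sketch of the process_only form, where the generated file is consumed as data rather than compiled (target and file names hypothetical):

    process_version("my_version_json") {
      process_only = true
      sources = [ "//chrome/VERSION" ]
      template_file = "version_info.json.in"
      output = "$target_gen_dir/version_info.json"
    }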
diff --git a/deps/v8/build/util/version.gni b/deps/v8/build/util/version.gni
new file mode 100644
index 0000000000..5bfceb52f9
--- /dev/null
+++ b/deps/v8/build/util/version.gni
@@ -0,0 +1,159 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This exposes the Chrome version as GN variables for use in build files.
+# This also generates the various version codes used for builds of chrome for
+# android.
+#
+# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
+# However, it is far better to write an action (or use the process_version
+# wrapper in build/util/process_version.gni) to generate a file at build-time
+# with the information you need. This allows better dependency checking and
+# GN will run faster.
+#
+# These values should only be used if you REALLY need to depend on them at
+# build-time, for example, in the computation of output file names.
+
+# Give version.py a pattern that will expand to a GN scope consisting of
+# all values we need at once.
+_version_dictionary_template = "full = \"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\" " +
+ "major = \"@MAJOR@\" minor = \"@MINOR@\" " +
+ "build = \"@BUILD@\" patch = \"@PATCH@\" "
+
+# The file containing the Chrome version number.
+chrome_version_file = "//chrome/VERSION"
+
+_script_arguments = []
+
+if (target_os == "mac") {
+ _version_dictionary_template += "patch_hi = @PATCH_HI@ patch_lo = @PATCH_LO@ "
+
+ _script_arguments += [
+ "-e",
+ "PATCH_HI=int(PATCH)//256",
+ "-e",
+ "PATCH_LO=int(PATCH)%256",
+ ]
+} else if (target_os == "android") {
+ import("//build/config/android/config.gni")
+
+ _version_dictionary_template +=
+ "chrome_version_code = " + "\"@CHROME_VERSION_CODE@\" " +
+ "chrome_modern_version_code = \"@CHROME_MODERN_VERSION_CODE@\" " +
+ "monochrome_version_code = \"@MONOCHROME_VERSION_CODE@\" " +
+ "trichrome_version_code = \"@TRICHROME_VERSION_CODE@\" " +
+ "notouch_chrome_version_code = \"@NOTOUCH_CHROME_VERSION_CODE@\" " +
+ "webview_stable_version_code = \"@WEBVIEW_STABLE_VERSION_CODE@\" " +
+ "webview_beta_version_code = \"@WEBVIEW_BETA_VERSION_CODE@\" " +
+ "webview_dev_version_code = \"@WEBVIEW_DEV_VERSION_CODE@\" "
+
+ if (target_cpu == "arm64" || target_cpu == "x64") {
+ _version_dictionary_template +=
+ "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" " +
+ "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" " +
+ "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" " +
+ "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" "
+ }
+
+ _script_arguments += [
+ "-a",
+ target_cpu,
+ ]
+
+ if (!public_android_sdk) {
+ _script_arguments += [ "--next" ]
+ }
+}
+
+_script_arguments += [
+ "-f",
+ rebase_path(chrome_version_file, root_build_dir),
+ "-t",
+ _version_dictionary_template,
+ "--os",
+ target_os,
+]
+
+_result = exec_script("version.py",
+ _script_arguments,
+ "scope",
+ [ chrome_version_file ])
+
+# Full version. For example "45.0.12321.0"
+chrome_version_full = _result.full
+
+# The constituent parts of the full version.
+chrome_version_major = _result.major
+chrome_version_minor = _result.minor
+chrome_version_build = _result.build
+chrome_version_patch = _result.patch
+
+if (target_os == "mac") {
+ chrome_version_patch_hi = _result.patch_hi
+ chrome_version_patch_lo = _result.patch_lo
+
+ chrome_dylib_version = "$chrome_version_build.$chrome_version_patch_hi" +
+ ".$chrome_version_patch_lo"
+} else if (target_os == "android") {
+ forward_variables_from(_result,
+ [
+ "chrome_modern_version_code",
+ "chrome_version_code",
+ "monochrome_64_32_version_code",
+ "monochrome_64_version_code",
+ "monochrome_version_code",
+ "notouch_chrome_version_code",
+ "trichrome_64_32_version_code",
+ "trichrome_64_version_code",
+ "trichrome_version_code",
+ "webview_beta_version_code",
+ "webview_dev_version_code",
+ "webview_stable_version_code",
+ ])
+
+ chrome_version_name = chrome_version_full
+
+ lines_to_write__deprecated = [
+ "VersionName=$chrome_version_name",
+ "Chrome=$chrome_version_code",
+ "ChromeModern=$chrome_modern_version_code",
+ "Monochrome=$monochrome_version_code",
+ "TrichromeChrome=$trichrome_version_code",
+ "NoTouchChrome=$notouch_chrome_version_code",
+ "WebviewStable=$webview_stable_version_code",
+ "WebviewBeta=$webview_beta_version_code",
+ "WebviewDev=$webview_dev_version_code",
+ ]
+ lines_to_write = [
+ "VersionName: $chrome_version_name",
+ "Chrome: $chrome_version_code",
+ "ChromeModern: $chrome_modern_version_code",
+ "Monochrome: $monochrome_version_code",
+ "TrichromeChrome: $trichrome_version_code",
+ "MonochromeFP: $notouch_chrome_version_code",
+ "WebviewStable: $webview_stable_version_code",
+ "WebviewBeta: $webview_beta_version_code",
+ "WebviewDev: $webview_dev_version_code",
+ ]
+
+ if (target_cpu == "arm64" || target_cpu == "x64") {
+ lines_to_write__deprecated += [
+ "Monochrome_64_32=$monochrome_64_32_version_code",
+ "Monochrome_64=$monochrome_64_version_code",
+ "TrichromeChrome_64_32=$trichrome_64_32_version_code",
+ "TrichromeChrome_64=$trichrome_64_version_code",
+ ]
+ lines_to_write += [
+ "Monochrome6432: $monochrome_64_32_version_code",
+ "Monochrome64: $monochrome_64_version_code",
+ "TrichromeChrome6432: $trichrome_64_32_version_code",
+ "TrichromeChrome64: $trichrome_64_version_code",
+ ]
+ }
+
+ write_file("$root_out_dir/android_chrome_versions.txt", lines_to_write)
+
+ # TODO (stonebraker) For a 3-way patch; to be removed
+ write_file("$root_out_dir/chrome_versions.txt", lines_to_write__deprecated)
+}
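As a sketch of the narrow use case the header comment permits, a build file could depend on one of these values to compute an output file name (target and script names hypothetical):

    import("//build/util/version.gni")

    action("stamp_release") {
      script = "stamp.py"
      outputs = [ "$target_gen_dir/release_$chrome_version_full.stamp" ]
      args = [ chrome_version_full ] + rebase_path(outputs, root_build_dir)
    }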
diff --git a/deps/v8/build/util/version.py b/deps/v8/build/util/version.py
new file mode 100755
index 0000000000..4f440c4ee7
--- /dev/null
+++ b/deps/v8/build/util/version.py
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+import android_chrome_version
+
+
+def FetchValuesFromFile(values_dict, file_name):
+ """
+ Fetches KEYWORD=VALUE settings from the specified file.
+
+ Everything to the left of the first '=' is the keyword,
+ everything to the right is the value. No stripping of
+ white space, so beware.
+
+ The file must exist, otherwise you get the Python exception from open().
+ """
+ for line in open(file_name, 'r').readlines():
+ key, val = line.rstrip('\r\n').split('=', 1)
+ values_dict[key] = val
+
+
+def FetchValues(file_list, is_official_build=None):
+ """
+ Returns a dictionary of values to be used for substitution.
+
+ Populates the dictionary with KEYWORD=VALUE settings from the files in
+ 'file_list'.
+
+ Explicitly adds the following value from internal calculations:
+
+ OFFICIAL_BUILD
+ """
+ CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+ if CHROME_BUILD_TYPE == '_official' or is_official_build:
+ official_build = '1'
+ else:
+ official_build = '0'
+
+ values = dict(
+ OFFICIAL_BUILD = official_build,
+ )
+
+ for file_name in file_list:
+ FetchValuesFromFile(values, file_name)
+
+ return values
+
+
+def SubstTemplate(contents, values):
+ """
+ Returns the template with substituted values from the specified dictionary.
+
+ Keywords to be substituted are surrounded by '@': @KEYWORD@.
+
+ No attempt is made to avoid recursive substitution. The order
+ of evaluation is arbitrary, based on the order of the keywords returned
+ by the Python dictionary. So do NOT substitute a value that
+ contains any @KEYWORD@ strings expecting them to be recursively
+ substituted, okay?
+ """
+ for key, val in values.items():
+ try:
+ contents = contents.replace('@' + key + '@', val)
+ except TypeError:
+ print(repr(key), repr(val))
+ return contents
+
+
+def SubstFile(file_name, values):
+ """
+ Returns the contents of the specified file_name with substituted values.
+
+ Substituted values come from the specified dictionary.
+
+ This is like SubstTemplate, except it operates on a file.
+ """
+ template = open(file_name, 'r').read()
+ return SubstTemplate(template, values)
+
+
+def WriteIfChanged(file_name, contents):
+ """
+ Writes the specified contents to the specified file_name.
+
+ Does nothing if the contents aren't different than the current contents.
+ """
+ try:
+ old_contents = open(file_name, 'r').read()
+ except EnvironmentError:
+ pass
+ else:
+ if contents == old_contents:
+ return
+ os.unlink(file_name)
+ open(file_name, 'w').write(contents)
+
+
+def BuildParser():
+ """Build argparse parser, with added arguments."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-f', '--file', action='append', default=[],
+ help='Read variables from FILE.')
+ parser.add_argument('-i', '--input', default=None,
+ help='Read strings to substitute from FILE.')
+ parser.add_argument('-o', '--output', default=None,
+ help='Write substituted strings to FILE.')
+ parser.add_argument('-t', '--template', default=None,
+ help='Use TEMPLATE as the strings to substitute.')
+ parser.add_argument(
+ '-e',
+ '--eval',
+ action='append',
+ default=[],
+ help='Evaluate VAL after reading variables. Can be used '
+ 'to synthesize variables. e.g. -e \'PATCH_HI=int('
+ 'PATCH)//256\'.')
+ parser.add_argument(
+ '-a',
+ '--arch',
+ default=None,
+ choices=android_chrome_version.ARCH_CHOICES,
+ help='Set which cpu architecture the build is for.')
+ parser.add_argument('--os', default=None, help='Set the target os.')
+ parser.add_argument('--official', action='store_true',
+ help='Whether the current build should be an official '
+ 'build, used in addition to the environment '
+ 'variable.')
+ parser.add_argument(
+ '--next',
+ action='store_true',
+ help='Whether the current build should be a "next" '
+ 'build, which targets pre-release versions of '
+ 'Android')
+ parser.add_argument('args', nargs=argparse.REMAINDER,
+ help='For compatibility: INPUT and OUTPUT can be '
+ 'passed as positional arguments.')
+ return parser
+
+
+def BuildEvals(options, parser):
+ """Construct a dict of passed '-e' arguments for evaluating."""
+ evals = {}
+ for expression in options.eval:
+ try:
+ evals.update(dict([expression.split('=', 1)]))
+ except ValueError:
+ parser.error('-e requires VAR=VAL')
+ return evals
+
+
+def ModifyOptionsCompat(options, parser):
+ """Support compatibility with old versions.
+
+ Specifically, for old versions that considered the first two
+ positional arguments shorthands for --input and --output.
+ """
+ while len(options.args) and (options.input is None or options.output is None):
+ if options.input is None:
+ options.input = options.args.pop(0)
+ elif options.output is None:
+ options.output = options.args.pop(0)
+ if options.args:
+ parser.error('Unexpected arguments: %r' % options.args)
+
+
+def GenerateValues(options, evals):
+ """Construct a dict of raw values used to generate output.
+
+ e.g. this could return a dict like
+ {
+ 'BUILD': 74,
+ }
+
+ which would be used to resolve a template like
+ 'build = "@BUILD@"' into 'build = "74"'
+
+ """
+ values = FetchValues(options.file, options.official)
+
+ for key, val in evals.items():
+ values[key] = str(eval(val, globals(), values))
+
+ if options.os == 'android':
+ android_chrome_version_codes = android_chrome_version.GenerateVersionCodes(
+ values, options.arch, options.next)
+ values.update(android_chrome_version_codes)
+
+ return values
+
+
+def GenerateOutputContents(options, values):
+ """Construct output string (e.g. from template).
+
+ Arguments:
+ options -- argparse parsed arguments
+ values -- dict with raw values used to resolve the keywords in a template
+ string
+ """
+
+ if options.template is not None:
+ return SubstTemplate(options.template, values)
+ elif options.input:
+ return SubstFile(options.input, values)
+ else:
+ # Generate a default set of version information.
+ return """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+
+def BuildOutput(args):
+ """Gets all input and output values needed for writing output."""
+ # Build argparse parser with arguments
+ parser = BuildParser()
+ options = parser.parse_args(args)
+
+ # Get dict of passed '-e' arguments for evaluating
+ evals = BuildEvals(options, parser)
+ # For compatibility with interface that considered first two positional
+ # arguments shorthands for --input and --output.
+ ModifyOptionsCompat(options, parser)
+
+ # Get the raw values that will be used to generate the output
+ values = GenerateValues(options, evals)
+ # Get the output string
+ contents = GenerateOutputContents(options, values)
+
+ return {'options': options, 'contents': contents}
+
+
+def main():
+ output = BuildOutput(sys.argv[1:])
+
+ if output['options'].output is not None:
+ WriteIfChanged(output['options'].output, output['contents'])
+ else:
+ print(output['contents'])
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
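A minimal sketch of driving the module programmatically (the VERSION file contents and the template are invented; android_chrome_version must be importable alongside version.py):

    # Suppose a file named VERSION contains:
    #   MAJOR=74
    #   MINOR=0
    #   BUILD=3720
    #   PATCH=0
    import version

    result = version.BuildOutput(
        ['-f', 'VERSION', '-t', 'v@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
    print(result['contents'])  # -> v74.0.3720.0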
diff --git a/deps/v8/build/util/version_test.py b/deps/v8/build/util/version_test.py
new file mode 100644
index 0000000000..2a65ddc716
--- /dev/null
+++ b/deps/v8/build/util/version_test.py
@@ -0,0 +1,174 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+import mock
+import version
+
+
+def _ReplaceArgs(args, *replacements):
+ new_args = args[:]
+ for flag, val in replacements:
+ flag_index = args.index(flag)
+ new_args[flag_index + 1] = val
+ return new_args
+
+
+class _VersionTest(unittest.TestCase):
+ """Unittests for the version module.
+ """
+
+ _CHROME_VERSION_FILE = os.path.join(
+ os.path.dirname(__file__), os.pardir, os.pardir, 'chrome', 'VERSION')
+
+ _SCRIPT = os.path.join(os.path.dirname(__file__), 'version.py')
+
+ _EXAMPLE_VERSION = {
+ 'MAJOR': '74',
+ 'MINOR': '0',
+ 'BUILD': '3720',
+ 'PATCH': '0',
+ }
+
+ _EXAMPLE_TEMPLATE = (
+ 'full = "@MAJOR@.@MINOR@.@BUILD@.@PATCH@" '
+ 'major = "@MAJOR@" minor = "@MINOR@" '
+ 'build = "@BUILD@" patch = "@PATCH@" version_id = @VERSION_ID@ ')
+
+ _ANDROID_CHROME_VARS = [
+ 'chrome_version_code',
+ 'chrome_modern_version_code',
+ 'monochrome_version_code',
+ 'trichrome_version_code',
+ 'webview_stable_version_code',
+ 'webview_beta_version_code',
+ 'webview_dev_version_code',
+ ]
+
+ _EXAMPLE_ANDROID_TEMPLATE = (
+ _EXAMPLE_TEMPLATE + ''.join(
+ ['%s = "@%s@" ' % (el, el.upper()) for el in _ANDROID_CHROME_VARS]))
+
+ _EXAMPLE_ARGS = [
+ '-f',
+ _CHROME_VERSION_FILE,
+ '-t',
+ _EXAMPLE_TEMPLATE,
+ ]
+
+ _EXAMPLE_ANDROID_ARGS = _ReplaceArgs(_EXAMPLE_ARGS,
+ ['-t', _EXAMPLE_ANDROID_TEMPLATE]) + [
+ '-a',
+ 'arm',
+ '--os',
+ 'android',
+ ]
+
+ @staticmethod
+ def _RunBuildOutput(new_version_values={},
+ get_new_args=lambda old_args: old_args):
+ """Parameterized helper method for running the main testable method in
+ version.py.
+
+ Keyword arguments:
+ new_version_values -- dict used to update _EXAMPLE_VERSION
+ get_new_args -- lambda for updating _EXAMPLE_ANDROID_ARGS
+ """
+
+ with mock.patch('version.FetchValuesFromFile') as \
+ fetch_values_from_file_mock:
+
+ fetch_values_from_file_mock.side_effect = (lambda values, file:
+ values.update(
+ dict(_VersionTest._EXAMPLE_VERSION, **new_version_values)))
+
+ new_args = get_new_args(_VersionTest._EXAMPLE_ARGS)
+ return version.BuildOutput(new_args)
+
+ def testFetchValuesFromFile(self):
+ """It returns a dict in correct format - { <str>: <str> }, to verify
+ assumption of other tests that mock this function
+ """
+ result = {}
+ version.FetchValuesFromFile(result, self._CHROME_VERSION_FILE)
+
+ for key, val in result.iteritems():
+ self.assertIsInstance(key, str)
+ self.assertIsInstance(val, str)
+
+ def testBuildOutputAndroid(self):
+ """Assert it gives includes assignments of expected variables"""
+ output = self._RunBuildOutput(
+ get_new_args=lambda args: self._EXAMPLE_ANDROID_ARGS)
+ contents = output['contents']
+
+ self.assertRegexpMatches(contents, r'\bchrome_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\bchrome_modern_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents, r'\bmonochrome_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents, r'\btrichrome_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\bwebview_stable_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents, r'\bwebview_beta_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents, r'\bwebview_dev_version_code = "\d+"\s')
+
+ def testBuildOutputAndroidArchVariantsArm64(self):
+ """Assert 64-bit-specific version codes"""
+ new_template = (
+ self._EXAMPLE_ANDROID_TEMPLATE +
+ "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" "
+ "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" "
+ "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" "
+ "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" ")
+ args_with_template = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS,
+ ['-t', new_template])
+ new_args = _ReplaceArgs(args_with_template, ['-a', 'arm64'])
+ output = self._RunBuildOutput(get_new_args=lambda args: new_args)
+ contents = output['contents']
+
+ self.assertRegexpMatches(contents,
+ r'\bmonochrome_64_32_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\bmonochrome_64_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\btrichrome_64_32_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\btrichrome_64_version_code = "\d+"\s')
+
+ def testBuildOutputAndroidArchVariantsX64(self):
+ """Assert 64-bit-specific version codes"""
+ new_template = (
+ self._EXAMPLE_ANDROID_TEMPLATE +
+ "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" "
+ "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" "
+ "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" "
+ "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" ")
+ args_with_template = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS,
+ ['-t', new_template])
+ new_args = _ReplaceArgs(args_with_template, ['-a', 'x64'])
+ output = self._RunBuildOutput(get_new_args=lambda args: new_args)
+ contents = output['contents']
+
+ self.assertRegexpMatches(contents,
+ r'\bmonochrome_64_32_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\bmonochrome_64_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\btrichrome_64_32_version_code = "\d+"\s')
+ self.assertRegexpMatches(contents,
+ r'\btrichrome_64_version_code = "\d+"\s')
+
+ def testBuildOutputAndroidChromeArchInput(self):
+ """Assert it raises an exception when using an invalid architecture input"""
+ new_args = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS, ['-a', 'foobar'])
+ with self.assertRaises(SystemExit) as cm:
+ self._RunBuildOutput(get_new_args=lambda args: new_args)
+
+ self.assertEqual(cm.exception.code, 2)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/util/webkit_version.h.in b/deps/v8/build/util/webkit_version.h.in
new file mode 100644
index 0000000000..41960e7d88
--- /dev/null
+++ b/deps/v8/build/util/webkit_version.h.in
@@ -0,0 +1,9 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// webkit_version.h is generated from webkit_version.h.in. Edit the source!
+
+#define WEBKIT_VERSION_MAJOR 537
+#define WEBKIT_VERSION_MINOR 36
+#define WEBKIT_SVN_REVISION "@@LASTCHANGE@"
diff --git a/deps/v8/build/vs_toolchain.py b/deps/v8/build/vs_toolchain.py
new file mode 100755
index 0000000000..0ce237fe13
--- /dev/null
+++ b/deps/v8/build/vs_toolchain.py
@@ -0,0 +1,509 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import glob
+import json
+import os
+import pipes
+import platform
+import re
+import shutil
+import stat
+import subprocess
+import sys
+
+from gn_helpers import ToGNString
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+
+
+# Use MSVS2017 as the default toolchain.
+CURRENT_DEFAULT_TOOLCHAIN_VERSION = '2017'
+
+
+def SetEnvironmentAndGetRuntimeDllDirs():
+ """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
+ returns the location of the VC runtime DLLs so they can be copied into
+ the output directory after gyp generation.
+
+ Return value is [x64path, x86path, 'Arm64Unused'] or None. The arm64 path
+ is computed separately because there are multiple folders for the arm64 VC
+ runtime.
+ """
+ vs_runtime_dll_dirs = None
+ depot_tools_win_toolchain = \
+ bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+ # When running on a non-Windows host, only do this if the SDK has explicitly
+ # been downloaded before (in which case json_data_file will exist).
+ if ((sys.platform in ('win32', 'cygwin') or os.path.exists(json_data_file))
+ and depot_tools_win_toolchain):
+ if ShouldUpdateToolchain():
+ if len(sys.argv) > 1 and sys.argv[1] == 'update':
+ update_result = Update()
+ else:
+ update_result = Update(no_download=True)
+ if update_result != 0:
+ raise Exception('Failed to update, error code %d.' % update_result)
+ with open(json_data_file, 'r') as tempf:
+ toolchain_data = json.load(tempf)
+
+ toolchain = toolchain_data['path']
+ version = toolchain_data['version']
+ win_sdk = toolchain_data.get('win_sdk')
+ if not win_sdk:
+ win_sdk = toolchain_data['win8sdk']
+ wdk = toolchain_data['wdk']
+ # TODO(scottmg): The order unfortunately matters in these. They should be
+ # split into separate keys for x64/x86/arm64. (See CopyDlls call below).
+ # http://crbug.com/345992
+ vs_runtime_dll_dirs = toolchain_data['runtime_dirs']
+ # The number of runtime_dirs in toolchain_data used to be two (x64/x86)
+ # but changed to three (x64/x86/arm64); this code handles both
+ # possibilities, since the toolchain data can change independently.
+ if len(vs_runtime_dll_dirs) == 2:
+ vs_runtime_dll_dirs.append('Arm64Unused')
+
+ os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+ os.environ['GYP_MSVS_VERSION'] = version
+
+ os.environ['WINDOWSSDKDIR'] = win_sdk
+ os.environ['WDK_DIR'] = wdk
+ # Include the VS runtime in the PATH in case it's not machine-installed.
+ runtime_path = os.path.pathsep.join(vs_runtime_dll_dirs)
+ os.environ['PATH'] = runtime_path + os.path.pathsep + os.environ['PATH']
+ elif sys.platform == 'win32' and not depot_tools_win_toolchain:
+ if not 'GYP_MSVS_OVERRIDE_PATH' in os.environ:
+ os.environ['GYP_MSVS_OVERRIDE_PATH'] = DetectVisualStudioPath()
+ if not 'GYP_MSVS_VERSION' in os.environ:
+ os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
+
+ # When using an installed toolchain these files aren't needed in the output
+ # directory in order to run binaries locally, but they are needed in order
+ # to create isolates or the mini_installer. Copying them to the output
+ # directory ensures that they are available when needed.
+ bitness = platform.architecture()[0]
+ # When running 64-bit python the x64 DLLs will be in System32.
+ # ARM64 binaries will not be available in the system directories because we
+ # don't build on ARM64 machines.
+ x64_path = 'System32' if bitness == '64bit' else 'Sysnative'
+ x64_path = os.path.join(os.path.expandvars('%windir%'), x64_path)
+ vs_runtime_dll_dirs = [x64_path,
+ os.path.join(os.path.expandvars('%windir%'),
+ 'SysWOW64'),
+ 'Arm64Unused']
+
+ return vs_runtime_dll_dirs
+
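A sketch of what a caller sees from the function above on a Windows machine without the packaged toolchain (paths are illustrative; DEPOT_TOOLS_WIN_TOOLCHAIN=0 is assumed):

    import vs_toolchain

    runtime_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
    # e.g. ['C:\\Windows\\System32', 'C:\\Windows\\SysWOW64', 'Arm64Unused'];
    # GYP_MSVS_OVERRIDE_PATH and GYP_MSVS_VERSION are now set in os.environ.
    print(runtime_dirs)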
+
+def _RegistryGetValueUsingWinReg(key, value):
+ """Use the _winreg module to obtain the value of a registry key.
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure. Throws
+ ImportError if _winreg is unavailable.
+ """
+ import _winreg
+ try:
+ root, subkey = key.split('\\', 1)
+ assert root == 'HKLM' # Only need HKLM for now.
+ with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
+ return _winreg.QueryValueEx(hkey, value)[0]
+ except WindowsError:
+ return None
+
+
+def _RegistryGetValue(key, value):
+ try:
+ return _RegistryGetValueUsingWinReg(key, value)
+ except ImportError:
+ raise Exception('The python library _winreg was not found.')
+
+
+def GetVisualStudioVersion():
+ """Return GYP_MSVS_VERSION of Visual Studio.
+ """
+ return os.environ.get('GYP_MSVS_VERSION', CURRENT_DEFAULT_TOOLCHAIN_VERSION)
+
+
+def DetectVisualStudioPath():
+ """Return path to the GYP_MSVS_VERSION of Visual Studio.
+ """
+
+ # Note that this code is used from
+ # build/toolchain/win/setup_toolchain.py as well.
+ version_as_year = GetVisualStudioVersion()
+ year_to_version = {
+ '2017': '15.0',
+ '2019': '16.0',
+ }
+ if version_as_year not in year_to_version:
+ raise Exception(('Visual Studio version %s (from GYP_MSVS_VERSION)'
+ ' not supported. Supported versions are: %s') % (
+ version_as_year, ', '.join(year_to_version.keys())))
+
+ # The VC++ >=2017 install location needs to be located using COM instead of
+ # the registry. For details see:
+ # https://blogs.msdn.microsoft.com/heaths/2016/09/15/changes-to-visual-studio-15-setup/
+ # For now we use a hardcoded default with an environment variable override.
+ for path in (
+ os.environ.get('vs%s_install' % version_as_year),
+ os.path.expandvars('%ProgramFiles(x86)%' +
+ '/Microsoft Visual Studio/%s/Enterprise' %
+ version_as_year),
+ os.path.expandvars('%ProgramFiles(x86)%' +
+ '/Microsoft Visual Studio/%s/Professional' %
+ version_as_year),
+ os.path.expandvars('%ProgramFiles(x86)%' +
+ '/Microsoft Visual Studio/%s/Community' %
+ version_as_year),
+ os.path.expandvars('%ProgramFiles(x86)%' +
+ '/Microsoft Visual Studio/%s/Preview' %
+ version_as_year)):
+ if path and os.path.exists(path):
+ return path
+
+ raise Exception(('Visual Studio Version %s (from GYP_MSVS_VERSION)'
+ ' not found.') % (version_as_year))
+
+
+def _CopyRuntimeImpl(target, source, verbose=True):
+ """Copy |source| to |target| if it doesn't already exist or if it needs to be
+ updated (comparing last modified time as an approximate float match as for
+ some reason the values tend to differ by ~1e-07 despite being copies of the
+ same file... https://crbug.com/603603).
+ """
+ if (os.path.isdir(os.path.dirname(target)) and
+ (not os.path.isfile(target) or
+ abs(os.stat(target).st_mtime - os.stat(source).st_mtime) >= 0.01)):
+ if verbose:
+ print('Copying %s to %s...' % (source, target))
+ if os.path.exists(target):
+ # Make the file writable so that we can delete it now, and keep it
+ # readable.
+ os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
+ os.unlink(target)
+ shutil.copy2(source, target)
+ # Make the file writable so that we can overwrite or delete it later,
+ # keep it readable.
+ os.chmod(target, stat.S_IWRITE | stat.S_IREAD)
+
+
+def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix):
+ """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
+ exist, but the target directory does exist."""
+ if target_cpu == 'arm64':
+ # Windows ARM64 VCRuntime is located at {toolchain_root}/VC/Redist/MSVC/
+ # {x.y.z}/[debug_nonredist/]arm64/Microsoft.VC141.CRT/.
+ vc_redist_root = FindVCRedistRoot()
+ if suffix.startswith('.'):
+ source_dir = os.path.join(vc_redist_root,
+ 'arm64', 'Microsoft.VC141.CRT')
+ else:
+ source_dir = os.path.join(vc_redist_root, 'debug_nonredist',
+ 'arm64', 'Microsoft.VC141.DebugCRT')
+ for file_part in ('msvcp', 'vccorlib', 'vcruntime'):
+ dll = dll_pattern % file_part
+ target = os.path.join(target_dir, dll)
+ source = os.path.join(source_dir, dll)
+ _CopyRuntimeImpl(target, source)
+ # Copy the UCRT files from the Windows SDK. This location includes the
+ # api-ms-win-crt-*.dll files that are not found in the Windows directory.
+ # These files are needed for component builds. If WINDOWSSDKDIR is not set
+ # use the default SDK path. This will be the case when
+ # DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+ win_sdk_dir = os.path.normpath(
+ os.environ.get('WINDOWSSDKDIR',
+ os.path.expandvars('%ProgramFiles(x86)%'
+ '\\Windows Kits\\10')))
+ # ARM64 doesn't have a redist for the ucrt DLLs because they are always
+ # present in the OS.
+ if target_cpu != 'arm64':
+ # Starting with the 10.0.17763 SDK the ucrt files are in a version-named
+ # directory - this handles both cases.
+ redist_dir = os.path.join(win_sdk_dir, 'Redist')
+ version_dirs = glob.glob(os.path.join(redist_dir, '10.*'))
+ if len(version_dirs) > 0:
+ version_dirs.sort(reverse=True)
+ redist_dir = version_dirs[0]
+ ucrt_dll_dirs = os.path.join(redist_dir, 'ucrt', 'DLLs', target_cpu)
+ ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
+ assert len(ucrt_files) > 0
+ for ucrt_src_file in ucrt_files:
+ file_part = os.path.basename(ucrt_src_file)
+ ucrt_dst_file = os.path.join(target_dir, file_part)
+ _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
+ # We must copy ucrtbase.dll for x64/x86, and ucrtbased.dll for all CPU types.
+ if target_cpu != 'arm64' or not suffix.startswith('.'):
+ if not suffix.startswith('.'):
+ # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/
+ # ucrt/.
+ sdk_redist_root = os.path.join(win_sdk_dir, 'bin')
+ sdk_bin_sub_dirs = os.listdir(sdk_redist_root)
+ # Select the most recent SDK if there are multiple versions installed.
+ sdk_bin_sub_dirs.sort(reverse=True)
+ for directory in sdk_bin_sub_dirs:
+ sdk_redist_root_version = os.path.join(sdk_redist_root, directory)
+ if not os.path.isdir(sdk_redist_root_version):
+ continue
+ if re.match(r'10\.\d+\.\d+\.\d+', directory):
+ source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt')
+ break
+ _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
+ os.path.join(source_dir, 'ucrtbase' + suffix))
+
+
+def FindVCComponentRoot(component):
+ """Find the most recent Tools or Redist or other directory in an MSVC install.
+ Typical results are {toolchain_root}/VC/{component}/MSVC/{x.y.z}. The {x.y.z}
+ version number part changes frequently so the highest version number found is
+ used.
+ """
+ assert GetVisualStudioVersion() in ['2017', '2019']
+ SetEnvironmentAndGetRuntimeDllDirs()
+ assert ('GYP_MSVS_OVERRIDE_PATH' in os.environ)
+ vc_component_msvc_root = os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
+ 'VC', component, 'MSVC')
+ vc_component_msvc_contents = os.listdir(vc_component_msvc_root)
+ # Select the most recent toolchain if there are several.
+ vc_component_msvc_contents.sort(reverse=True)
+ for directory in vc_component_msvc_contents:
+ if not os.path.isdir(os.path.join(vc_component_msvc_root, directory)):
+ continue
+ if re.match(r'14\.\d+\.\d+', directory):
+ return os.path.join(vc_component_msvc_root, directory)
+ raise Exception('Unable to find the VC %s directory.' % component)
+
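+# For illustration, FindVCComponentRoot('Tools') typically returns a path like
+# {toolchain_root}/VC/Tools/MSVC/14.16.27023, where the version part varies by
+# install.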
+
+def FindVCRedistRoot():
+ """In >=VS2017, Redist binaries are located in
+ {toolchain_root}/VC/Redist/MSVC/{x.y.z}/{target_cpu}/.
+
+ This returns the '{toolchain_root}/VC/Redist/MSVC/{x.y.z}/' path.
+ """
+ return FindVCComponentRoot('Redist')
+
+
+def _CopyRuntime(target_dir, source_dir, target_cpu, debug):
+ """Copy the VS runtime DLLs, only if the target doesn't exist, but the target
+ directory does exist. Handles VS 2015, 2017 and 2019."""
+ suffix = 'd.dll' if debug else '.dll'
+ # VS 2015, 2017 and 2019 use the same CRT DLLs.
+ _CopyUCRTRuntime(target_dir, source_dir, target_cpu, '%s140' + suffix,
+ suffix)
+
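+# For illustration: with debug=False the pattern '%s140.dll' expands to
+# msvcp140.dll, vccorlib140.dll and vcruntime140.dll; with debug=True the
+# suffix becomes 'd.dll', yielding msvcp140d.dll and friends.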
+
+def CopyDlls(target_dir, configuration, target_cpu):
+ """Copy the VS runtime DLLs into the requested directory as needed.
+
+ configuration is one of 'Debug' or 'Release'.
+ target_cpu is one of 'x86', 'x64' or 'arm64'.
+
+ The debug configuration gets both the debug and release DLLs; the
+ release config only the latter.
+ """
+ vs_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+ if not vs_runtime_dll_dirs:
+ return
+
+ x64_runtime, x86_runtime, arm64_runtime = vs_runtime_dll_dirs
+ if target_cpu == 'x64':
+ runtime_dir = x64_runtime
+ elif target_cpu == 'x86':
+ runtime_dir = x86_runtime
+ elif target_cpu == 'arm64':
+ runtime_dir = arm64_runtime
+ else:
+ raise Exception('Unknown target_cpu: ' + target_cpu)
+ _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False)
+ if configuration == 'Debug':
+ _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
+ _CopyDebugger(target_dir, target_cpu)
+
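+# Example (hypothetical paths): CopyDlls(r'out\Debug', 'Debug', 'x64') copies
+# the release CRT DLLs, the debug CRT DLLs and the debugger DLLs into
+# out\Debug.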
+
+def _CopyDebugger(target_dir, target_cpu):
+ """Copy dbghelp.dll and dbgcore.dll into the requested directory as needed.
+
+ target_cpu is one of 'x86', 'x64' or 'arm64'.
+
+ dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file
+ from the SDK directory avoids using the system copy of dbghelp.dll which then
+ ensures compatibility with recent debug information formats, such as VS
+ 2017 /debug:fastlink PDBs.
+
+ dbgcore.dll is needed when using some functions from dbghelp.dll (like
+ MinidumpWriteDump).
+ """
+ win_sdk_dir = SetEnvironmentAndGetSDKDir()
+ if not win_sdk_dir:
+ return
+
+ # List of debug files to copy: the first element of each tuple is the file
+ # name and the second indicates whether the file is optional.
+ debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)]
+ for debug_file, is_optional in debug_files:
+ full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
+ if not os.path.exists(full_path):
+ if is_optional:
+ continue
+ else:
+ # TODO(crbug.com/773476): remove version requirement.
+ raise Exception('%s not found in "%s"\r\nYou must install the '
+ '"Debugging Tools for Windows" feature from the Windows'
+ ' 10 SDK.'
+ % (debug_file, full_path))
+ target_path = os.path.join(target_dir, debug_file)
+ _CopyRuntimeImpl(target_path, full_path)
+
+
+def _GetDesiredVsToolchainHashes():
+ """Load a list of SHA1s corresponding to the toolchains that we want installed
+ to build with."""
+ env_version = GetVisualStudioVersion()
+ if env_version == '2017':
+ # VS 2017 Update 9 (15.9.3) with 10.0.17763.132 SDK, 10.0.17134 version of
+ # d3dcompiler_47.dll, with ARM64 libraries.
+ toolchain_hash = '818a152b3f1da991c1725d85be19a0f27af6bab4'
+ # Third parties that do not have access to the canonical toolchain can map
+ # canonical toolchain version to their own toolchain versions.
+ toolchain_hash_mapping_key = 'GYP_MSVS_HASH_%s' % toolchain_hash
+ return [os.environ.get(toolchain_hash_mapping_key, toolchain_hash)]
+ raise Exception('Unsupported VS version %s' % env_version)
+
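+# Example (illustrative): a third party could set
+#   GYP_MSVS_HASH_818a152b3f1da991c1725d85be19a0f27af6bab4=<local hash>
+# to map the canonical toolchain hash above to its own mirrored package.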
+
+def ShouldUpdateToolchain():
+ """Check if the toolchain should be upgraded."""
+ if not os.path.exists(json_data_file):
+ return True
+ with open(json_data_file, 'r') as tempf:
+ toolchain_data = json.load(tempf)
+ version = toolchain_data['version']
+ env_version = GetVisualStudioVersion()
+ # If there's a mismatch between the version set in the environment and the one
+ # in the json file then the toolchain should be updated.
+ return version != env_version
+
+
+def Update(force=False, no_download=False):
+ """Requests an update of the toolchain to the specific hashes we have at
+ this revision. The update outputs a .json of the various configuration
+ information required to pass to gyp, which we use in |GetToolchainDir()|.
+ If no_download is true then the toolchain will be configured if present but
+ will not be downloaded.
+ """
+ if force != False and force != '--force':
+ print('Unknown parameter "%s"' % force, file=sys.stderr)
+ return 1
+ if force == '--force' or os.path.exists(json_data_file):
+ force = True
+
+ depot_tools_win_toolchain = \
+ bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+ if ((sys.platform in ('win32', 'cygwin') or force) and
+ depot_tools_win_toolchain):
+ import find_depot_tools
+ depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+
+ # On Linux, the file system is usually case-sensitive while the Windows
+ # SDK only works on case-insensitive file systems. If it doesn't already
+ # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive
+ # part of the file system.
+ toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files')
+ # For testing this block, unmount existing mounts with
+ # fusermount -u third_party/depot_tools/win_toolchain/vs_files
+ if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir):
+ import distutils.spawn
+ ciopfs = distutils.spawn.find_executable('ciopfs')
+ if not ciopfs:
+ # ciopfs not found in PATH; try the one downloaded from the DEPS hook.
+ ciopfs = os.path.join(script_dir, 'ciopfs')
+ if not os.path.isdir(toolchain_dir):
+ os.mkdir(toolchain_dir)
+ if not os.path.isdir(toolchain_dir + '.ciopfs'):
+ os.mkdir(toolchain_dir + '.ciopfs')
+ # Without use_ino, clang's #pragma once and Wnonportable-include-path
+ # both don't work right, see https://llvm.org/PR34931
+ # use_ino doesn't slow down builds, so it seems there's no drawback to
+ # just using it always.
+ subprocess.check_call([
+ ciopfs, '-o', 'use_ino', toolchain_dir + '.ciopfs', toolchain_dir])
+
+ # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit
+ # in the correct directory.
+ os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
+ get_toolchain_args = [
+ sys.executable,
+ os.path.join(depot_tools_path,
+ 'win_toolchain',
+ 'get_toolchain_if_necessary.py'),
+ '--output-json', json_data_file,
+ ] + _GetDesiredVsToolchainHashes()
+ if force:
+ get_toolchain_args.append('--force')
+ if no_download:
+ get_toolchain_args.append('--no-download')
+ subprocess.check_call(get_toolchain_args)
+
+ return 0
+
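+# Example (illustrative): Update() is typically driven through the command
+# interface, e.g. "python vs_toolchain.py update --force", which passes
+# --force through to get_toolchain_if_necessary.py and runs even on
+# non-Windows hosts.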
+
+def NormalizePath(path):
+ while path.endswith('\\'):
+ path = path[:-1]
+ return path
+
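+# For illustration, NormalizePath('C:\\foo\\') returns 'C:\\foo'; trailing
+# backslashes are stripped before the paths are embedded in the GN strings
+# printed by GetToolchainDir() below.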
+
+def SetEnvironmentAndGetSDKDir():
+ """Gets location information about the current sdk (must have been
+ previously updated by 'update'). This is used for the GN build."""
+ SetEnvironmentAndGetRuntimeDllDirs()
+
+ # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
+ if not 'WINDOWSSDKDIR' in os.environ:
+ default_sdk_path = os.path.expandvars('%ProgramFiles(x86)%'
+ '\\Windows Kits\\10')
+ if os.path.isdir(default_sdk_path):
+ os.environ['WINDOWSSDKDIR'] = default_sdk_path
+
+ return NormalizePath(os.environ['WINDOWSSDKDIR'])
+
+
+def GetToolchainDir():
+ """Gets location information about the current toolchain (must have been
+ previously updated by 'update'). This is used for the GN build."""
+ runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+ win_sdk_dir = SetEnvironmentAndGetSDKDir()
+
+ print('''vs_path = %s
+sdk_path = %s
+vs_version = %s
+wdk_dir = %s
+runtime_dirs = %s
+''' % (ToGNString(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH'])),
+ ToGNString(win_sdk_dir), ToGNString(GetVisualStudioVersion()),
+ ToGNString(NormalizePath(os.environ.get('WDK_DIR', ''))),
+ ToGNString(os.path.pathsep.join(runtime_dll_dirs or ['None']))))
+
+
+def main():
+ commands = {
+ 'update': Update,
+ 'get_toolchain_dir': GetToolchainDir,
+ 'copy_dlls': CopyDlls,
+ }
+ if len(sys.argv) < 2 or sys.argv[1] not in commands:
+ print('Expected one of: %s' % ', '.join(commands), file=sys.stderr)
+ return 1
+ return commands[sys.argv[1]](*sys.argv[2:])
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/whitespace_file.txt b/deps/v8/build/whitespace_file.txt
new file mode 100644
index 0000000000..dded3d9c1f
--- /dev/null
+++ b/deps/v8/build/whitespace_file.txt
@@ -0,0 +1,177 @@
+Copyright 2014 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+======================================================================
+
+Let's make a story. Add zero+ sentences for every commit:
+
+CHÄPTER 1:
+It was a dark and blinky night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the elements. A hooded figure emerged.
+
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt," he snickered.
+
+The pause was filled with the sound of compile errors.
+
+CHAPTER 2:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it." A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed so hard.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred just the week before.
+
+Next time, there won't be any sushi. Why sushi with waffles anyway? It's like
+adorning breakfast cereal with halibut -- shameful.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from as he attempted to wash the taste away with
+a bottle of 3000¥ sake. He tries to recall the cook's face. Green? Probably.
+
+CHAPTER 5:
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
+
+$CHAPTER6
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+NO ONE SHOULD EVER USE SVN
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+ARE TOO
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+AND THEN, TIME LEAPT BACKWARDS
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
+I'm really tempted to change something above the line.
+Reeccciiiipppppeeeeeesssssss!!!!!!!!!
+PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
+WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
+
+I can feel the heat closing in, feel them out there making their moves...
+What could possibly go wrong? We've already ate our cake.
+
+Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
+
+I want a 1917 build and you will give me what I want.
+
+This sentence is false.
+
+Beauty is in the eyes of a Beholder.
+
+I'm the best at space.
+
+The first time Yossarian saw the chaplain, he fell madly in love with him.
+*
+*
+*
+Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
+the time it did me. There is a wisdom that is woe; but there is a woe that is
+madness. And there is a Catskill eagle in some souls that can alike dive down
+into the blackest gorges, and soar out of them again and become invisible in
+the sunny spaces. And even if he for ever flies within the gorge, that gorge
+is in the mountains; so that even in his lowest swoop the mountain eagle is
+still higher than other birds upon the plain, even though they soar.
+*
+*
+*
+
+I'm here to commit lines and drop rhymes
+*
+This is a line to test and try uploading a cl.
+*
+Yay, another first commit! What a beautiful day!
+
+And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
+the Android bots, and it was good. Except on one bot, where it was bad. And
+lo, the change was reverted, and GCC went back to 4.6, where code is slower
+and less optimized. And verily did it break the build, because artifacts had
+been created with 4.8, and alignment was no longer the same, and a great
+sadness descended upon the Android GN buildbot, and it did refuseth to build
+any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
+bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
+seasons. And so they modified the whitespace file with these immortal lines,
+and visited it upon the bots, that great destruction might be wrought upon
+their outdated binaries. In clobberus, veritas.
+
+As the git approaches, light begins to shine through the SCM thrice again...
+However, the git, is, after all, quite stupid.
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. As
+Domo-Kun looked around, it realized that some of the mirrors were actually but
+pale reflections of true reality.
+
+A herd of wild gits appears! Time for CQ :D
+And one more for sizes.py...
+
+What's an overmarketed dietary supplement expressing sadness, relief,
+tiredness, or a similar feeling? Ah-Sigh-ee.
+
+It was love at first sight. The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
+
+Cool whitespace change for git-cl land
+
+Oh god the bots are red! I'm blind! Mmmm, cronuts.
+
+If you stand on your head, you will get footprints in your hair.
+
+sigh
+sigher
+pick up cls
+
+In the BUILD we trust.
+^_^
+
+In the masters we don't.
+In the tryservers, we don't either.
+In the CQ sometimes.
+Auto-generated by git-eject-upstream (http://goo.gl/cIHsYR)
+My sandwiches are like my children: I love them all.
+No, really, I couldn't eat another bit.
+When I hunger I think of you, and a pastrami sandwich.
+Do make a terrible mistake every once in a while.
+I just made two.
+Mistakes are the best sometimes.
+\o/
+This is groovy.
+
+SECRET ENDING: IT WAS _____ ALL ALONG!
+testing trailing line.
diff --git a/deps/v8/build/win/BUILD.gn b/deps/v8/build/win/BUILD.gn
new file mode 100644
index 0000000000..ea2b399169
--- /dev/null
+++ b/deps/v8/build/win/BUILD.gn
@@ -0,0 +1,168 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/manifest.gni")
+
+# Depending on this target applies Chrome's default manifests to an
+# executable: Windows compatibility, common controls and as-invoker elevation.
+windows_manifest("default_exe_manifest") {
+ sources = [
+ as_invoker_manifest,
+ common_controls_manifest,
+ default_compatibility_manifest,
+ ]
+}
+
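+# Example (illustrative) of attaching the default manifest to an executable:
+#
+#   executable("my_tool") {
+#     sources = [ "main.cc" ]
+#     deps = [ "//build/win:default_exe_manifest" ]
+#   }
+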
+if (is_win) {
+ action("copy_cdb_to_output") {
+ script = "//build/win/copy_cdb_to_output.py"
+ inputs = [
+ script,
+ "//build/vs_toolchain.py",
+ ]
+ outputs = [
+ "$root_out_dir/cdb/cdb.exe",
+ "$root_out_dir/cdb/dbgeng.dll",
+ "$root_out_dir/cdb/dbghelp.dll",
+ "$root_out_dir/cdb/dbgmodel.dll",
+ "$root_out_dir/cdb/winext/ext.dll",
+ "$root_out_dir/cdb/winext/uext.dll",
+ "$root_out_dir/cdb/winxp/exts.dll",
+ "$root_out_dir/cdb/winxp/ntsdexts.dll",
+ ]
+ if (current_cpu != "arm64") {
+ # The UCRT files are not redistributable for ARM64 Win32.
+ outputs += [
+ "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll",
+ "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll",
+ "$root_out_dir/cdb/api-ms-win-eventing-provider-l1-1-0.dll",
+ "$root_out_dir/cdb/ucrtbase.dll",
+ ]
+ }
+ args = [
+ rebase_path("$root_out_dir/cdb", root_out_dir),
+ current_cpu,
+ ]
+ }
+
+ group("runtime_libs") {
+ data = []
+ if (is_component_build) {
+ # Copy the VS runtime DLLs into the isolate so that they don't have to be
+ # preinstalled on the target machine. The debug runtimes have a "d" at
+ # the end.
+ if (is_debug) {
+ vcrt_suffix = "d"
+ } else {
+ vcrt_suffix = ""
+ }
+
+ # These runtime files are copied to the output directory by the
+ # vs_toolchain script that runs as part of toolchain configuration.
+ data += [
+ "$root_out_dir/msvcp140${vcrt_suffix}.dll",
+ "$root_out_dir/vccorlib140${vcrt_suffix}.dll",
+ "$root_out_dir/vcruntime140${vcrt_suffix}.dll",
+ ]
+ if (is_debug) {
+ data += [ "$root_out_dir/ucrtbased.dll" ]
+ }
+ if (is_asan) {
+ if (current_cpu == "x64") {
+ data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-x86_64.dll" ]
+ } else {
+ data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-i386.dll" ]
+ }
+ }
+ if (current_cpu != "arm64") {
+ data += [
+ # Universal Windows 10 CRT files
+ "$root_out_dir/api-ms-win-core-console-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-datetime-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-debug-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-errorhandling-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-file-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-file-l1-2-0.dll",
+ "$root_out_dir/api-ms-win-core-file-l2-1-0.dll",
+ "$root_out_dir/api-ms-win-core-handle-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-heap-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-interlocked-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-libraryloader-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-localization-l1-2-0.dll",
+ "$root_out_dir/api-ms-win-core-memory-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-namedpipe-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-processenvironment-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-processthreads-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-processthreads-l1-1-1.dll",
+ "$root_out_dir/api-ms-win-core-profile-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-rtlsupport-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-string-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-synch-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-synch-l1-2-0.dll",
+ "$root_out_dir/api-ms-win-core-sysinfo-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-timezone-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-core-util-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-conio-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-convert-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-environment-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-filesystem-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-heap-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-locale-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-math-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-multibyte-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-private-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-process-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-runtime-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-stdio-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-string-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-time-l1-1-0.dll",
+ "$root_out_dir/api-ms-win-crt-utility-l1-1-0.dll",
+ ]
+ if (!is_debug) {
+ data += [ "$root_out_dir/ucrtbase.dll" ]
+ }
+ }
+ }
+ }
+}
diff --git a/deps/v8/build/win/as_invoker.manifest b/deps/v8/build/win/as_invoker.manifest
new file mode 100644
index 0000000000..df046fdf68
--- /dev/null
+++ b/deps/v8/build/win/as_invoker.manifest
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+</trustInfo></assembly>
diff --git a/deps/v8/build/win/chrome_win.croc b/deps/v8/build/win/chrome_win.croc
new file mode 100644
index 0000000000..e1e3bb76d6
--- /dev/null
+++ b/deps/v8/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+ # List of rules, applied in order
+ 'rules' : [
+ # Specify inclusions before exclusions, since rules are in order.
+
+ # Don't include chromeos, posix, or linux specific files
+ {
+ 'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+ 'include' : 0,
+ },
+ # Don't include ChromeOS dirs
+ {
+ 'regexp' : '.*/chromeos/',
+ 'include' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '.*_test_win\\.',
+ 'group' : 'test',
+ },
+ ],
+}
diff --git a/deps/v8/build/win/common_controls.manifest b/deps/v8/build/win/common_controls.manifest
new file mode 100644
index 0000000000..1710196fce
--- /dev/null
+++ b/deps/v8/build/win/common_controls.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>
+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>
+ <dependency>
+ <dependentAssembly>
+ <assemblyIdentity type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*' />
+ </dependentAssembly>
+ </dependency>
+</assembly>
diff --git a/deps/v8/build/win/compatibility.manifest b/deps/v8/build/win/compatibility.manifest
new file mode 100644
index 0000000000..10d10da382
--- /dev/null
+++ b/deps/v8/build/win/compatibility.manifest
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+ <application>
+ <!--The ID below indicates application support for Windows Vista -->
+ <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+ <!--The ID below indicates application support for Windows 7 -->
+ <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+ <!--The ID below indicates application support for Windows 8 -->
+ <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+ <!--The ID below indicates application support for Windows 8.1 -->
+ <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+ <!--The ID below indicates application support for Windows 10 -->
+ <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+ </application>
+ </compatibility>
+</assembly>
diff --git a/deps/v8/build/win/copy_cdb_to_output.py b/deps/v8/build/win/copy_cdb_to_output.py
new file mode 100755
index 0000000000..1f0f22a879
--- /dev/null
+++ b/deps/v8/build/win/copy_cdb_to_output.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import glob
+import hashlib
+import os
+import shutil
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
+sys.path.insert(0, src_build_dir)
+
+import vs_toolchain
+
+
+def _HexDigest(file_name):
+ hasher = hashlib.sha256()
+ afile = open(file_name, 'rb')
+ blocksize = 65536
+ buf = afile.read(blocksize)
+ while len(buf) > 0:
+ hasher.update(buf)
+ buf = afile.read(blocksize)
+ afile.close()
+ return hasher.hexdigest()
+
+
+def _CopyImpl(file_name, target_dir, source_dir, verbose=False):
+ """Copy |source| to |target| if it doesn't already exist or if it
+ needs to be updated.
+ """
+ target = os.path.join(target_dir, file_name)
+ source = os.path.join(source_dir, file_name)
+ if (os.path.isdir(os.path.dirname(target)) and
+ ((not os.path.isfile(target)) or
+ _HexDigest(source) != _HexDigest(target))):
+ if verbose:
+ print('Copying %s to %s...' % (source, target))
+ if os.path.exists(target):
+ os.unlink(target)
+ shutil.copy(source, target)
+
+
+def _ConditionalMkdir(output_dir):
+ if not os.path.isdir(output_dir):
+ os.makedirs(output_dir)
+
+
+def _CopyCDBToOutput(output_dir, target_arch):
+ """Copies the Windows debugging executable cdb.exe to the output
+ directory, which is created if it does not exist. The output
+ directory, and target architecture that should be copied, are
+ passed. Supported values for the target architecture are the GYP
+ values "ia32", "x64", "arm64" and the GN values "x86", "x64", "arm64".
+ """
+ _ConditionalMkdir(output_dir)
+ vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+ # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
+ # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+ win_sdk_dir = os.path.normpath(
+ os.environ.get('WINDOWSSDKDIR',
+ os.path.expandvars('%ProgramFiles(x86)%'
+ '\\Windows Kits\\10')))
+ if target_arch == 'ia32' or target_arch == 'x86':
+ src_arch = 'x86'
+ elif target_arch in ['x64', 'arm64']:
+ src_arch = target_arch
+ else:
+ print('copy_cdb_to_output.py: unknown target_arch %s' % target_arch)
+ sys.exit(1)
+ # We need to copy multiple files, so cache the computed source directory.
+ src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch)
+ # We need to copy some helper DLLs to get access to the !uniqstack
+ # command to dump all threads' stacks.
+ src_winext_dir = os.path.join(src_dir, 'winext')
+ dst_winext_dir = os.path.join(output_dir, 'winext')
+ src_winxp_dir = os.path.join(src_dir, 'winxp')
+ dst_winxp_dir = os.path.join(output_dir, 'winxp')
+ # Starting with the 10.0.17763 SDK the ucrt files are in a version-named
+ # directory - this handles both cases.
+ redist_dir = os.path.join(win_sdk_dir, 'Redist')
+ version_dirs = glob.glob(os.path.join(redist_dir, '10.*'))
+ if len(version_dirs) > 0:
+ version_dirs.sort(reverse=True)
+ redist_dir = version_dirs[0]
+ src_crt_dir = os.path.join(redist_dir, 'ucrt', 'DLLs', src_arch)
+ _ConditionalMkdir(dst_winext_dir)
+ _ConditionalMkdir(dst_winxp_dir)
+ # Note that the outputs from the "copy_cdb_to_output" target need to
+ # be kept in sync with this list.
+ _CopyImpl('cdb.exe', output_dir, src_dir)
+ _CopyImpl('dbgeng.dll', output_dir, src_dir)
+ _CopyImpl('dbghelp.dll', output_dir, src_dir)
+ _CopyImpl('dbgmodel.dll', output_dir, src_dir)
+ _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir)
+ _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
+ _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
+ _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
+ if src_arch in ['x64', 'x86']:
+ _CopyImpl('api-ms-win-eventing-provider-l1-1-0.dll', output_dir, src_dir)
+ _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir)
+ for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')):
+ _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir)
+ return 0
+
+
+def main():
+ if len(sys.argv) < 2:
+ print('Usage: copy_cdb_to_output.py <output_dir> ' + \
+ '<target_arch>', file=sys.stderr)
+ return 1
+ return _CopyCDBToOutput(sys.argv[1], sys.argv[2])
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/win/gn_meta_sln.py b/deps/v8/build/win/gn_meta_sln.py
new file mode 100644
index 0000000000..9f72edae5c
--- /dev/null
+++ b/deps/v8/build/win/gn_meta_sln.py
@@ -0,0 +1,212 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# gn_meta_sln.py
+# Helper utility to combine GN-generated Visual Studio projects into
+# a single meta-solution.
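+#
+# Example (illustrative) workflow, run from the source root:
+#   gn gen out/Debug --ide=vs
+#   gn gen out/Release --ide=vs
+#   python build/win/gn_meta_sln.py
+# The merged solution and projects are written under out/sln/.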
+
+import os
+import glob
+import re
+import sys
+from shutil import copyfile
+
+# Helpers
+def EnsureExists(path):
+ try:
+ os.makedirs(path)
+ except OSError:
+ pass
+
+def WriteLinesToFile(lines, file_name):
+ EnsureExists(os.path.dirname(file_name))
+ with open(file_name, "w") as f:
+ f.writelines(lines)
+
+def ExtractIdg(proj_file_name):
+ result = []
+ with open(proj_file_name) as proj_file:
+ lines = iter(proj_file)
+ for p_line in lines:
+ if "<ItemDefinitionGroup" in p_line:
+ while not "</ItemDefinitionGroup" in p_line:
+ result.append(p_line)
+ p_line = next(lines)
+ result.append(p_line)
+ return result
+
+# [ (name, solution_name, vs_version), ... ]
+configs = []
+
+def GetVSVersion(solution_file):
+ with open(solution_file) as f:
+ f.readline()
+ comment = f.readline().strip()
+ return comment[-4:]
+
+# Find all directories that can be used as configs (and record if they have VS
+# files present)
+for root, dirs, files in os.walk("out"):
+ for out_dir in dirs:
+ gn_file = os.path.join("out", out_dir, "build.ninja.d")
+ if os.path.exists(gn_file):
+ solutions = glob.glob(os.path.join("out", out_dir, "*.sln"))
+ for solution in solutions:
+ vs_version = GetVSVersion(solution)
+ configs.append((out_dir, os.path.basename(solution),
+ vs_version))
+ break
+
+# Every project has a GUID that encodes the type. We only care about C++.
+cpp_type_guid = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+
+# Work around MSBuild limitations by always using a fixed arch.
+hard_coded_arch = "x64"
+
+# name -> [ (config, pathToProject, GUID, arch), ... ]
+all_projects = {}
+project_pattern = (r'Project\("\{' + cpp_type_guid +
+ r'\}"\) = "([^"]*)", "([^"]*)", "\{([^\}]*)\}"')
+
+# We need something to work with. Typically, this will fail if no GN folders
+# have IDE files.
+if len(configs) == 0:
+ print("ERROR: At least one GN directory must have been built with --ide=vs")
+ sys.exit(1)
+
+# Filter out configs which don't match the name and vs version of the first.
+name = configs[0][1]
+vs_version = configs[0][2]
+
+for config in configs:
+ if config[1] != name or config[2] != vs_version:
+ continue
+
+ sln_lines = iter(open(os.path.join("out", config[0], config[1])))
+ for sln_line in sln_lines:
+ match_obj = re.match(project_pattern, sln_line)
+ if match_obj:
+ proj_name = match_obj.group(1)
+ if proj_name not in all_projects:
+ all_projects[proj_name] = []
+ all_projects[proj_name].append((config[0], match_obj.group(2),
+ match_obj.group(3)))
+
+# We need something to work with. Typically, this will fail if no GN folders
+# have IDE files.
+if len(all_projects) == 0:
+ print("ERROR: At least one GN directory must have been built with --ide=vs")
+ sys.exit(1)
+
+# Create a new solution. We arbitrarily use the first config as the GUID source
+# (but we need to match that behavior later, when we copy/generate the project
+# files).
+new_sln_lines = []
+new_sln_lines.append(
+ 'Microsoft Visual Studio Solution File, Format Version 12.00\n')
+new_sln_lines.append('# Visual Studio ' + vs_version + '\n')
+for proj_name, proj_configs in all_projects.items():
+ new_sln_lines.append('Project("{' + cpp_type_guid + '}") = "' + proj_name +
+ '", "' + proj_configs[0][1] + '", "{' +
+ proj_configs[0][2] + '}"\n')
+ new_sln_lines.append('EndProject\n')
+
+new_sln_lines.append('Global\n')
+new_sln_lines.append(
+ '\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
+for config in configs:
+ match = config[0] + '|' + hard_coded_arch
+ new_sln_lines.append('\t\t' + match + ' = ' + match + '\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append(
+ '\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
+for proj_name, proj_configs in all_projects.items():
+ proj_guid = proj_configs[0][2]
+ for config in configs:
+ match = config[0] + '|' + hard_coded_arch
+ new_sln_lines.append('\t\t{' + proj_guid + '}.' + match +
+ '.ActiveCfg = ' + match + '\n')
+ new_sln_lines.append('\t\t{' + proj_guid + '}.' + match +
+ '.Build.0 = ' + match + '\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('\tGlobalSection(SolutionProperties) = preSolution\n')
+new_sln_lines.append('\t\tHideSolutionNode = FALSE\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('\tGlobalSection(NestedProjects) = preSolution\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('EndGlobal\n')
+
+# Write solution file
+WriteLinesToFile(new_sln_lines, 'out/sln/' + name)
+
+idg_hdr = "<ItemDefinitionGroup Condition=\"'$(Configuration)|$(Platform)'=='"
+
+configuration_template = """ <ProjectConfiguration Include="{config}|{arch}">
+ <Configuration>{config}</Configuration>
+ <Platform>{arch}</Platform>
+ </ProjectConfiguration>
+"""
+
+def FormatProjectConfig(config):
+ return configuration_template.format(
+ config = config[0], arch = hard_coded_arch)
+
+# Now, bring over the project files
+for proj_name, proj_configs in all_projects.items():
+ # Paths to project and filter file in src and dst locations
+ src_proj_path = os.path.join("out", proj_configs[0][0], proj_configs[0][1])
+ dst_proj_path = os.path.join("out", "sln", proj_configs[0][1])
+ src_filter_path = src_proj_path + ".filters"
+ dst_filter_path = dst_proj_path + ".filters"
+
+ # Copy the filter file unmodified
+ EnsureExists(os.path.dirname(dst_proj_path))
+ copyfile(src_filter_path, dst_filter_path)
+
+ preferred_tool_arch = None
+ config_arch = {}
+
+ # Bring over the project file, modified with extra configs
+ with open(src_proj_path) as src_proj_file:
+ proj_lines = iter(src_proj_file)
+ new_proj_lines = []
+ for line in proj_lines:
+ if "<ItemDefinitionGroup" in line:
+ # This is a large group that contains many settings. We need to
+ # replicate it, with conditions so it varies per configuration.
+ idg_lines = []
+ while not "</ItemDefinitionGroup" in line:
+ idg_lines.append(line)
+ line = next(proj_lines)
+ idg_lines.append(line)
+ for proj_config in proj_configs:
+ config_idg_lines = ExtractIdg(os.path.join("out",
+ proj_config[0],
+ proj_config[1]))
+ match = proj_config[0] + '|' + hard_coded_arch
+ new_proj_lines.append(idg_hdr + match + "'\">\n")
+ for idg_line in config_idg_lines[1:]:
+ new_proj_lines.append(idg_line)
+ elif "ProjectConfigurations" in line:
+ new_proj_lines.append(line)
+ next(proj_lines)
+ next(proj_lines)
+ next(proj_lines)
+ next(proj_lines)
+ for config in configs:
+ new_proj_lines.append(FormatProjectConfig(config))
+
+ elif "<OutDir" in line:
+ new_proj_lines.append(line.replace(proj_configs[0][0],
+ "$(Configuration)"))
+ elif "<PreferredToolArchitecture" in line:
+ new_proj_lines.append(" <PreferredToolArchitecture>" +
+ hard_coded_arch +
+ "</PreferredToolArchitecture>\n")
+ else:
+ new_proj_lines.append(line)
+ with open(dst_proj_path, "w") as new_proj:
+ new_proj.writelines(new_proj_lines)
+
+print('Wrote meta solution to out/sln/' + name)
diff --git a/deps/v8/build/win/message_compiler.gni b/deps/v8/build/win/message_compiler.gni
new file mode 100644
index 0000000000..814eb6521e
--- /dev/null
+++ b/deps/v8/build/win/message_compiler.gni
@@ -0,0 +1,89 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win, "This only runs on Windows.")
+
+# Runs mc.exe over a list of sources. The outputs (a header and rc file) are
+# placed in the target gen dir, and compiled.
+#
+# sources
+# List of message files to process.
+#
+# user_mode_logging (optional bool)
+# Generates user-mode logging code. Defaults to false (no logging code).
+#
+# compile_generated_code (optional, defaults to true)
+# If unset or true, the generated code will be compiled and linked into
+# targets that depend on it. If set to false, the .h and .rc files will only
+# be generated.
+#
+# deps, public_deps, visibility
+# Normal meaning.
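+#
+# Example (illustrative):
+#
+#   message_compiler("chrome_events") {
+#     sources = [ "chrome_events.man" ]
+#     user_mode_logging = true
+#   }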
+template("message_compiler") {
+ if (defined(invoker.compile_generated_code) &&
+ !invoker.compile_generated_code) {
+ compile_generated_code = false
+ action_name = target_name
+ } else {
+ compile_generated_code = true
+ action_name = "${target_name}_mc"
+ source_set_name = target_name
+ }
+
+ action_foreach(action_name) {
+ if (compile_generated_code) {
+ visibility = [ ":$source_set_name" ]
+ } else {
+ forward_variables_from(invoker, [ "visibility" ])
+ }
+
+ script = "//build/win/message_compiler.py"
+
+ outputs = [
+ "$target_gen_dir/{{source_name_part}}.h",
+ "$target_gen_dir/{{source_name_part}}.rc",
+ ]
+
+ args = [
+ # The first argument is the environment file saved to the build
+ # directory. This is required because the Windows toolchain setup saves
+ # the VC paths and such so that running "mc.exe" will work with the
+ # configured toolchain. This file is in the root build dir.
+ "environment.$current_cpu",
+
+ # Where to put the header.
+ "-h",
+ rebase_path(target_gen_dir, root_build_dir),
+
+ # Where to put the .rc file.
+ "-r",
+ rebase_path(target_gen_dir, root_build_dir),
+
+ # Input is Unicode.
+ "-u",
+ ]
+ if (defined(invoker.user_mode_logging) && invoker.user_mode_logging) {
+ args += [ "-um" ]
+ }
+ args += [ "{{source}}" ]
+
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "public_deps",
+ "sources",
+ ])
+ }
+
+ if (compile_generated_code) {
+ # Compile the generated rc file.
+ source_set(source_set_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+ sources = get_target_outputs(":$action_name")
+ deps = [
+ ":$action_name",
+ ]
+ }
+ }
+}
diff --git a/deps/v8/build/win/message_compiler.py b/deps/v8/build/win/message_compiler.py
new file mode 100644
index 0000000000..51de52f0fc
--- /dev/null
+++ b/deps/v8/build/win/message_compiler.py
@@ -0,0 +1,148 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the Microsoft Message Compiler (mc.exe).
+#
+# Usage: message_compiler.py <environment_file> [<args to mc.exe>*]
+
+from __future__ import print_function
+
+import difflib
+import distutils.dir_util
+import filecmp
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def main():
+ env_file, rest = sys.argv[1], sys.argv[2:]
+
+ # Parse some argument flags.
+ header_dir = None
+ resource_dir = None
+ input_file = None
+ for i, arg in enumerate(rest):
+ if arg == '-h' and len(rest) > i + 1:
+ assert header_dir is None
+ header_dir = rest[i + 1]
+ elif arg == '-r' and len(rest) > i + 1:
+ assert resource_dir is None
+ resource_dir = rest[i + 1]
+ elif arg.endswith('.mc') or arg.endswith('.man'):
+ assert input_file is None
+ input_file = arg
+
+ # Copy checked-in outputs to final location.
+ THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+ assert header_dir == resource_dir
+ source = os.path.join(THIS_DIR, "..", "..",
+ "third_party", "win_build_output",
+ re.sub(r'^(?:[^/]+/)?gen/', 'mc/', header_dir))
+ distutils.dir_util.copy_tree(source, header_dir, preserve_times=False)
+
+ # On non-Windows, that's all we can do.
+ if sys.platform != 'win32':
+ return
+
+ # On Windows, run mc.exe on the input and check that its outputs are
+ # identical to the checked-in outputs.
+
+ # Read the environment block from the file. This is stored in the format used
+ # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
+ # trailing vs. separator.
+ env_pairs = open(env_file).read()[:-2].split('\0')
+ env_dict = dict([item.split('=', 1) for item in env_pairs])
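+ # For illustration (hypothetical values): an env_file containing
+ # 'PATH=C:\\tools\x00TMP=C:\\tmp\x00\x00' parses to
+ # {'PATH': 'C:\\tools', 'TMP': 'C:\\tmp'}.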
+
+ extension = os.path.splitext(input_file)[1]
+ if extension in ['.man', '.mc']:
+ # For .man files, mc's output changed significantly from Version 10.0.15063
+ # to Version 10.0.16299. We should always have the output of the current
+ # default SDK checked in and compare to that. Early out if a different SDK
+ # is active. This also happens with .mc files.
+ # TODO(thakis): Check in new baselines and compare to 16299 instead once
+ # we use the 2017 Fall Creator's Update by default.
+ mc_help = subprocess.check_output(['mc.exe', '/?'], env=env_dict,
+ stderr=subprocess.STDOUT, shell=True)
+ version = re.search(br'Message Compiler\s+Version (\S+)', mc_help).group(1)
+ if version != '10.0.15063':
+ return
+
+ # mc writes to stderr, so this explicitly redirects to stdout and eats it.
+ try:
+ tmp_dir = tempfile.mkdtemp()
+ delete_tmp_dir = True
+ if header_dir:
+ rest[rest.index('-h') + 1] = tmp_dir
+ header_dir = tmp_dir
+ if resource_dir:
+ rest[rest.index('-r') + 1] = tmp_dir
+ resource_dir = tmp_dir
+
+ # This needs shell=True to search the path in env_dict for the mc
+ # executable.
+ subprocess.check_output(['mc.exe'] + rest,
+ env=env_dict,
+ stderr=subprocess.STDOUT,
+ shell=True)
+ # We require all source code (in particular, the header generated here) to
+ # be UTF-8. jinja can output the intermediate .mc file in UTF-8 or UTF-16LE.
+ # However, mc.exe only supports Unicode via the -u flag, and when that flag
+ # is specified it assumes the input is UTF-16LE (and errors out on UTF-8
+ # files, assuming they're ANSI). Even with -u specified and UTF-16LE input,
+ # it generates an ANSI header, and includes broken versions of the message
+ # text in the comment before the value. To work around this, for any invalid
+ # // comment lines, we simply drop the line in the header after building it.
+ # Also, mc.exe apparently doesn't always write #define lines in
+ # deterministic order, so manually sort each block of #defines.
+ if header_dir:
+ header_file = os.path.join(
+ header_dir, os.path.splitext(os.path.basename(input_file))[0] + '.h')
+ header_contents = []
+ with open(header_file, 'rb') as f:
+ define_block = [] # The current contiguous block of #defines.
+ for line in f.readlines():
+ if line.startswith('//') and '?' in line:
+ continue
+ if line.startswith('#define '):
+ define_block.append(line)
+ continue
+ # On the first non-#define line, emit the sorted preceding #define
+ # block.
+ header_contents += sorted(define_block, key=lambda s: s.split()[-1])
+ define_block = []
+ header_contents.append(line)
+ # If the .h file ends with a #define block, flush the final block.
+ header_contents += sorted(define_block, key=lambda s: s.split()[-1])
+ with open(header_file, 'wb') as f:
+ f.write(''.join(header_contents))
+
+ # mc.exe invocation and post-processing are complete, now compare the output
+ # in tmp_dir to the checked-in outputs.
+ diff = filecmp.dircmp(tmp_dir, source)
+ if diff.diff_files or set(diff.left_list) != set(diff.right_list):
+ print('mc.exe output different from files in %s, see %s' % (source,
+ tmp_dir))
+ diff.report()
+ for f in diff.diff_files:
+ if f.endswith('.bin'): continue
+ fromfile = os.path.join(source, f)
+ tofile = os.path.join(tmp_dir, f)
+ print(''.join(
+ difflib.unified_diff(
+ open(fromfile, 'U').readlines(),
+ open(tofile, 'U').readlines(), fromfile, tofile)))
+ delete_tmp_dir = False
+ sys.exit(1)
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ sys.exit(e.returncode)
+ finally:
+ if os.path.exists(tmp_dir) and delete_tmp_dir:
+ shutil.rmtree(tmp_dir)
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/win/reorder-imports.py b/deps/v8/build/win/reorder-imports.py
new file mode 100755
index 0000000000..ee27ed19cc
--- /dev/null
+++ b/deps/v8/build/win/reorder-imports.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..',
+ 'third_party', 'pefile'))
+import pefile
+
+def reorder_imports(input_dir, output_dir, architecture):
+ """Swap chrome_elf.dll to be the first import of chrome.exe.
+ Also copy over any related files that might be needed
+ (pdbs, manifests etc.).
+ """
+ # TODO(thakis): See if there is a reliable way to write the
+ # correct executable in the first place, so that this script
+ # only needs to verify that and not write a whole new exe.
+
+ input_image = os.path.join(input_dir, 'chrome.exe')
+ output_image = os.path.join(output_dir, 'chrome.exe')
+
+ # pefile mmap()s the whole executable, and then parses parts of
+ # it into python data structures for ease of processing.
+ # To write the file again, only the mmap'd data is written back,
+ # so modifying the parsed python objects generally has no effect.
+ # However, parsed raw data ends up in pe.Structure instances,
+ # and these all get serialized back when the file gets written.
+ # So things that are in a Structure must have their data set
+ # through the Structure, while other data must bet set through
+ # the set_bytes_*() methods.
+ pe = pefile.PE(input_image, fast_load=True)
+ if architecture == 'x64' or architecture == 'arm64':
+ assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS
+ else:
+ assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE
+
+ pe.parse_data_directories(directories=[
+ pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
+
+ found_elf = False
+ for i, peimport in enumerate(pe.DIRECTORY_ENTRY_IMPORT):
+ if peimport.dll.lower() == 'chrome_elf.dll':
+ assert not found_elf, 'only one chrome_elf.dll import expected'
+ found_elf = True
+ if i > 0:
+ swap = pe.DIRECTORY_ENTRY_IMPORT[0]
+
+ # Morally we want to swap peimport.struct and swap.struct here,
+ # but the pe module doesn't expose a public method on Structure
+ # to get all data of a Structure without explicitly listing all
+ # field names.
+ # NB: OriginalFirstThunk and Characteristics are a union, both at
+ # offset 0, handling just one of them is enough.
+ peimport.struct.OriginalFirstThunk, swap.struct.OriginalFirstThunk = \
+ swap.struct.OriginalFirstThunk, peimport.struct.OriginalFirstThunk
+ peimport.struct.TimeDateStamp, swap.struct.TimeDateStamp = \
+ swap.struct.TimeDateStamp, peimport.struct.TimeDateStamp
+ peimport.struct.ForwarderChain, swap.struct.ForwarderChain = \
+ swap.struct.ForwarderChain, peimport.struct.ForwarderChain
+ peimport.struct.Name, swap.struct.Name = \
+ swap.struct.Name, peimport.struct.Name
+ peimport.struct.FirstThunk, swap.struct.FirstThunk = \
+ swap.struct.FirstThunk, peimport.struct.FirstThunk
+ assert found_elf, 'chrome_elf.dll import not found'
+
+ pe.write(filename=output_image)
+
+ for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+ shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+ return 0
+
+
+def main(argv):
+ usage = 'reorder-imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+ metavar='DIR')
+ parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+ metavar='DIR')
+ parser.add_option('-a', '--arch', help='architecture of build (optional)',
+ default='ia32')
+ opts, args = parser.parse_args()
+
+ if not opts.input or not opts.output:
+ parser.error('Please provide an input and an output directory')
+ return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/win/require_administrator.manifest b/deps/v8/build/win/require_administrator.manifest
new file mode 100644
index 0000000000..4142e73342
--- /dev/null
+++ b/deps/v8/build/win/require_administrator.manifest
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="requireAdministrator" uiAccess="false"></requestedExecutionLevel>
+ </requestedPrivileges>
+ </security>
+</trustInfo></assembly>
diff --git a/deps/v8/build/win/use_ansi_codes.py b/deps/v8/build/win/use_ansi_codes.py
new file mode 100755
index 0000000000..5951c2ab4d
--- /dev/null
+++ b/deps/v8/build/win/use_ansi_codes.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints if the the terminal is likely to understand ANSI codes."""
+
+from __future__ import print_function
+
+import os
+
+# Add more terminals here as needed.
+print('ANSICON' in os.environ)
diff --git a/deps/v8/build/win_is_xtree_patched.py b/deps/v8/build/win_is_xtree_patched.py
new file mode 100755
index 0000000000..54aff46d20
--- /dev/null
+++ b/deps/v8/build/win_is_xtree_patched.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Determines if the VS xtree header has been patched to disable C4702."""
+
+from __future__ import print_function
+
+import os
+
+
+def IsPatched():
+ # TODO(scottmg): For now, just return if we're using the packaged toolchain
+ # script (because we know it's patched). Another case could be added here to
+ # query the active VS installation and actually check the contents of xtree.
+ # http://crbug.com/346399.
+ return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1
+
+
+def DoMain(_):
+ """Hook to be called from gyp without starting a separate python
+ interpreter."""
+ return "1" if IsPatched() else "0"
+
+
+if __name__ == '__main__':
+ print(DoMain([]))
diff --git a/deps/v8/build/write_build_date_header.py b/deps/v8/build/write_build_date_header.py
new file mode 100755
index 0000000000..77388288b3
--- /dev/null
+++ b/deps/v8/build/write_build_date_header.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Takes a timestamp and writes it in as readable text to a .h file."""
+
+import argparse
+import datetime
+import os
+import sys
+
+
+def main():
+ argument_parser = argparse.ArgumentParser()
+ argument_parser.add_argument('output_file', help='The file to write to')
+ argument_parser.add_argument('timestamp')
+ args = argument_parser.parse_args()
+
+ date = datetime.datetime.utcfromtimestamp(int(args.timestamp))
+ output = ('// Generated by //build/write_build_date_header.py\n'
+ '#ifndef BUILD_DATE\n'
+ '#define BUILD_DATE "{:%b %d %Y %H:%M:%S}"\n'
+ '#endif // BUILD_DATE\n'.format(date))
+
+ current_contents = ''
+ if os.path.isfile(args.output_file):
+ with open(args.output_file, 'r') as current_file:
+ current_contents = current_file.read()
+
+ if current_contents != output:
+ with open(args.output_file, 'w') as output_file:
+ output_file.write(output)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
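
As a worked example (timestamp and output path invented for illustration),
Unix time 1565217947 is 2019-08-07 22:45:47 UTC:

    # python deps/v8/build/write_build_date_header.py out/build_date.h 1565217947
    # writes out/build_date.h containing:
    #   // Generated by //build/write_build_date_header.py
    #   #ifndef BUILD_DATE
    #   #define BUILD_DATE "Aug 07 2019 22:45:47"
    #   #endif // BUILD_DATE
    import datetime
    stamp = datetime.datetime.utcfromtimestamp(1565217947)
    assert '{:%b %d %Y %H:%M:%S}'.format(stamp) == 'Aug 07 2019 22:45:47'

Comparing against the existing contents before writing leaves the file's
mtime untouched when nothing changed, so dependents are not rebuilt on every
build.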
diff --git a/deps/v8/build/write_buildflag_header.py b/deps/v8/build/write_buildflag_header.py
new file mode 100755
index 0000000000..d46cfc89a9
--- /dev/null
+++ b/deps/v8/build/write_buildflag_header.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This writes headers for build flags. See buildflag_header.gni for usage of
+# this system as a whole.
+#
+# The parameters are passed in a response file so we don't have to worry
+# about command line lengths. The name of the response file is passed on the
+# command line.
+#
+# The format of the response file is:
+# [--flags <list of one or more flag values>]
+
+import optparse
+import os
+import shlex
+import sys
+
+
+class Options:
+ def __init__(self, output, rulename, header_guard, flags):
+ self.output = output
+ self.rulename = rulename
+ self.header_guard = header_guard
+ self.flags = flags
+
+
+def GetOptions():
+ parser = optparse.OptionParser()
+ parser.add_option('--output', help="Output header name inside --gen-dir.")
+ parser.add_option('--rulename',
+ help="Helpful name of build rule for including in the " +
+ "comment at the top of the file.")
+ parser.add_option('--gen-dir',
+ help="Path to root of generated file directory tree.")
+ parser.add_option('--definitions',
+ help="Name of the response file containing the flags.")
+ cmdline_options, cmdline_flags = parser.parse_args()
+
+ # Compute header guard by replacing some chars with _ and upper-casing.
+ header_guard = cmdline_options.output.upper()
+ header_guard = header_guard.replace('/', '_').replace('\\', '_')
+ header_guard = header_guard.replace('.', '_')
+ header_guard += '_'
+
+ # The actual output file is inside the gen dir.
+ output = os.path.join(cmdline_options.gen_dir, cmdline_options.output)
+
+ # The definitions file is newline-separated in GYP and shell-formatted in
+ # GN; shlex can parse both formats.
+ with open(cmdline_options.definitions, 'r') as def_file:
+ defs = shlex.split(def_file.read())
+ flags_index = defs.index('--flags')
+
+ # Everything after --flags is a flag value. true/false are remapped to
+ # 1/0; everything else is passed through.
+ flags = []
+ for flag in defs[flags_index + 1:]:
+ equals_index = flag.index('=')
+ key = flag[:equals_index]
+ value = flag[equals_index + 1:]
+
+ # Canonicalize boolean values; no other validation is performed.
+ if value == 'true':
+ value = '1'
+ elif value == 'false':
+ value = '0'
+ flags.append((key, str(value)))
+
+ return Options(output=output,
+ rulename=cmdline_options.rulename,
+ header_guard=header_guard,
+ flags=flags)
+
+
+def WriteHeader(options):
+ with open(options.output, 'w') as output_file:
+ output_file.write("// Generated by build/write_buildflag_header.py\n")
+ if options.rulename:
+ output_file.write('// From "' + options.rulename + '"\n')
+
+ output_file.write('\n#ifndef %s\n' % options.header_guard)
+ output_file.write('#define %s\n\n' % options.header_guard)
+ output_file.write('#include "build/buildflag.h"\n\n')
+
+ for pair in options.flags:
+ output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair)
+
+ output_file.write('\n#endif // %s\n' % options.header_guard)
+
+
+if __name__ == '__main__':
+  WriteHeader(GetOptions())
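
To make the flow concrete, here is a hypothetical rule traced through the
script (all names invented for illustration):

    # flags.rsp, written newline-separated by GYP or shell-formatted by GN:
    #   --flags ENABLE_FOO=true BAR_COUNT=5
    #
    # python deps/v8/build/write_buildflag_header.py \
    #     --output foo/buildflags.h --rulename //foo:buildflags \
    #     --gen-dir out/gen --definitions flags.rsp
    #
    # out/gen/foo/buildflags.h then contains:
    #   // Generated by build/write_buildflag_header.py
    #   // From "//foo:buildflags"
    #
    #   #ifndef FOO_BUILDFLAGS_H_
    #   #define FOO_BUILDFLAGS_H_
    #
    #   #include "build/buildflag.h"
    #
    #   #define BUILDFLAG_INTERNAL_ENABLE_FOO() (1)
    #   #define BUILDFLAG_INTERNAL_BAR_COUNT() (5)
    #
    #   #endif // FOO_BUILDFLAGS_H_

Client code then tests the flag as BUILDFLAG(ENABLE_FOO), which the included
build/buildflag.h expands to the BUILDFLAG_INTERNAL_ENABLE_FOO() call.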