summaryrefslogtreecommitdiff
path: root/deps/v8/build/android
diff options
context:
space:
mode:
authorFlorian Dold <florian.dold@gmail.com>2019-08-07 22:45:47 +0200
committerFlorian Dold <florian.dold@gmail.com>2019-08-07 22:45:47 +0200
commit65e39b7046a29aa299f06285441b62bcf1e4df01 (patch)
tree2eb012aabb59533b954aa169199733292de336cf /deps/v8/build/android
parent936cd90b7def6ef7c1e0b80265a9dc77a9ad23c6 (diff)
downloadandroid-node-v8-65e39b7046a29aa299f06285441b62bcf1e4df01.tar.gz
android-node-v8-65e39b7046a29aa299f06285441b62bcf1e4df01.tar.bz2
android-node-v8-65e39b7046a29aa299f06285441b62bcf1e4df01.zip
Move v8/build into this repository.
Since we need to patch some files, we don't let depot_tools manage these files anymore. build.git commit a0b2e3b2708bcf81ec00ac1738b586bcc5e04eea
Diffstat (limited to 'deps/v8/build/android')
-rw-r--r--deps/v8/build/android/.style.yapf6
-rw-r--r--deps/v8/build/android/AndroidManifest.xml20
-rw-r--r--deps/v8/build/android/BUILD.gn135
-rw-r--r--deps/v8/build/android/CheckInstallApk-debug.apkbin0 -> 37106 bytes
-rw-r--r--deps/v8/build/android/OWNERS7
-rw-r--r--deps/v8/build/android/PRESUBMIT.py97
-rwxr-xr-xdeps/v8/build/android/adb_chrome_public_command_line16
-rwxr-xr-xdeps/v8/build/android/adb_command_line.py93
-rwxr-xr-xdeps/v8/build/android/adb_gdb1000
-rwxr-xr-xdeps/v8/build/android/adb_install_apk.py132
-rwxr-xr-xdeps/v8/build/android/adb_logcat_monitor.py156
-rwxr-xr-xdeps/v8/build/android/adb_logcat_printer.py222
-rwxr-xr-xdeps/v8/build/android/adb_profile_chrome9
-rwxr-xr-xdeps/v8/build/android/adb_profile_chrome_startup9
-rwxr-xr-xdeps/v8/build/android/adb_reverse_forwarder.py90
-rwxr-xr-xdeps/v8/build/android/adb_system_webview_command_line16
-rw-r--r--deps/v8/build/android/android_only_explicit_jni_exports.lst13
-rw-r--r--deps/v8/build/android/android_only_jni_exports.lst13
-rwxr-xr-xdeps/v8/build/android/apk_operations.py1678
-rwxr-xr-xdeps/v8/build/android/apply_shared_preference_file.py50
-rwxr-xr-xdeps/v8/build/android/asan_symbolize.py138
-rw-r--r--deps/v8/build/android/binary_size/OWNERS4
-rw-r--r--deps/v8/build/android/binary_size/__init__.py3
-rwxr-xr-xdeps/v8/build/android/binary_size/apk_downloader.py138
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha11
-rw-r--r--deps/v8/build/android/binary_size/apks/README.md45
-rw-r--r--deps/v8/build/android/buildhooks/BUILD.gn58
-rw-r--r--deps/v8/build/android/buildhooks/OWNERS4
-rw-r--r--deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java50
-rw-r--r--deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java107
-rw-r--r--deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java54
-rw-r--r--deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template16
-rw-r--r--deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags5
-rw-r--r--deps/v8/build/android/bytecode/BUILD.gn27
-rw-r--r--deps/v8/build/android/bytecode/OWNERS4
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java109
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java293
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java167
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java51
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java302
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java149
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java83
-rw-r--r--deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java87
-rw-r--r--deps/v8/build/android/chromium-debug.keystorebin0 -> 2223 bytes
-rwxr-xr-xdeps/v8/build/android/convert_dex_profile.py557
-rw-r--r--deps/v8/build/android/convert_dex_profile_tests.py276
-rw-r--r--deps/v8/build/android/devil_chromium.json130
-rw-r--r--deps/v8/build/android/devil_chromium.py170
-rw-r--r--deps/v8/build/android/devil_chromium.pydeps38
-rwxr-xr-xdeps/v8/build/android/diff_resource_sizes.py198
-rw-r--r--deps/v8/build/android/docs/README.md11
-rw-r--r--deps/v8/build/android/docs/android_app_bundles.md210
-rw-r--r--deps/v8/build/android/docs/build_config.md170
-rw-r--r--deps/v8/build/android/docs/coverage.md56
-rw-r--r--deps/v8/build/android/docs/life_of_a_resource.md233
-rw-r--r--deps/v8/build/android/docs/lint.md91
-rwxr-xr-xdeps/v8/build/android/download_doclava.py31
-rwxr-xr-xdeps/v8/build/android/dump_apk_resource_strings.py662
-rwxr-xr-xdeps/v8/build/android/emma_coverage_stats.py479
-rwxr-xr-xdeps/v8/build/android/emma_coverage_stats_test.py563
-rw-r--r--deps/v8/build/android/empty/.keep2
-rw-r--r--deps/v8/build/android/empty_proguard.flags1
-rwxr-xr-xdeps/v8/build/android/envsetup.sh29
-rwxr-xr-xdeps/v8/build/android/generate_emma_html.py115
-rw-r--r--deps/v8/build/android/gradle/AndroidManifest.xml14
-rw-r--r--deps/v8/build/android/gradle/OWNERS4
-rw-r--r--deps/v8/build/android/gradle/android.jinja114
-rw-r--r--deps/v8/build/android/gradle/cmake.jinja26
-rw-r--r--deps/v8/build/android/gradle/dependencies.jinja28
-rwxr-xr-xdeps/v8/build/android/gradle/generate_gradle.py974
-rwxr-xr-xdeps/v8/build/android/gradle/gn_to_cmake.py687
-rw-r--r--deps/v8/build/android/gradle/java.jinja41
-rw-r--r--deps/v8/build/android/gradle/manifest.jinja7
-rw-r--r--deps/v8/build/android/gradle/root.jinja20
-rw-r--r--deps/v8/build/android/gyp/OWNERS6
-rwxr-xr-xdeps/v8/build/android/gyp/aar.py166
-rw-r--r--deps/v8/build/android/gyp/aar.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/aidl.py58
-rw-r--r--deps/v8/build/android/gyp/aidl.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/apkbuilder.py377
-rw-r--r--deps/v8/build/android/gyp/apkbuilder.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/assert_static_initializers.py160
-rw-r--r--deps/v8/build/android/gyp/assert_static_initializers.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/bundletool.py32
-rwxr-xr-xdeps/v8/build/android/gyp/bytecode_processor.py76
-rw-r--r--deps/v8/build/android/gyp/bytecode_processor.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/compile_resources.py916
-rw-r--r--deps/v8/build/android/gyp/compile_resources.pydeps29
-rwxr-xr-xdeps/v8/build/android/gyp/copy_ex.py128
-rw-r--r--deps/v8/build/android/gyp/copy_ex.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/create_apk_operations_script.py84
-rw-r--r--deps/v8/build/android/gyp/create_apk_operations_script.pydeps3
-rwxr-xr-xdeps/v8/build/android/gyp/create_app_bundle.py377
-rw-r--r--deps/v8/build/android/gyp/create_app_bundle.pydeps30
-rwxr-xr-xdeps/v8/build/android/gyp/create_app_bundle_minimal_apks.py46
-rw-r--r--deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps33
-rwxr-xr-xdeps/v8/build/android/gyp/create_bundle_wrapper_script.py115
-rw-r--r--deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps102
-rwxr-xr-xdeps/v8/build/android/gyp/create_java_binary_script.py112
-rw-r--r--deps/v8/build/android/gyp/create_java_binary_script.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/create_size_info_files.py167
-rw-r--r--deps/v8/build/android/gyp/create_size_info_files.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/create_stack_script.py80
-rw-r--r--deps/v8/build/android/gyp/create_stack_script.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/create_tool_wrapper.py46
-rw-r--r--deps/v8/build/android/gyp/create_tool_wrapper.pydeps3
-rwxr-xr-xdeps/v8/build/android/gyp/create_ui_locale_resources.py91
-rwxr-xr-xdeps/v8/build/android/gyp/desugar.py60
-rw-r--r--deps/v8/build/android/gyp/desugar.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/dex.py388
-rw-r--r--deps/v8/build/android/gyp/dex.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/dexsplitter.py133
-rw-r--r--deps/v8/build/android/gyp/dexsplitter.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/dist_aar.py132
-rw-r--r--deps/v8/build/android/gyp/dist_aar.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/emma_instr.py271
-rw-r--r--deps/v8/build/android/gyp/emma_instr.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/extract_unwind_tables.py288
-rwxr-xr-xdeps/v8/build/android/gyp/extract_unwind_tables_tests.py121
-rwxr-xr-xdeps/v8/build/android/gyp/filter_zip.py71
-rw-r--r--deps/v8/build/android/gyp/filter_zip.pydeps7
-rw-r--r--deps/v8/build/android/gyp/finalize_apk.py32
-rwxr-xr-xdeps/v8/build/android/gyp/find.py30
-rwxr-xr-xdeps/v8/build/android/gyp/find_sun_tools_jar.py54
-rwxr-xr-xdeps/v8/build/android/gyp/gcc_preprocess.py54
-rw-r--r--deps/v8/build/android/gyp/gcc_preprocess.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/generate_android_wrapper.py42
-rwxr-xr-xdeps/v8/build/android/gyp/generate_linker_version_script.py72
-rw-r--r--deps/v8/build/android/gyp/generate_linker_version_script.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/generate_resource_rewriter.py109
-rwxr-xr-xdeps/v8/build/android/gyp/generate_v14_compatible_resources.py281
-rwxr-xr-xdeps/v8/build/android/gyp/ijar.py24
-rw-r--r--deps/v8/build/android/gyp/ijar.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/jar.py93
-rwxr-xr-xdeps/v8/build/android/gyp/java_cpp_enum.py435
-rw-r--r--deps/v8/build/android/gyp/java_cpp_enum.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/java_cpp_enum_tests.py747
-rwxr-xr-xdeps/v8/build/android/gyp/java_cpp_strings.py213
-rw-r--r--deps/v8/build/android/gyp/java_cpp_strings.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/java_cpp_strings_tests.py105
-rwxr-xr-xdeps/v8/build/android/gyp/java_google_api_keys.py123
-rwxr-xr-xdeps/v8/build/android/gyp/java_google_api_keys_tests.py42
-rwxr-xr-xdeps/v8/build/android/gyp/javac.py595
-rw-r--r--deps/v8/build/android/gyp/javac.pydeps15
-rwxr-xr-xdeps/v8/build/android/gyp/jinja_template.py160
-rw-r--r--deps/v8/build/android/gyp/jinja_template.pydeps41
-rwxr-xr-xdeps/v8/build/android/gyp/lint.py399
-rw-r--r--deps/v8/build/android/gyp/lint.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/main_dex_list.py174
-rw-r--r--deps/v8/build/android/gyp/main_dex_list.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/merge_manifest.py174
-rw-r--r--deps/v8/build/android/gyp/merge_manifest.pydeps8
-rwxr-xr-xdeps/v8/build/android/gyp/prepare_resources.py324
-rw-r--r--deps/v8/build/android/gyp/prepare_resources.pydeps30
-rwxr-xr-xdeps/v8/build/android/gyp/proguard.py290
-rw-r--r--deps/v8/build/android/gyp/proguard.pydeps9
-rw-r--r--deps/v8/build/android/gyp/test/BUILD.gn13
-rw-r--r--deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java15
-rw-r--r--deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java12
-rw-r--r--deps/v8/build/android/gyp/util/__init__.py3
-rw-r--r--deps/v8/build/android/gyp/util/build_utils.py650
-rwxr-xr-xdeps/v8/build/android/gyp/util/build_utils_test.py48
-rwxr-xr-xdeps/v8/build/android/gyp/util/diff_utils.py44
-rw-r--r--deps/v8/build/android/gyp/util/jar_info_utils.py51
-rwxr-xr-xdeps/v8/build/android/gyp/util/java_cpp_utils.py32
-rw-r--r--deps/v8/build/android/gyp/util/md5_check.py420
-rwxr-xr-xdeps/v8/build/android/gyp/util/md5_check_test.py151
-rw-r--r--deps/v8/build/android/gyp/util/proguard_util.py236
-rw-r--r--deps/v8/build/android/gyp/util/resource_utils.py834
-rwxr-xr-xdeps/v8/build/android/gyp/util/resource_utils_test.py268
-rwxr-xr-xdeps/v8/build/android/gyp/write_build_config.py1643
-rw-r--r--deps/v8/build/android/gyp/write_build_config.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/write_ordered_libraries.py117
-rw-r--r--deps/v8/build/android/gyp/write_ordered_libraries.pydeps7
-rwxr-xr-xdeps/v8/build/android/gyp/zip.py71
-rw-r--r--deps/v8/build/android/gyp/zip.pydeps7
-rwxr-xr-xdeps/v8/build/android/host_heartbeat.py36
-rw-r--r--deps/v8/build/android/incremental_install/BUILD.gn20
-rw-r--r--deps/v8/build/android/incremental_install/README.md81
-rw-r--r--deps/v8/build/android/incremental_install/__init__.py3
-rwxr-xr-xdeps/v8/build/android/incremental_install/generate_android_manifest.py139
-rw-r--r--deps/v8/build/android/incremental_install/generate_android_manifest.pydeps29
-rwxr-xr-xdeps/v8/build/android/incremental_install/installer.py303
-rw-r--r--deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java288
-rw-r--r--deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java25
-rw-r--r--deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java291
-rw-r--r--deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java129
-rw-r--r--deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java142
-rw-r--r--deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java12
-rwxr-xr-xdeps/v8/build/android/incremental_install/write_installer_json.py80
-rw-r--r--deps/v8/build/android/incremental_install/write_installer_json.pydeps7
-rwxr-xr-xdeps/v8/build/android/lighttpd_server.py258
-rw-r--r--deps/v8/build/android/lint/OWNERS2
-rwxr-xr-xdeps/v8/build/android/lint/suppress.py138
-rw-r--r--deps/v8/build/android/lint/suppressions.xml404
-rwxr-xr-xdeps/v8/build/android/list_class_verification_failures.py282
-rw-r--r--deps/v8/build/android/list_class_verification_failures_test.py233
-rw-r--r--deps/v8/build/android/main_dex_classes.flags61
-rwxr-xr-xdeps/v8/build/android/method_count.py116
-rw-r--r--deps/v8/build/android/multidex.flags8
-rw-r--r--deps/v8/build/android/play_services/__init__.py3
-rwxr-xr-xdeps/v8/build/android/play_services/preprocess.py244
-rw-r--r--deps/v8/build/android/play_services/utils.py144
-rwxr-xr-xdeps/v8/build/android/provision_devices.py561
-rw-r--r--deps/v8/build/android/pylib/OWNERS6
-rw-r--r--deps/v8/build/android/pylib/__init__.py31
-rw-r--r--deps/v8/build/android/pylib/android/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/android/logcat_symbolizer.py98
-rw-r--r--deps/v8/build/android/pylib/base/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/base/base_test_result.py262
-rw-r--r--deps/v8/build/android/pylib/base/base_test_result_unittest.py82
-rw-r--r--deps/v8/build/android/pylib/base/environment.py49
-rw-r--r--deps/v8/build/android/pylib/base/environment_factory.py19
-rw-r--r--deps/v8/build/android/pylib/base/mock_environment.py12
-rw-r--r--deps/v8/build/android/pylib/base/mock_test_instance.py12
-rw-r--r--deps/v8/build/android/pylib/base/output_manager.py158
-rw-r--r--deps/v8/build/android/pylib/base/output_manager_factory.py16
-rw-r--r--deps/v8/build/android/pylib/base/output_manager_test_case.py14
-rw-r--r--deps/v8/build/android/pylib/base/test_collection.py80
-rw-r--r--deps/v8/build/android/pylib/base/test_exception.py8
-rw-r--r--deps/v8/build/android/pylib/base/test_instance.py40
-rw-r--r--deps/v8/build/android/pylib/base/test_instance_factory.py31
-rw-r--r--deps/v8/build/android/pylib/base/test_run.py50
-rw-r--r--deps/v8/build/android/pylib/base/test_run_factory.py56
-rw-r--r--deps/v8/build/android/pylib/base/test_server.py18
-rw-r--r--deps/v8/build/android/pylib/constants/__init__.py274
-rw-r--r--deps/v8/build/android/pylib/constants/host_paths.py95
-rwxr-xr-xdeps/v8/build/android/pylib/constants/host_paths_unittest.py50
-rw-r--r--deps/v8/build/android/pylib/content_settings.py80
-rw-r--r--deps/v8/build/android/pylib/device/__init__.py0
-rw-r--r--deps/v8/build/android/pylib/device/commands/BUILD.gn20
-rw-r--r--deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java95
-rw-r--r--deps/v8/build/android/pylib/device_settings.py199
-rw-r--r--deps/v8/build/android/pylib/gtest/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/gtest/filter/OWNERS1
-rw-r--r--deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled25
-rw-r--r--deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled10
-rw-r--r--deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled9
-rw-r--r--deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled50
-rw-r--r--deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled80
-rw-r--r--deps/v8/build/android/pylib/gtest/gtest_config.py57
-rw-r--r--deps/v8/build/android/pylib/gtest/gtest_test_instance.py530
-rwxr-xr-xdeps/v8/build/android/pylib/gtest/gtest_test_instance_test.py217
-rw-r--r--deps/v8/build/android/pylib/instrumentation/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py105
-rwxr-xr-xdeps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py134
-rw-r--r--deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py944
-rwxr-xr-xdeps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py972
-rw-r--r--deps/v8/build/android/pylib/instrumentation/json_perf_parser.py161
-rw-r--r--deps/v8/build/android/pylib/instrumentation/render_test.html.jinja40
-rw-r--r--deps/v8/build/android/pylib/instrumentation/test_result.py30
-rw-r--r--deps/v8/build/android/pylib/junit/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/junit/junit_test_instance.py80
-rw-r--r--deps/v8/build/android/pylib/linker/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/linker/linker_test_instance.py51
-rw-r--r--deps/v8/build/android/pylib/linker/test_case.py215
-rw-r--r--deps/v8/build/android/pylib/local/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/local/device/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_environment.py300
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_gtest_run.py635
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py965
-rwxr-xr-xdeps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py69
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py75
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py126
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py538
-rw-r--r--deps/v8/build/android/pylib/local/device/local_device_test_run.py251
-rwxr-xr-xdeps/v8/build/android/pylib/local/device/local_device_test_run_test.py174
-rw-r--r--deps/v8/build/android/pylib/local/local_test_server_spawner.py100
-rw-r--r--deps/v8/build/android/pylib/local/machine/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/local/machine/local_machine_environment.py24
-rw-r--r--deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py136
-rw-r--r--deps/v8/build/android/pylib/monkey/__init__.py0
-rw-r--r--deps/v8/build/android/pylib/monkey/monkey_test_instance.py72
-rw-r--r--deps/v8/build/android/pylib/output/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/output/local_output_manager.py45
-rwxr-xr-xdeps/v8/build/android/pylib/output/local_output_manager_test.py34
-rw-r--r--deps/v8/build/android/pylib/output/noop_output_manager.py42
-rwxr-xr-xdeps/v8/build/android/pylib/output/noop_output_manager_test.py27
-rw-r--r--deps/v8/build/android/pylib/output/remote_output_manager.py89
-rwxr-xr-xdeps/v8/build/android/pylib/output/remote_output_manager_test.py34
-rw-r--r--deps/v8/build/android/pylib/perf/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/perf/perf_test_instance.py239
-rw-r--r--deps/v8/build/android/pylib/pexpect.py21
-rwxr-xr-xdeps/v8/build/android/pylib/restart_adbd.sh20
-rw-r--r--deps/v8/build/android/pylib/results/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py699
-rw-r--r--deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py213
-rw-r--r--deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py176
-rw-r--r--deps/v8/build/android/pylib/results/json_results.py154
-rwxr-xr-xdeps/v8/build/android/pylib/results/json_results_test.py207
-rw-r--r--deps/v8/build/android/pylib/results/presentation/__init__.py3
-rw-r--r--deps/v8/build/android/pylib/results/presentation/javascript/main_html.js214
-rwxr-xr-xdeps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py168
-rw-r--r--deps/v8/build/android/pylib/results/presentation/template/main.html97
-rw-r--r--deps/v8/build/android/pylib/results/presentation/template/table.html60
-rwxr-xr-xdeps/v8/build/android/pylib/results/presentation/test_results_presentation.py543
-rw-r--r--deps/v8/build/android/pylib/results/report_results.py131
-rw-r--r--deps/v8/build/android/pylib/symbols/__init__.py0
-rwxr-xr-xdeps/v8/build/android/pylib/symbols/apk_lib_dump.py59
-rw-r--r--deps/v8/build/android/pylib/symbols/apk_native_libs.py419
-rw-r--r--deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py396
-rw-r--r--deps/v8/build/android/pylib/symbols/deobfuscator.py165
-rw-r--r--deps/v8/build/android/pylib/symbols/elf_symbolizer.py487
-rwxr-xr-xdeps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py196
-rw-r--r--deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py0
-rwxr-xr-xdeps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line79
-rw-r--r--deps/v8/build/android/pylib/symbols/stack_symbolizer.py81
-rw-r--r--deps/v8/build/android/pylib/symbols/symbol_utils.py812
-rw-r--r--deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py943
-rw-r--r--deps/v8/build/android/pylib/utils/__init__.py0
-rw-r--r--deps/v8/build/android/pylib/utils/app_bundle_utils.py140
-rw-r--r--deps/v8/build/android/pylib/utils/argparse_utils.py50
-rw-r--r--deps/v8/build/android/pylib/utils/decorators.py37
-rwxr-xr-xdeps/v8/build/android/pylib/utils/decorators_test.py104
-rw-r--r--deps/v8/build/android/pylib/utils/device_dependencies.py117
-rwxr-xr-xdeps/v8/build/android/pylib/utils/device_dependencies_test.py56
-rw-r--r--deps/v8/build/android/pylib/utils/dexdump.py115
-rwxr-xr-xdeps/v8/build/android/pylib/utils/dexdump_test.py141
-rw-r--r--deps/v8/build/android/pylib/utils/google_storage_helper.py126
-rw-r--r--deps/v8/build/android/pylib/utils/instrumentation_tracing.py204
-rw-r--r--deps/v8/build/android/pylib/utils/logdog_helper.py94
-rw-r--r--deps/v8/build/android/pylib/utils/logging_utils.py136
-rwxr-xr-xdeps/v8/build/android/pylib/utils/maven_downloader.py137
-rw-r--r--deps/v8/build/android/pylib/utils/proguard.py288
-rwxr-xr-xdeps/v8/build/android/pylib/utils/proguard_test.py495
-rw-r--r--deps/v8/build/android/pylib/utils/repo_utils.py16
-rw-r--r--deps/v8/build/android/pylib/utils/shared_preference_utils.py95
-rw-r--r--deps/v8/build/android/pylib/utils/simpleperf.py259
-rw-r--r--deps/v8/build/android/pylib/utils/test_filter.py139
-rwxr-xr-xdeps/v8/build/android/pylib/utils/test_filter_test.py233
-rw-r--r--deps/v8/build/android/pylib/utils/time_profile.py45
-rw-r--r--deps/v8/build/android/pylib/utils/xvfb.py58
-rw-r--r--deps/v8/build/android/pylib/valgrind_tools.py129
-rw-r--r--deps/v8/build/android/pylintrc15
-rw-r--r--deps/v8/build/android/resource_sizes.gni39
-rwxr-xr-xdeps/v8/build/android/resource_sizes.py769
-rw-r--r--deps/v8/build/android/resource_sizes.pydeps63
-rwxr-xr-xdeps/v8/build/android/screenshot.py13
-rw-r--r--deps/v8/build/android/stacktrace/BUILD.gn17
-rw-r--r--deps/v8/build/android/stacktrace/README.md23
-rwxr-xr-xdeps/v8/build/android/stacktrace/crashpad_stackwalker.py173
-rw-r--r--deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java116
-rwxr-xr-xdeps/v8/build/android/stacktrace/java_deobfuscate_test.py172
-rwxr-xr-xdeps/v8/build/android/stacktrace/stackwalker.py135
-rwxr-xr-xdeps/v8/build/android/test_runner.py1065
-rw-r--r--deps/v8/build/android/test_runner.pydeps210
-rwxr-xr-xdeps/v8/build/android/test_wrapper/logdog_wrapper.py136
-rw-r--r--deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps12
-rw-r--r--deps/v8/build/android/tests/symbolize/Makefile11
-rw-r--r--deps/v8/build/android/tests/symbolize/a.cc14
-rw-r--r--deps/v8/build/android/tests/symbolize/b.cc14
-rwxr-xr-xdeps/v8/build/android/tombstones.py282
-rwxr-xr-xdeps/v8/build/android/update_deps/update_third_party_deps.py142
-rwxr-xr-xdeps/v8/build/android/update_verification.py115
-rwxr-xr-xdeps/v8/build/android/video_recorder.py13
411 files changed, 52539 insertions, 0 deletions
diff --git a/deps/v8/build/android/.style.yapf b/deps/v8/build/android/.style.yapf
new file mode 100644
index 0000000000..ef24bfc6b1
--- /dev/null
+++ b/deps/v8/build/android/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+column_limit = 80
+blank_line_before_nested_class_or_def = true
+blank_line_before_module_docstring = true
+indent_width = 2
diff --git a/deps/v8/build/android/AndroidManifest.xml b/deps/v8/build/android/AndroidManifest.xml
new file mode 100644
index 0000000000..fe21b80b4b
--- /dev/null
+++ b/deps/v8/build/android/AndroidManifest.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2012 The Chromium Authors. All rights reserved. Use of this
+ source code is governed by a BSD-style license that can be found in the
+ LICENSE file.
+-->
+
+<!--
+ This is a dummy manifest which is required by:
+ 1. aapt when generating R.java in java.gypi:
+ Nothing in the manifest is used, but it is still required by aapt.
+ 2. lint: [min|target]SdkVersion are required by lint and should
+ be kept up to date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.dummy">
+
+ <uses-sdk android:minSdkVersion="19" android:targetSdkVersion="24" />
+
+</manifest>
diff --git a/deps/v8/build/android/BUILD.gn b/deps/v8/build/android/BUILD.gn
new file mode 100644
index 0000000000..f864430562
--- /dev/null
+++ b/deps/v8/build/android/BUILD.gn
@@ -0,0 +1,135 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+import("//build/config/python.gni")
+
+if (enable_java_templates) {
+ sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar"
+
+ # Create or update the API versions cache if necessary by running a
+ # functionally empty lint task. This prevents racy creation of the
+ # cache while linting java targets in android_lint.
+ android_lint("prepare_android_lint_cache") {
+ android_manifest = "//build/android/AndroidManifest.xml"
+ create_cache = true
+ }
+
+ action("find_sun_tools_jar") {
+ script = "//build/android/gyp/find_sun_tools_jar.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ sun_tools_jar_path,
+ ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--output",
+ rebase_path(sun_tools_jar_path, root_build_dir),
+ ]
+ }
+
+ java_prebuilt("sun_tools_java") {
+ jar_path = sun_tools_jar_path
+ deps = [
+ ":find_sun_tools_jar",
+ ]
+ }
+
+ # Write to a file some GN vars that are useful to scripts that use the output
+  # directory. Format is chosen as easily importable by both python and bash.
+ _lines = [
+ "android_sdk_build_tools=" +
+ rebase_path(android_sdk_build_tools, root_build_dir),
+ "android_sdk_build_tools_version=$android_sdk_build_tools_version",
+ "android_sdk_tools_version_suffix=$android_sdk_tools_version_suffix",
+ "android_sdk_root=" + rebase_path(android_sdk_root, root_build_dir),
+ "android_sdk_version=$android_sdk_version",
+ "android_ndk_root=" + rebase_path(android_ndk_root, root_build_dir),
+ "android_tool_prefix=" + rebase_path(android_tool_prefix, root_build_dir),
+ ]
+ if (defined(android_secondary_abi_cpu)) {
+ _secondary_label_info =
+ get_label_info(":foo($android_secondary_abi_toolchain)", "root_out_dir")
+ _lines += [ "android_secondary_abi_toolchain=" +
+ rebase_path(_secondary_label_info, root_build_dir) ]
+ }
+ if (defined(build_apk_secondary_abi)) {
+ _lines += [ "build_apk_secondary_abi=$build_apk_secondary_abi" ]
+ }
+ write_file(android_build_vars, _lines)
+}
+
+python_library("devil_chromium_py") {
+ pydeps_file = "devil_chromium.pydeps"
+ data = [
+ "devil_chromium.py",
+ "devil_chromium.json",
+ "//third_party/catapult/third_party/gsutil/",
+ "//third_party/catapult/devil/devil/devil_dependencies.json",
+ ]
+}
+
+python_library("test_runner_py") {
+ pydeps_file = "test_runner.pydeps"
+ data = [
+ "pylib/gtest/filter/",
+ "pylib/instrumentation/render_test.html.jinja",
+ "test_wrapper/logdog_wrapper.py",
+ "${android_sdk_build_tools}/aapt",
+ "${android_sdk_build_tools}/dexdump",
+ "${android_sdk_build_tools}/lib64/libc++.so",
+ "${android_sdk_build_tools}/split-select",
+ "${android_sdk_root}/platform-tools/adb",
+ ]
+ data_deps = [
+ ":devil_chromium_py",
+ ]
+ if (is_asan) {
+ data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ]
+ }
+
+ # Proguard is needed only when using apks (rather than native executables).
+ if (enable_java_templates) {
+ deps = [
+ "//third_party/proguard:proguard603_java",
+ ]
+ }
+}
+
+python_library("logdog_wrapper_py") {
+ pydeps_file = "test_wrapper/logdog_wrapper.pydeps"
+}
+
+python_library("resource_sizes_py") {
+ pydeps_file = "resource_sizes.pydeps"
+ data_deps = [
+ ":devil_chromium_py",
+ ]
+ data = [
+ android_build_vars,
+ android_readelf,
+ ]
+}
+
+# Create wrapper scripts in out/bin that takes care of setting the
+# --output-directory.
+_scripts_to_wrap = [
+ "asan_symbolize.py",
+ "tombstones.py",
+]
+
+_wrapper_targets = []
+foreach(script, _scripts_to_wrap) {
+ _target_name = get_path_info(script, "name") + "_wrapper"
+ _wrapper_targets += [ ":$_target_name" ]
+ wrapper_script(_target_name) {
+ target = script
+ }
+}
+
+group("wrapper_scripts") {
+ deps = _wrapper_targets
+}
diff --git a/deps/v8/build/android/CheckInstallApk-debug.apk b/deps/v8/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000000..3dc31910a5
--- /dev/null
+++ b/deps/v8/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/deps/v8/build/android/OWNERS b/deps/v8/build/android/OWNERS
new file mode 100644
index 0000000000..2feaebcf83
--- /dev/null
+++ b/deps/v8/build/android/OWNERS
@@ -0,0 +1,7 @@
+estevenson@chromium.org
+jbudorick@chromium.org
+pasko@chromium.org
+perezju@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/PRESUBMIT.py b/deps/v8/build/android/PRESUBMIT.py
new file mode 100644
index 0000000000..0ec045cc23
--- /dev/null
+++ b/deps/v8/build/android/PRESUBMIT.py
@@ -0,0 +1,97 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ output = []
+
+ build_android_dir = input_api.PresubmitLocalPath()
+
+ def J(*dirs):
+ """Returns a path relative to presubmit directory."""
+ return input_api.os_path.join(build_android_dir, *dirs)
+
+ build_pys = [
+ r'gyp/.*\.py$',
+ r'gn/.*\.py',
+ ]
+ output.extend(input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ pylintrc='pylintrc',
+ black_list=build_pys,
+ extra_paths_list=[
+ J(),
+ J('gyp'),
+ J('buildbot'),
+ J('..', 'util', 'lib', 'common'),
+ J('..', '..', 'third_party', 'catapult', 'common', 'py_trace_event'),
+ J('..', '..', 'third_party', 'catapult', 'common', 'py_utils'),
+ J('..', '..', 'third_party', 'catapult', 'devil'),
+ J('..', '..', 'third_party', 'catapult', 'tracing'),
+ J('..', '..', 'third_party', 'depot_tools'),
+ ]))
+ output.extend(input_api.canned_checks.RunPylint(
+ input_api,
+ output_api,
+ white_list=build_pys,
+ extra_paths_list=[J('gyp'), J('gn')]))
+
+ # Disabled due to http://crbug.com/410936
+ #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+ #input_api, output_api, J('buildbot', 'tests')))
+
+ pylib_test_env = dict(input_api.environ)
+ pylib_test_env.update({
+ 'PYTHONPATH': build_android_dir,
+ 'PYTHONDONTWRITEBYTECODE': '1',
+ })
+ output.extend(
+ input_api.canned_checks.RunUnitTests(
+ input_api,
+ output_api,
+ unit_tests=[
+ J('.', 'emma_coverage_stats_test.py'),
+ J('.', 'list_class_verification_failures_test.py'),
+ J('gyp', 'util', 'build_utils_test.py'),
+ J('gyp', 'util', 'md5_check_test.py'),
+ J('gyp', 'util', 'resource_utils_test.py'),
+ J('pylib', 'constants', 'host_paths_unittest.py'),
+ J('pylib', 'gtest', 'gtest_test_instance_test.py'),
+ J('pylib', 'instrumentation',
+ 'instrumentation_test_instance_test.py'),
+ J('pylib', 'local', 'device',
+ 'local_device_instrumentation_test_run_test.py'),
+ J('pylib', 'local', 'device', 'local_device_test_run_test.py'),
+ J('pylib', 'output', 'local_output_manager_test.py'),
+ J('pylib', 'output', 'noop_output_manager_test.py'),
+ J('pylib', 'output', 'remote_output_manager_test.py'),
+ J('pylib', 'results', 'json_results_test.py'),
+ J('pylib', 'symbols', 'apk_native_libs_unittest.py'),
+ J('pylib', 'symbols', 'elf_symbolizer_unittest.py'),
+ J('pylib', 'symbols', 'symbol_utils_unittest.py'),
+ J('pylib', 'utils', 'decorators_test.py'),
+ J('pylib', 'utils', 'device_dependencies_test.py'),
+ J('pylib', 'utils', 'dexdump_test.py'),
+ J('pylib', 'utils', 'proguard_test.py'),
+ J('pylib', 'utils', 'test_filter_test.py'),
+ J('.', 'convert_dex_profile_tests.py'),
+ ],
+ env=pylib_test_env))
+
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/deps/v8/build/android/adb_chrome_public_command_line b/deps/v8/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000000..86ece8cec7
--- /dev/null
+++ b/deps/v8/build/android/adb_chrome_public_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+# adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+# adb_chrome_public_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name chrome-command-line "$@"
diff --git a/deps/v8/build/android/adb_command_line.py b/deps/v8/build/android/adb_command_line.py
new file mode 100755
index 0000000000..2f3a615a35
--- /dev/null
+++ b/deps/v8/build/android/adb_command_line.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for reading / writing command-line flag files on device(s)."""
+
+import argparse
+import logging
+import sys
+
+import devil_chromium # pylint: disable=import-error, unused-import
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.tools import script_common
+from devil.utils import cmd_helper
+from devil.utils import logging_common
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.usage = '''%(prog)s --name FILENAME [--device SERIAL] [flags...]
+
+No flags: Prints existing command-line file.
+Empty string: Deletes command-line file.
+Otherwise: Writes command-line file.
+
+'''
+ parser.add_argument('--name', required=True,
+ help='Name of file where to store flags on the device.')
+ parser.add_argument('-e', '--executable', dest='executable', default='chrome',
+ help='(deprecated) No longer used.')
+ script_common.AddEnvironmentArguments(parser)
+ script_common.AddDeviceArguments(parser)
+ logging_common.AddLoggingArguments(parser)
+
+ args, remote_args = parser.parse_known_args()
+ script_common.InitializeEnvironment(args)
+ logging_common.InitializeLogging(args)
+
+ devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices,
+ default_retries=0)
+ all_devices = device_utils.DeviceUtils.parallel(devices)
+
+ if not remote_args:
+ # No args == do not update, just print flags.
+ remote_args = None
+ action = ''
+ elif len(remote_args) == 1 and not remote_args[0]:
+ # Single empty string arg == delete flags
+ remote_args = []
+ action = 'Deleted command line file. '
+ else:
+ action = 'Wrote command line file. '
+
+ is_webview = args.name == 'webview-command-line'
+
+ def update_flags(device):
+ if device.IsUserBuild() and is_webview:
+ raise device_errors.CommandFailedError(
+ 'WebView only respects flags on a userdebug or eng device, yours '
+ 'is a user build.', device)
+ elif device.IsUserBuild():
+ logging.warning(
+ 'Your device (%s) is a user build; Chrome may or may not pick up '
+ 'your commandline flags. Check your '
+ '"command_line_on_non_rooted_enabled" preference, or switch '
+ 'devices.', device)
+ changer = flag_changer.FlagChanger(device, args.name)
+ if remote_args is not None:
+ flags = changer.ReplaceFlags(remote_args)
+ else:
+ flags = changer.GetCurrentFlags()
+ return (device, device.build_description, flags)
+
+ updated_values = all_devices.pMap(update_flags).pGet(None)
+
+ print '%sCurrent flags (in %s):' % (action, args.name)
+ for d, desc, flags in updated_values:
+ if flags:
+ # Shell-quote flags for easy copy/paste as new args on the terminal.
+ quoted_flags = ' '.join(cmd_helper.SingleQuote(f) for f in sorted(flags))
+ else:
+ quoted_flags = '( empty )'
+ print ' %s (%s): %s' % (d, desc, quoted_flags)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/adb_gdb b/deps/v8/build/android/adb_gdb
new file mode 100755
index 0000000000..1dc3ce5f3b
--- /dev/null
+++ b/deps/v8/build/android/adb_gdb
@@ -0,0 +1,1000 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Force locale to C to allow recognizing output from subprocesses.
+LC_ALL=C
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+COMMAND_SUFFIX=
+
+clean_exit () {
+ if [ "$TMPDIR" ]; then
+ GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+ if [ "$GDBSERVER_PID" ]; then
+ log "Killing background gdbserver process: $GDBSERVER_PID"
+ kill -9 $GDBSERVER_PID >/dev/null 2>&1
+ rm -f "$GDBSERVER_PIDFILE"
+ fi
+ if [ "$TARGET_GDBSERVER" ]; then
+ log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+ "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" \
+ "$TARGET_DOMAIN_SOCKET" "$COMMAND_SUFFIX" >/dev/null 2>&1
+ fi
+ log "Cleaning up: $TMPDIR"
+ rm -rf "$TMPDIR"
+ fi
+ trap "" EXIT
+ exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+ echo "ERROR: $@" >&2
+ exit 1
+}
+
+fail_panic () {
+ if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+ if [ "$VERBOSE" -gt 0 ]; then
+ echo "$@"
+ fi
+}
+
+DEFAULT_PULL_LIBS_DIR="/tmp/adb-gdb-support-$USER"
+IDE_DIR="$DEFAULT_PULL_LIBS_DIR"
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display proper program names in help and error messages.
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ADB=
+ANNOTATE=
+CGDB=
+GDBINIT=
+GDBSERVER=
+HELP=
+IDE=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+ATTACH_DELAY=1
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+ optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+ case $opt in
+ --adb=*)
+ ADB=$optarg
+ ;;
+ --device=*)
+ export ANDROID_SERIAL=$optarg
+ ;;
+    --annotate=*)
+ ANNOTATE=$optarg
+ ;;
+ --gdbserver=*)
+ GDBSERVER=$optarg
+ ;;
+ --gdb=*)
+ GDB=$optarg
+ ;;
+ --help|-h|-?)
+ HELP=true
+ ;;
+ --ide)
+ IDE=true
+ ;;
+ --ndk-dir=*)
+ NDK_DIR=$optarg
+ ;;
+ --no-pull-libs)
+ NO_PULL_LIBS=true
+ ;;
+ --package-name=*)
+ PACKAGE_NAME=$optarg
+ ;;
+ --pid=*)
+ PID=$optarg
+ ;;
+ --port=*)
+ PORT=$optarg
+ ;;
+ --program-name=*)
+ PROGRAM_NAME=$optarg
+ ;;
+ --pull-libs)
+ PULL_LIBS=true
+ ;;
+ --pull-libs-dir=*)
+ PULL_LIBS_DIR=$optarg
+ ;;
+ --script=*)
+ GDBINIT=$optarg
+ ;;
+ --attach-delay=*)
+ ATTACH_DELAY=$optarg
+ ;;
+ --su-prefix=*)
+ SU_PREFIX=$optarg
+ ;;
+ --symbol-dir=*)
+ SYMBOL_DIR=$optarg
+ ;;
+ --output-directory=*)
+ CHROMIUM_OUTPUT_DIR=$optarg
+ ;;
+ --target-arch=*)
+ TARGET_ARCH=$optarg
+ ;;
+ --toolchain=*)
+ TOOLCHAIN=$optarg
+ ;;
+ --cgdb)
+ CGDB=cgdb
+ ;;
+ --cgdb=*)
+ CGDB=$optarg
+ ;;
+ --verbose)
+ VERBOSE=$(( $VERBOSE + 1 ))
+ ;;
+ -*)
+ panic "Unknown option $opt, see --help." >&2
+ ;;
+ *)
+ if [ "$PACKAGE_NAME" ]; then
+ panic "You can only provide a single package name as argument!\
+ See --help."
+ fi
+ PACKAGE_NAME=$opt
+ ;;
+ esac
+done
+
+if [ "$HELP" ]; then
+ if [ "$ADB_GDB_PROGNAME" ]; then
+ # Assume wrapper scripts all provide a default package name.
+ cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+ else
+ # Assume this is a direct call to adb_gdb
+ cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+ fi
+
+ cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+ - target gdbserver binary
+ - host gdb client (e.g. arm-linux-androideabi-gdb)
+ - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+ \$CHROMIUM_SRC/<out>/lib/ (used by GYP builds)
+ \$CHROMIUM_SRC/<out>/lib.unstripped/ (used by GN builds)
+
+Where <out> is determined by CHROMIUM_OUTPUT_DIR, or --output-directory.
+
+You can set the path manually via --symbol-dir.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-lib options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and a device is not specified with either --device or ANDROID_SERIAL).
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to strongly improve the debugging experience, like allowing
+readable thread stacks and more. The libraries are copied to the following
+directory by default:
+
+ $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+ --help|-h|-? Print this message.
+ --verbose Increase verbosity.
+
+ --cgdb[=<file>] Use cgdb (an interface for gdb that shows the code).
+ --symbol-dir=<path> Specify directory with symbol shared libraries.
+ --output-directory=<path> Specify the output directory (e.g. "out/Debug").
+ --package-name=<name> Specify package name (alternative to 1st argument).
+ --program-name=<name> Specify program name (cosmetic only).
+ --pid=<pid> Specify application process pid.
+ --attach-delay=<num> Seconds to wait for gdbserver to attach to the
+ remote process before starting gdb. Default 1.
+ <num> may be a float if your sleep(1) supports it.
+ --annotate=<num> Enable gdb annotation.
+ --script=<file> Specify extra GDB init script.
+
+ --gdbserver=<file> Specify target gdbserver binary.
+ --gdb=<file> Specify host gdb client binary.
+ --target-arch=<name> Specify NDK target arch.
+ --adb=<file> Specify host ADB binary.
+ --device=<file> ADB device serial to use (-s flag).
+ --port=<port> Specify the tcp port to use.
+ --ide Forward gdb port, but do not enter gdb console.
+
+ --su-prefix=<prefix> Prepend <prefix> to 'adb shell' commands that are
+ run by this script. This can be useful to use
+ the 'su' program on rooted production devices.
+ e.g. --su-prefix="su -c"
+
+ --pull-libs Force system libraries extraction.
+ --no-pull-libs Do not extract any system library.
+  --pull-libs-dir=<path>  Specify system libraries extraction directory.
+
+EOF
+ exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+ panic "Please specify a package name on the command line. See --help."
+fi
+
+if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then
+ if [[ -e "build.ninja" ]]; then
+ CHROMIUM_OUTPUT_DIR=$PWD
+ else
+ panic "Please specify an output directory by using one of:
+ --output-directory=out/Debug
+ CHROMIUM_OUTPUT_DIR=out/Debug
+ Setting working directory to an output directory.
+ See --help."
+ fi
+fi
+
+if ls *.so >/dev/null 2>&1; then
+ panic ".so files found in your working directory. These will conflict with" \
+ "library lookup logic. Change your working directory and try again."
+fi
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_OUTPUT_DIR.
+#
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+ # GYP places unstripped libraries under out/lib
+ # GN places them under out/lib.unstripped
+ local PARENT_DIR="$CHROMIUM_OUTPUT_DIR"
+ if [[ ! -e "$PARENT_DIR" ]]; then
+ PARENT_DIR="$CHROMIUM_SRC/$PARENT_DIR"
+ fi
+ SYMBOL_DIR="$PARENT_DIR/lib.unstripped"
+ if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+ SYMBOL_DIR="$PARENT_DIR/lib"
+ if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+ panic "Could not find any symbols under \
+$PARENT_DIR/lib{.unstripped}. Please build the program first!"
+ fi
+ fi
+ log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+ detect_symbol_dir
+elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then
+ panic "Could not find any symbols under $SYMBOL_DIR"
+fi
+
+if [ -z "$NDK_DIR" ]; then
+ ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+ if [ ! -d "$NDK_DIR" ]; then
+ panic "Invalid directory: $NDK_DIR"
+ fi
+ if [ ! -f "$NDK_DIR/ndk-build" ]; then
+ panic "Not a valid NDK directory: $NDK_DIR"
+ fi
+ ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+ panic "Unknown --script file: $GDBINIT"
+fi
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+ ADB=$(which adb 2>/dev/null)
+ if [ -z "$ADB" ]; then
+ panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+ fi
+ log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+ panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If there are more than one device connected, and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+ echo "ERROR: There is more than one Android device connected to ADB."
+ echo "Please define ANDROID_SERIAL to specify which one to use."
+ exit 1
+fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+ local TMPOUT="$(mktemp)"
+ local LASTLINE RET
+ local ADB=${ADB:-adb}
+
+ # The weird sed rule is to strip the final \r on each output line
+ # Since 'adb shell' never returns the command's proper exit/status code,
+ # we force it to print it as '%%<status>' in the temporary output file,
+ # which we will later strip from it.
+ $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+ sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+ # Get last line in log, which contains the exit code from the command
+ LASTLINE=$(sed -e '$!d' $TMPOUT)
+ # Extract the status code from the end of the line, which must
+ # be '%%<code>'.
+ RET=$(echo "$LASTLINE" | \
+ awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+ # Remove the status code from the last line. Note that this may result
+ # in an empty line.
+ LASTLINE=$(echo "$LASTLINE" | \
+ awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+ # The output itself: all lines except the status code.
+ sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+ # Remove temp file.
+ rm -f $TMPOUT
+ # Exit with the appropriate status.
+ return $RET
+}
+
+# Find the target architecture from a local shared library.
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+ # ls prints a broken pipe error when there are a lot of libs.
+ local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null| head -n1)
+ local SO_DESC=$(file $RANDOM_LIB)
+  case $SO_DESC in
+ *32-bit*ARM,*) echo "arm";;
+ *64-bit*ARM,*) echo "arm64";;
+ *32-bit*Intel,*) echo "x86";;
+ *x86-64,*) echo "x86_64";;
+ *32-bit*MIPS,*) echo "mips";;
+ *) echo "";
+ esac
+}
+
+if [ -z "$TARGET_ARCH" ]; then
+ TARGET_ARCH=$(get_gyp_target_arch)
+ if [ -z "$TARGET_ARCH" ]; then
+ TARGET_ARCH=arm
+ fi
+else
+ # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+ # script prefers the NDK 'x86' name instead because it uses it to find
+ # NDK-specific files (host gdb) with it.
+ if [ "$TARGET_ARCH" = "ia32" ]; then
+ TARGET_ARCH=x86
+ log "Auto-config: --arch=$TARGET_ARCH (equivalent to ia32)"
+ fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+ local HOST_OS
+ if [ -z "$NDK_HOST_SYSTEM" ]; then
+ HOST_OS=$(uname -s)
+ case $HOST_OS in
+ Linux) NDK_HOST_SYSTEM=linux;;
+ Darwin) NDK_HOST_SYSTEM=darwin;;
+ *) panic "You can't run this script on this system: $HOST_OS";;
+ esac
+ fi
+ echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+ local HOST_ARCH HOST_OS
+ if [ -z "$NDK_HOST_ARCH" ]; then
+ HOST_OS=$(get_ndk_host_system)
+ HOST_ARCH=$(uname -p)
+ if [ "$HOST_ARCH" = "unknown" ]; then
+ # In case where "-p" returns "unknown" just use "-m" (machine hardware
+ # name). According to this patch from Fedora "-p" is equivalent to "-m"
+ # anyway: https://goo.gl/Pd47x3
+ HOST_ARCH=$(uname -m)
+ fi
+ case $HOST_ARCH in
+ i?86) NDK_HOST_ARCH=x86;;
+ x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+ *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+ esac
+ # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+ if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+ # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+ # implementations of the tool. See http://b.android.com/53769
+ HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+ if [ "$HOST_64BITS" ]; then
+ NDK_HOST_ARCH=x86_64
+ fi
+ fi
+ fi
+ echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+ case $1 in
+ arm)
+ echo "arm-linux-androideabi"
+ ;;
+ arm64)
+ echo "aarch64-linux-android"
+ ;;
+ x86)
+ echo "i686-linux-android"
+ ;;
+ x86_64)
+ echo "x86_64-linux-android"
+ ;;
+ mips)
+ echo "mipsel-linux-android"
+ ;;
+ *)
+ echo "$ARCH-linux-android"
+ ;;
+ esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+ # Return the configure triplet, except for x86 and x86_64!
+ if [ "$1" = "x86" -o "$1" = "x86_64" ]; then
+ echo "$1"
+ else
+ get_arch_gnu_config $1
+ fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+ local NDK_DIR="${1%/}"
+ local ARCH="$2"
+ local SUBPATH="$3"
+ local NAME="$(get_arch_toolchain_prefix $ARCH)"
+ local FILE TARGET
+ FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+ if [ ! -f "$FILE" ]; then
+ FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+ if [ ! -f "$FILE" ]; then
+ FILE=
+ fi
+ fi
+ echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+# ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+ local NDK_DIR="$1"
+ local ARCH="$2"
+ local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+
+ # NOTE: This will need to be updated if the NDK changes the names or moves
+ # the location of its prebuilt toolchains.
+ #
+ GCC=
+ HOST_OS=$(get_ndk_host_system)
+ HOST_ARCH=$(get_ndk_host_arch)
+ CONFIG=$(get_arch_gnu_config $ARCH)
+ GCC=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
+ if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
+ GCC=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+ fi
+ if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+ # Special case, the x86 toolchain used to be incorrectly
+ # named i686-android-linux-gcc!
+ GCC=$(get_ndk_toolchain_prebuilt \
+ "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+ fi
+ if [ -z "$GCC" ]; then
+ panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+ fi
+ echo "${GCC%%gcc}"
+}
+
+# $1: NDK install path
+get_ndk_host_gdb_client() {
+ local NDK_DIR="$1"
+ local HOST_OS HOST_ARCH
+
+ HOST_OS=$(get_ndk_host_system)
+ HOST_ARCH=$(get_ndk_host_arch)
+ echo "$NDK_DIR/prebuilt/$HOST_OS-$HOST_ARCH/bin/gdb"
+}
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+ local NDK_DIR="$1"
+ local ARCH=$2
+ local BINARY
+
+ # The location has moved after NDK r8
+ BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+ if [ ! -f "$BINARY" ]; then
+ BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+ fi
+ echo "$BINARY"
+}
+
# Check/probe the path to the Android toolchain installation. Always
# use the NDK versions of gdb and gdbserver. They must match to avoid
# issues when both binaries do not speak the same wire protocol.
#
if [ -z "$TOOLCHAIN" ]; then
  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
  # The helper returns a tool prefix; keep only its directory part.
  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
else
  # Be flexible, allow one to specify either the install path or the bin
  # sub-directory in --toolchain:
  #
  if [ -d "$TOOLCHAIN/bin" ]; then
    TOOLCHAIN=$TOOLCHAIN/bin
  fi
  ANDROID_TOOLCHAIN=$TOOLCHAIN
fi

# Cosmetic: Remove trailing directory separator.
ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}

# Find host GDB client binary
if [ -z "$GDB" ]; then
  GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT")
  if [ -z "$GDB" ]; then
    panic "Can't find Android gdb client in your path, check your \
--toolchain or --gdb path."
  fi
  log "Host gdb client: $GDB"
fi

# Find gdbserver binary, we will later push it to /data/local/tmp
# This ensures that both gdbserver and $GDB talk the same binary protocol,
# otherwise weird problems will appear.
#
if [ -z "$GDBSERVER" ]; then
  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
  if [ -z "$GDBSERVER" ]; then
    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
valid one!"
  fi
  log "Auto-config: --gdbserver=$GDBSERVER"
fi

# A unique ID for this script's session. This needs to be the same in all
# sub-shell commands we're going to launch, so take the PID of the launcher
# process.
TMP_ID=$$

# Temporary directory, will get cleaned up on exit.
# NOTE(review): the directory name is predictable ($USER + PID), so this is
# open to a /tmp symlink race on shared machines — consider mktemp -d.
TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*

GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid

# Return the timestamp of a given file, as number of seconds since epoch.
# $1: file path
# Out: file timestamp
# NOTE(review): 'stat -c' is the GNU coreutils flavor; presumably this script
# is Linux-only — BSD/macOS stat uses -f and would print nothing here.
get_file_timestamp () {
  stat -c %Y "$1" 2>/dev/null
}
+
# Allow several concurrent debugging sessions
# (every pushed artifact below embeds $TMP_ID so sessions don't collide).
APP_DATA_DIR=$(adb_shell run-as $PACKAGE_NAME /system/bin/sh -c pwd)
fail_panic "Failed to run-as $PACKAGE_NAME, is the app debuggable?"
TARGET_GDBSERVER="$APP_DATA_DIR/gdbserver-adb-gdb-$TMP_ID"
TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID

# Select correct app_process for architecture.
case $TARGET_ARCH in
  arm|x86|mips) GDBEXEC=app_process32;;
  arm64|x86_64) GDBEXEC=app_process64; SUFFIX_64_BIT=64;;
  *) panic "Unknown app_process for architecture!";;
esac

# Default to app_process if bit-width specific process isn't found.
adb_shell ls /system/bin/$GDBEXEC > /dev/null
if [ $? != 0 ]; then
  GDBEXEC=app_process
fi

# Detect AddressSanitizer setup on the device. In that case app_process is a
# script, and the real executable is app_process.real.
GDBEXEC_ASAN=app_process.real
adb_shell ls /system/bin/$GDBEXEC_ASAN > /dev/null
if [ $? == 0 ]; then
  GDBEXEC=$GDBEXEC_ASAN
fi

# Cache pulled libraries per device serial (and bitness) so switching between
# devices does not invalidate a shared cache directory.
ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
if [[ -n "$ANDROID_SERIAL" ]]; then
  DEFAULT_PULL_LIBS_DIR="$DEFAULT_PULL_LIBS_DIR/$ANDROID_SERIAL-$SUFFIX_64_BIT"
fi
PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}

# The build fingerprint decides whether the cached system libraries are still
# valid for the connected device; a mismatch forces a fresh pull below.
HOST_FINGERPRINT=
DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint"
log "Device build fingerprint: $DEVICE_FINGERPRINT"

if [ ! -f "$PULL_LIBS_DIR/build.fingerprint" ]; then
  log "Auto-config: --pull-libs (no cached libraries)"
  PULL_LIBS=true
else
  HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint")
  log "Host build fingerprint: $HOST_FINGERPRINT"
  if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
    log "Auto-config: --no-pull-libs (fingerprint match)"
    NO_PULL_LIBS=true
  else
    log "Auto-config: --pull-libs (fingerprint mismatch)"
    PULL_LIBS=true
  fi
fi
+
# If requested, work for M-x gdb. The gdb indirections make it
# difficult to pass --annotate=3 to the gdb binary itself.
if [ "$ANNOTATE" ]; then
  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
fi

# Get the PID from the first argument or else find the PID of the
# browser process.
if [ -z "$PID" ]; then
  PROCESSNAME=$PACKAGE_NAME
  # NOTE(review): this inner test is redundant — $PID is always empty here
  # because the enclosing branch already checked it.
  if [ -z "$PID" ]; then
    # Column 9 of 'ps' output is the process name; take the first match.
    PID=$(adb_shell ps | \
          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
  fi
  if [ -z "$PID" ]; then
    panic "Can't find application process PID."
  fi
  log "Found process PID: $PID"
fi

# Determine if 'adb shell' runs as root or not.
# If so, we can launch gdbserver directly, otherwise, we have to
# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
#
if [ "$SU_PREFIX" ]; then
  # Need to check that this works properly.
  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
  adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
    echo "$ adb shell $SU_PREFIX \"echo foo\""
    cat $SU_PREFIX_TEST_LOG
    exit 1
  fi
  # Every later device command gets wrapped as: $SU_PREFIX "<command>".
  COMMAND_PREFIX="$SU_PREFIX \""
  COMMAND_SUFFIX="\""
else
  SHELL_UID=$("$ADB" shell cat /proc/self/status | \
              awk '$1 == "Uid:" { print $2; }')
  log "Shell UID: $SHELL_UID"
  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
    COMMAND_PREFIX="run-as $PACKAGE_NAME"
    COMMAND_SUFFIX=
  else
    COMMAND_PREFIX=
    COMMAND_SUFFIX=
  fi
fi
log "Command prefix: '$COMMAND_PREFIX'"
log "Command suffix: '$COMMAND_SUFFIX'"
+
mkdir -p "$PULL_LIBS_DIR"
fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"

# Pull device's system libraries that are mapped by our process.
# Pulling all system libraries is too long, so determine which ones
# we need by looking at /proc/$PID/maps instead
if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
  echo "Extracting system libraries into: $PULL_LIBS_DIR"
  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
  if [ $? != 0 ]; then
    echo "ERROR: Could not list process's memory mappings."
    if [ "$SU_PREFIX" ]; then
      panic "Are you sure your --su-prefix is correct?"
    else
      panic "Use --su-prefix if the application is not debuggable."
    fi
  fi
  # Remove the fingerprint file in case pulling one of the libs fails.
  rm -f "$PULL_LIBS_DIR/build.fingerprint"
  # Field 6 of a maps line is the backing file; keep unique /system/*.so paths.
  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
      awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u)
  # The dynamic linker itself is always needed, in addition to mapped libs.
  for SYSLIB in /system/bin/linker$SUFFIX_64_BIT $SYSTEM_LIBS; do
    echo "Pulling from device: $SYSLIB"
    DST_FILE=$PULL_LIBS_DIR$SYSLIB
    DST_DIR=$(dirname "$DST_FILE")
    mkdir -p "$DST_DIR" && "$ADB" pull $SYSLIB "$DST_FILE" 2>/dev/null
    fail_panic "Could not pull $SYSLIB from device !?"
  done
  # Write the fingerprint last, only after every pull succeeded.
  echo "Writing the device fingerprint"
  echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint"
fi

# Pull the app_process binary from the device.
log "Pulling $GDBEXEC from device"
"$ADB" pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
fail_panic "Could not retrieve $GDBEXEC from the device!"

# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
# so we can add them to solib-search-path later.
SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
             grep -v "^$" | tr '\n' ':')
SOLIB_DIRS=${SOLIB_DIRS%:} # Strip trailing :

# Applications with minSdkVersion >= 24 will have their data directories
# created with rwx------ permissions, preventing adbd from forwarding to
# the gdbserver socket.
adb_shell $COMMAND_PREFIX chmod a+x $APP_DATA_DIR $COMMAND_SUFFIX

# Push gdbserver to the device
# (push to the world-writable tmp dir first, then copy into the app's
# data dir via run-as/su, then remove the temporary copy).
log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
"$ADB" push $GDBSERVER $TMP_TARGET_GDBSERVER >/dev/null && \
    adb_shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER $COMMAND_SUFFIX && \
    adb_shell rm $TMP_TARGET_GDBSERVER
fail_panic "Could not copy gdbserver to the device!"

if [ -z "$PORT" ]; then
  # Random port to allow multiple concurrent sessions.
  PORT=$(( $RANDOM % 1000 + 5039 ))
fi
HOST_PORT=$PORT
TARGET_DOMAIN_SOCKET=$APP_DATA_DIR/gdb-socket-$HOST_PORT

# Setup network redirection
log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_DOMAIN_SOCKET)"
"$ADB" forward tcp:$HOST_PORT localfilesystem:$TARGET_DOMAIN_SOCKET
fail_panic "Could not setup network redirection from \
host:localhost:$HOST_PORT to device:$TARGET_DOMAIN_SOCKET"

# Start gdbserver in the background
# Note that using run-as requires the package to be debuggable.
#
# If not, this will fail horribly. The alternative is to run the
# program as root, which requires of course root privileges.
# Maybe we should add a --root option to enable this?
#
+
# Try at most twice: the first attempt can fail when a stale gdbserver from a
# previous session is still ptrace-attached to the target; in that case the
# old server is killed and the loop retries once.
for i in 1 2; do
  log "Starting gdbserver in the background:"
  GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
  log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER \
  --once +$TARGET_DOMAIN_SOCKET \
  --attach $PID $COMMAND_SUFFIX"
  "$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER \
  --once +$TARGET_DOMAIN_SOCKET \
  --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 &
  GDBSERVER_PID=$!
  echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
  log "background job pid: $GDBSERVER_PID"

  # Sleep to allow gdbserver to attach to the remote process and be
  # ready to connect to.
  log "Sleeping ${ATTACH_DELAY}s to ensure gdbserver is alive"
  sleep "$ATTACH_DELAY"
  log "Job control: $(jobs -l)"
  # Column 2 of 'jobs -l' is the PID, column 3 the job state.
  STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
  if [ "$STATE" != "Running" ]; then
    pid_msg=$(grep "is already traced by process" $GDBSERVER_LOG 2>/dev/null)
    if [[ -n "$pid_msg" ]]; then
      # The message ends with the offending PID; strip it out.
      old_pid=${pid_msg##* }
      old_pid=${old_pid//[$'\r\n']} # Trim trailing \r.
      echo "Killing previous gdb server process (pid=$old_pid)"
      adb_shell $COMMAND_PREFIX kill -9 $old_pid $COMMAND_SUFFIX
      continue
    fi
    echo "ERROR: GDBServer either failed to run or attach to PID $PID!"
    echo "Here is the output from gdbserver (also try --verbose for more):"
    echo "===== gdbserver.log start ====="
    cat $GDBSERVER_LOG
    # Fixed: a stray '=' before the opening quote garbled this banner.
    echo "===== gdbserver.log end ======"
    exit 1
  fi
  break
done
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+cat > "$COMMANDS" <<EOF
+set osabi GNU/Linux # Copied from ndk-gdb.py.
+set print pretty 1
+python
+import sys
+sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')
+try:
+ import gdb_chrome
+finally:
+ sys.path.pop(0)
+end
+file $TMPDIR/$GDBEXEC
+directory $CHROMIUM_OUTPUT_DIR
+set solib-absolute-prefix $PULL_LIBS_DIR
+set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR
+
+python
+# Copied from ndk-gdb.py:
+def target_remote_with_retry(target, timeout_seconds):
+ import time
+ end_time = time.time() + timeout_seconds
+ while True:
+ try:
+ gdb.execute('target remote ' + target)
+ return True
+ except gdb.error as e:
+ time_left = end_time - time.time()
+ if time_left < 0 or time_left > timeout_seconds:
+ print("Error: unable to connect to device.")
+ print(e)
+ return False
+ time.sleep(min(0.25, time_left))
+
+print("Connecting to :$HOST_PORT...")
+if target_remote_with_retry(':$HOST_PORT', 5):
+ print("Attached! Reading symbols (takes ~30 seconds).")
+end
+EOF
+
+if [ "$GDBINIT" ]; then
+ cat "$GDBINIT" >> "$COMMANDS"
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+ echo "### START $COMMANDS"
+ cat "$COMMANDS"
+ echo "### END $COMMANDS"
+fi
+
+if [ "$IDE" ]; then
+ mkdir -p "$IDE_DIR"
+ SYM_GDB="$IDE_DIR/gdb"
+ SYM_EXE="$IDE_DIR/app_process"
+ SYM_INIT="$IDE_DIR/gdbinit"
+ ln -sf "$TMPDIR/$GDBEXEC" "$SYM_EXE"
+ ln -sf "$COMMANDS" "$SYM_INIT"
+ # gdb doesn't work when symlinked, so create a wrapper.
+ echo
+ cat > $SYM_GDB <<EOF
+#!/bin/sh
+exec $GDB "\$@"
+EOF
+ chmod u+x $SYM_GDB
+
+ echo "GDB server listening on: localhost:$PORT"
+ echo "GDB wrapper script: $SYM_GDB"
+ echo "App executable: $SYM_EXE"
+ echo "gdbinit: $SYM_INIT"
+ echo "Connect with vscode: https://chromium.googlesource.com/chromium/src/+/master/docs/vscode.md#Launch-Commands"
+ echo "Showing gdbserver logs. Press Ctrl-C to disconnect."
+ tail -f "$GDBSERVER_LOG"
+else
+ log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+ echo "Server log: $GDBSERVER_LOG"
+ if [ "$CGDB" ]; then
+ $CGDB -d $GDB -- $GDB_ARGS -x "$COMMANDS"
+ else
+ $GDB $GDB_ARGS -x "$COMMANDS"
+ fi
+fi
diff --git a/deps/v8/build/android/adb_install_apk.py b/deps/v8/build/android/adb_install_apk.py
new file mode 100755
index 0000000000..f17143a2a3
--- /dev/null
+++ b/deps/v8/build/android/adb_install_apk.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import argparse
+import glob
+import logging
+import os
+import sys
+
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+
+
def main():
  """Parses arguments, resolves the APK path, and installs it in parallel.

  Resolution order for the APK: the given path as-is, then
  <output dir>/apks/<name>. With --split, sibling split APKs matching the
  base package are installed together. Failing/timing-out devices are
  blacklisted (when --blacklist-file is given) instead of aborting the run.
  """
  parser = argparse.ArgumentParser()

  # Exactly one of the deprecated --apk flag or a positional path is required.
  apk_group = parser.add_mutually_exclusive_group(required=True)
  apk_group.add_argument('--apk', dest='apk_name',
                         help='DEPRECATED The name of the apk containing the'
                              ' application (with the .apk extension).')
  apk_group.add_argument('apk_path', nargs='?',
                         help='The path to the APK to install.')

  # TODO(jbudorick): Remove once no clients pass --apk_package
  parser.add_argument('--apk_package', help='DEPRECATED unused')
  parser.add_argument('--split',
                      action='append',
                      dest='splits',
                      help='A glob matching the apk splits. '
                           'Can be specified multiple times.')
  parser.add_argument('--keep_data',
                      action='store_true',
                      default=False,
                      help='Keep the package data when installing '
                           'the application.')
  parser.add_argument('--debug', action='store_const', const='Debug',
                      dest='build_type',
                      default=os.environ.get('BUILDTYPE', 'Debug'),
                      help='If set, run test suites under out/Debug. '
                           'Default is env var BUILDTYPE or Debug')
  parser.add_argument('--release', action='store_const', const='Release',
                      dest='build_type',
                      help='If set, run test suites under out/Release. '
                           'Default is env var BUILDTYPE or Debug.')
  parser.add_argument('-d', '--device', dest='devices', action='append',
                      default=[],
                      help='Target device for apk to install on. Enter multiple'
                           ' times for multiple devices.')
  parser.add_argument('--adb-path', type=os.path.abspath,
                      help='Absolute path to the adb binary to use.')
  parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
  parser.add_argument('-v', '--verbose', action='count',
                      help='Enable verbose logging.')
  parser.add_argument('--downgrade', action='store_true',
                      help='If set, allows downgrading of apk.')
  parser.add_argument('--timeout', type=int,
                      default=device_utils.DeviceUtils.INSTALL_DEFAULT_TIMEOUT,
                      help='Seconds to wait for APK installation. '
                           '(default: %(default)s)')

  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose)
  constants.SetBuildType(args.build_type)

  devil_chromium.Initialize(
      output_directory=constants.GetOutDirectory(),
      adb_path=args.adb_path)

  # Accept either a path or the deprecated bare name; normalize to a path,
  # falling back to <out>/apks/<name> when the file is not found as given.
  apk = args.apk_path or args.apk_name
  if not apk.endswith('.apk'):
    apk += '.apk'
  if not os.path.exists(apk):
    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
    if not os.path.exists(apk):
      parser.error('%s not found.' % apk)

  if args.splits:
    splits = []
    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
    for split_glob in args.splits:
      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
      if not apks:
        logging.warning('No apks matched for %s.', split_glob)
      for f in apks:
        helper = apk_helper.ApkHelper(f)
        # Only keep true split APKs that belong to the same package as
        # the base APK.
        if (helper.GetPackageName() == base_apk_package
            and helper.GetSplitName()):
          splits.append(f)

  blacklist = (device_blacklist.Blacklist(args.blacklist_file)
               if args.blacklist_file
               else None)
  devices = device_utils.DeviceUtils.HealthyDevices(blacklist=blacklist,
                                                    device_arg=args.devices)

  def blacklisting_install(device):
    # Install on a single device; on failure or timeout, blacklist the
    # device (when a blacklist file was supplied) rather than aborting
    # the whole parallel run.
    try:
      if args.splits:
        device.InstallSplitApk(apk, splits, reinstall=args.keep_data,
                               allow_downgrade=args.downgrade)
      else:
        device.Install(apk, reinstall=args.keep_data,
                       allow_downgrade=args.downgrade,
                       timeout=args.timeout)
    except (device_errors.CommandFailedError,
            device_errors.DeviceUnreachableError):
      logging.exception('Failed to install %s', apk)
      if blacklist:
        blacklist.Extend([str(device)], reason='install_failure')
        logging.warning('Blacklisting %s', str(device))
    except device_errors.CommandTimeoutError:
      logging.exception('Timed out while installing %s', apk)
      if blacklist:
        blacklist.Extend([str(device)], reason='install_timeout')
        logging.warning('Blacklisting %s', str(device))

  device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/adb_logcat_monitor.py b/deps/v8/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000000..d3cc67dbcc
--- /dev/null
+++ b/deps/v8/build/android/adb_logcat_monitor.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create. The
+script will run until killed by an external signal. To test, run the
+script in a shell and <Ctrl>-C it after a while. It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
class TimeoutException(Exception):
  """Raised by the SIGALRM handler to abort a hung 'adb devices' call."""
+
+
class SigtermError(Exception):
  """Raised by the SIGTERM handler so main() can shut down cleanly."""
+
+
def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
  """Spawns an adb logcat process if one is not currently running.

  Args:
    device_id: serial of the device to monitor (must be a key in |devices|).
    adb_cmd: binary to run adb.
    base_dir: directory where logcat_<device>_<seq> files are written.
  """
  process, logcat_num = devices[device_id]
  if process:
    if process.poll() is None:
      # Logcat process is still happily running
      return
    else:
      logging.info('Logcat for device %s has died', device_id)
      error_filter = re.compile('- waiting for device -')
      for line in process.stderr:
        if not error_filter.match(line):
          # Lazy %-style args instead of eager string concatenation.
          logging.error('%s: %s', device_id, line)
      # Close the dead process's stderr pipe so each restart does not
      # leak a file descriptor.
      process.stderr.close()

  logging.info('Starting logcat %d for device %s', logcat_num,
               device_id)
  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
  process = subprocess.Popen([adb_cmd, '-s', device_id,
                              'logcat', '-v', 'threadtime'],
                             stdout=logcat_file,
                             stderr=subprocess.PIPE)
  devices[device_id] = (process, logcat_num + 1)
+
+
def GetAttachedDevices(adb_cmd):
  """Gets the device list from adb.

  We use an alarm in this function to avoid deadlocking from an external
  dependency.

  Args:
    adb_cmd: binary to run adb

  Returns:
    list of devices or an empty list on timeout
  """
  device_line = '^(\\S+)\tdevice$'
  # Arm a 2-second SIGALRM so a wedged adb cannot hang us forever.
  signal.alarm(2)
  try:
    proc = subprocess.Popen([adb_cmd, 'devices'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout_data, stderr_data = proc.communicate()
    if stderr_data:
      logging.warning('adb device error %s', stderr_data.strip())
    return re.findall(device_line, stdout_data, re.MULTILINE)
  except TimeoutException:
    logging.warning('"adb devices" command timed out')
    return []
  except (IOError, OSError):
    logging.exception('Exception from "adb devices"')
    return []
  finally:
    # Always disarm the alarm, whichever way we leave the function.
    signal.alarm(0)
+
+
def main(base_dir, adb_cmd='adb'):
  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
  # We create the directory to ensure 'run once' semantics
  if os.path.exists(base_dir):
    print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
    shutil.rmtree(base_dir, ignore_errors=True)

  os.makedirs(base_dir)
  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
                      level=logging.INFO,
                      format='%(asctime)-2s %(levelname)-8s %(message)s')

  # Set up the alarm for calling 'adb devices'. This is to ensure
  # our script doesn't get stuck waiting for a process response
  def TimeoutHandler(_signum, _unused_frame):
    raise TimeoutException()
  signal.signal(signal.SIGALRM, TimeoutHandler)

  # Handle SIGTERMs to ensure clean shutdown
  def SigtermHandler(_signum, _unused_frame):
    raise SigtermError()
  signal.signal(signal.SIGTERM, SigtermHandler)

  logging.info('Started with pid %d', os.getpid())
  # The PID file lets adb_logcat_printer.py find and terminate this process.
  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')

  try:
    with open(pid_file_path, 'w') as f:
      f.write(str(os.getpid()))
    while True:
      for device_id in GetAttachedDevices(adb_cmd):
        if not device_id in devices:
          # Clear the device's logcat buffer before the first capture so
          # we only record output from this session onward.
          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
          devices[device_id] = (None, 0)

      for device in devices:
        # This will spawn logcat watchers for any device ever detected
        StartLogcatIfNecessary(device, adb_cmd, base_dir)

      time.sleep(5)
  except SigtermError:
    logging.info('Received SIGTERM, shutting down')
  except:  # pylint: disable=bare-except
    logging.exception('Unexpected exception in main.')
  finally:
    # Best-effort cleanup: stop every child logcat and remove the PID file
    # (its absence signals the printer that shutdown completed).
    for process, _ in devices.itervalues():
      if process:
        try:
          process.terminate()
        except OSError:
          pass
    os.remove(pid_file_path)


if __name__ == '__main__':
  if 2 <= len(sys.argv) <= 3:
    print 'adb_logcat_monitor: Initializing'
    sys.exit(main(*sys.argv[1:3]))

  print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/deps/v8/build/android/adb_logcat_printer.py b/deps/v8/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000000..a715170759
--- /dev/null
+++ b/deps/v8/build/android/adb_logcat_printer.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to out, and will combine multiple
+logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGINT and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
def CombineLogFiles(list_of_lists, logger):
  """Splices together multiple logcats from the same device.

  Consecutive captures from one device overlap: the last accumulated line
  usually reappears in the next file. When that last line carries a logcat
  timestamp, the next file is appended starting from its first occurrence
  of that line, which drops the duplicated prefix.

  Args:
    list_of_lists: list of pairs (filename, list of timestamped lines)
    logger: handler to log events

  Returns:
    list of lines with duplicates removed
  """
  cur_device_log = ['']
  for cur_file, cur_file_lines in list_of_lists:
    # Ignore files with just the logcat header
    if len(cur_file_lines) < 2:
      continue
    common_index = 0
    # Skip this step if list just has empty string
    if len(cur_device_log) > 1:
      try:
        line = cur_device_log[-1]
        # Used to make sure we only splice on a timestamped line
        # ("MM-DD HH:MM:SS.mmm "). The dot before the milliseconds is now
        # escaped; previously the unescaped '.' matched any character.
        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} ', line):
          common_index = cur_file_lines.index(line)
        else:
          logger.warning('splice error - no timestamp in "%s"?', line.strip())
      except ValueError:
        # The last line was valid but wasn't found in the next file
        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)

    cur_device_log += ['*'*30 + ' %s' % cur_file]
    cur_device_log.extend(cur_file_lines[common_index:])

  return cur_device_log
+
+
def FindLogFiles(base_dir):
  """Search a directory for logcat files.

  Args:
    base_dir: directory to search

  Returns:
    Mapping of device_id to a sorted list of file paths for a given device
  """
  name_pattern = re.compile(r'^logcat_(\S+)_(\d+)$')
  # Collect (<device_id>, <seq num>, <full file path>) for every match.
  entries = []
  for file_name in os.listdir(base_dir):
    match = name_pattern.match(file_name)
    if match:
      entries.append((match.group(1), int(match.group(2)),
                      os.path.join(base_dir, file_name)))
  # Sorting the tuples orders each device's files by sequence number.
  file_map = {}
  for device_id, _, file_path in sorted(entries):
    file_map.setdefault(device_id, []).append(file_path)
  return file_map
+
+
def GetDeviceLogs(log_filenames, logger):
  """Read log files, combine and format.

  Args:
    log_filenames: mapping of device_id to sorted list of file paths
    logger: logger handle for logging events

  Returns:
    list of formatted device logs, one for each device.
  """
  device_logs = []

  # dict.items() instead of the Python-2-only iteritems(): identical
  # iteration behavior here, and it keeps the function portable.
  for device, device_files in log_filenames.items():
    logger.debug('%s: %s', device, str(device_files))
    device_file_lines = []
    for cur_file in device_files:
      with open(cur_file) as f:
        device_file_lines += [(cur_file, f.read().splitlines())]
    combined_lines = CombineLogFiles(device_file_lines, logger)
    # Prepend each line with a short unique ID so it's easy to see
    # when the device changes. We don't use the start of the device
    # ID because it can be the same among devices. Example lines:
    # AB324: foo
    # AB324: blah
    device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
  return device_logs
+
+
def ShutdownLogcatMonitor(base_dir, logger):
  """Attempts to shutdown adb_logcat_monitor and blocks while waiting.

  Reads the monitor's PID from <base_dir>/LOGCAT_MONITOR_PID, sends it
  SIGTERM, then polls (up to ~2 seconds) for either the PID file to be
  deleted (clean shutdown) or the /proc entry to disappear (unclean exit).
  Any error while signaling is logged and swallowed so printing proceeds.
  """
  try:
    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
    with open(monitor_pid_path) as f:
      monitor_pid = int(f.readline())

    logger.info('Sending SIGTERM to %d', monitor_pid)
    os.kill(monitor_pid, signal.SIGTERM)
    i = 0
    while True:
      time.sleep(.2)
      if not os.path.exists(monitor_pid_path):
        # The monitor removes its PID file on clean shutdown.
        return
      if not os.path.exists('/proc/%d' % monitor_pid):
        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
        return
      logger.info('Waiting for logcat process to terminate.')
      i += 1
      if i >= 10:
        logger.warning('Monitor pid did not terminate. Continuing anyway.')
        return

  except (ValueError, IOError, OSError):
    logger.exception('Error signaling logcat monitor - continuing')
+
+
def main(argv):
  """Shuts down the logcat monitor, then prints the combined device logs.

  Writes to --output-path when given (creating its directory if needed),
  otherwise to stdout. The printer's own event log is captured in memory
  and appended to the output at the end.
  """
  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
  parser.add_option('--output-path',
                    help='Output file path (if unspecified, prints to stdout)')
  options, args = parser.parse_args(argv)
  if len(args) != 1:
    parser.error('Wrong number of unparsed args')
  base_dir = args[0]

  # Buffer this script's own log records in memory so they can be appended
  # to the final output.
  log_stringio = cStringIO.StringIO()
  logger = logging.getLogger('LogcatPrinter')
  logger.setLevel(LOG_LEVEL)
  sh = logging.StreamHandler(log_stringio)
  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
                                    ' %(message)s'))
  logger.addHandler(sh)

  if options.output_path:
    if not os.path.exists(os.path.dirname(options.output_path)):
      logger.warning('Output dir %s doesn\'t exist. Creating it.',
                     os.path.dirname(options.output_path))
      os.makedirs(os.path.dirname(options.output_path))
    output_file = open(options.output_path, 'w')
    logger.info('Dumping logcat to local file %s. If running in a build, '
                'this file will likely will be uploaded to google storage '
                'in a later step. It can be downloaded from there.',
                options.output_path)
  else:
    output_file = sys.stdout

  try:
    # Wait at least 5 seconds after base_dir is created before printing.
    #
    # The idea is that 'adb logcat > file' output consists of 2 phases:
    #  1 Dump all the saved logs to the file
    #  2 Stream log messages as they are generated
    #
    # We want to give enough time for phase 1 to complete.  There's no
    # good method to tell how long to wait, but it usually only takes a
    # second.  On most bots, this code path won't occur at all, since
    # adb_logcat_monitor.py command will have spawned more than 5 seconds
    # prior to called this shell script.
    try:
      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
    except OSError:
      sleep_time = 5
    if sleep_time > 0:
      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
      time.sleep(sleep_time)

    assert os.path.exists(base_dir), '%s does not exist' % base_dir
    ShutdownLogcatMonitor(base_dir, logger)
    separator = '\n' + '*' * 80 + '\n\n'
    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
      output_file.write(log)
      output_file.write(separator)
    with open(os.path.join(base_dir, 'eventlog')) as f:
      output_file.write('\nLogcat Monitor Event Log\n')
      output_file.write(f.read())
  except:
    # Deliberately broad (see module-level pylint disable): printing must
    # finish and flush the event log even if combining fails.
    logger.exception('Unexpected exception')

  logger.info('Done.')
  sh.flush()
  output_file.write('\nLogcat Printer Event Log\n')
  output_file.write(log_stringio.getvalue())

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/adb_profile_chrome b/deps/v8/build/android/adb_profile_chrome
new file mode 100755
index 0000000000..d3244ffdf6
--- /dev/null
+++ b/deps/v8/build/android/adb_profile_chrome
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@"
diff --git a/deps/v8/build/android/adb_profile_chrome_startup b/deps/v8/build/android/adb_profile_chrome_startup
new file mode 100755
index 0000000000..d5836cdf70
--- /dev/null
+++ b/deps/v8/build/android/adb_profile_chrome_startup
@@ -0,0 +1,9 @@
+#!/bin/bash
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling for chrome startup.
+CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult
+exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@"
diff --git a/deps/v8/build/android/adb_reverse_forwarder.py b/deps/v8/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000000..6edb43ae5b
--- /dev/null
+++ b/deps/v8/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import argparse
+import sys
+import time
+
+import devil_chromium
+
+from devil.android import device_blacklist
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.utils import run_tests_helper
+
+from pylib import constants
+
+
def main(argv):
  """Maps device->host port pairs and blocks until interrupted.

  Parses an even-length list of (device_port, host_port) pairs, sets up
  reverse forwarding via the devil Forwarder, then sleeps forever; on
  Ctrl-C it exits 0, and the finally-block always unmaps the ports.
  """
  parser = argparse.ArgumentParser(
      usage='Usage: %(prog)s [options] device_port '
            'host_port [device_port_2 host_port_2] ...',
      description=__doc__)
  parser.add_argument(
      '-v', '--verbose',
      dest='verbose_count',
      default=0,
      action='count',
      help='Verbose level (multiple times for more)')
  parser.add_argument(
      '--device',
      help='Serial number of device we should use.')
  parser.add_argument(
      '--blacklist-file',
      help='Device blacklist JSON file.')
  parser.add_argument(
      '--debug',
      action='store_const',
      const='Debug',
      dest='build_type',
      default='Release',
      help='DEPRECATED: use --output-directory instead.')
  parser.add_argument(
      '--output-directory',
      help='Path to the root build directory.')
  parser.add_argument(
      'ports',
      nargs='+',
      type=int,
      help='Port pair to reverse forward.')

  args = parser.parse_args(argv)
  run_tests_helper.SetLogLevel(args.verbose_count)

  if len(args.ports) < 2 or len(args.ports) % 2:
    parser.error('Need even number of port pairs')

  # Pair up consecutive ports: [d1, h1, d2, h2] -> [(d1, h1), (d2, h2)].
  port_pairs = zip(args.ports[::2], args.ports[1::2])

  if args.build_type:
    constants.SetBuildType(args.build_type)
  if args.output_directory:
    constants.SetOutputDirectory(args.output_directory)
  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())

  blacklist = (device_blacklist.Blacklist(args.blacklist_file)
               if args.blacklist_file
               else None)
  # Use the first healthy device matching --device (or the first available).
  device = device_utils.DeviceUtils.HealthyDevices(
      blacklist=blacklist, device_arg=args.device)[0]
  try:
    forwarder.Forwarder.Map(port_pairs, device)
    # Keep the process alive; the forwarding lives only as long as we do.
    while True:
      time.sleep(60)
  except KeyboardInterrupt:
    sys.exit(0)
  finally:
    forwarder.Forwarder.UnmapAllDevicePorts(device)

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/adb_system_webview_command_line b/deps/v8/build/android/adb_system_webview_command_line
new file mode 100755
index 0000000000..a0d2705821
--- /dev/null
+++ b/deps/v8/build/android/adb_system_webview_command_line
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current system webview flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the system
+# webview flags. For example:
+# adb_system_webview_command_line --enable-webgl
+#
+# To remove all system webview flags, pass an empty string for the flags:
+# adb_system_webview_command_line ""
+
+exec $(dirname $0)/adb_command_line.py --name webview-command-line "$@"
diff --git a/deps/v8/build/android/android_only_explicit_jni_exports.lst b/deps/v8/build/android/android_only_explicit_jni_exports.lst
new file mode 100644
index 0000000000..f989691865
--- /dev/null
+++ b/deps/v8/build/android/android_only_explicit_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only JNI_OnLoad.
+# Should be used for libraries that do explicit JNI registration.
+
+{
+ global:
+ JNI_OnLoad;
+ local:
+ *;
+};
diff --git a/deps/v8/build/android/android_only_jni_exports.lst b/deps/v8/build/android/android_only_jni_exports.lst
new file mode 100644
index 0000000000..1336fee145
--- /dev/null
+++ b/deps/v8/build/android/android_only_jni_exports.lst
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Linker script that exports only symbols required for JNI to work.
+
+{
+ global:
+ JNI_OnLoad;
+ Java_*;
+ local:
+ *;
+};
diff --git a/deps/v8/build/android/apk_operations.py b/deps/v8/build/android/apk_operations.py
new file mode 100755
index 0000000000..91f6851c73
--- /dev/null
+++ b/deps/v8/build/android/apk_operations.py
@@ -0,0 +1,1678 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Using colorama.Fore/Back/Style members
+# pylint: disable=no-member
+
+import argparse
+import collections
+import json
+import logging
+import os
+import pipes
+import posixpath
+import random
+import re
+import shlex
+import shutil
+import sys
+import tempfile
+import textwrap
+
+import devil_chromium
+from devil import devil_env
+from devil.android import apk_helper
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android.sdk import adb_wrapper
+from devil.android.sdk import intent
+from devil.android.sdk import version_codes
+from devil.utils import run_tests_helper
+
+with devil_env.SysPath(os.path.join(os.path.dirname(__file__), '..', '..',
+ 'third_party', 'colorama', 'src')):
+ import colorama
+
+from incremental_install import installer
+from pylib import constants
+from pylib.symbols import deobfuscator
+from pylib.utils import simpleperf
+from pylib.utils import app_bundle_utils
+
+with devil_env.SysPath(os.path.join(os.path.dirname(__file__), '..', '..',
+ 'build', 'android', 'gyp')):
+ import bundletool
+
+# Matches messages only on pre-L (Dalvik) that are spammy and unimportant.
+_DALVIK_IGNORE_PATTERN = re.compile('|'.join([
+ r'^Added shared lib',
+ r'^Could not find ',
+ r'^DexOpt:',
+ r'^GC_',
+ r'^Late-enabling CheckJNI',
+ r'^Link of class',
+ r'^No JNI_OnLoad found in',
+ r'^Trying to load lib',
+ r'^Unable to resolve superclass',
+ r'^VFY:',
+ r'^WAIT_',
+ ]))
+
+BASE_MODULE = 'base'
+
+
+def _Colorize(text, style=''):
+ return (style
+ + text
+ + colorama.Style.RESET_ALL)
+
+
+def _InstallApk(devices, apk, install_dict):
+ def install(device):
+ if install_dict:
+ installer.Install(device, install_dict, apk=apk)
+ else:
+ device.Install(apk, allow_downgrade=True, reinstall=True)
+
+ logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
+ device_utils.DeviceUtils.parallel(devices).pMap(install)
+
+
+# A named tuple containing the information needed to convert a bundle into
+# an installable .apks archive.
+# Fields:
+# bundle_path: Path to input bundle file.
+# bundle_apks_path: Path to output bundle .apks archive file.
+# aapt2_path: Path to aapt2 tool.
+# keystore_path: Path to keystore file.
+# keystore_password: Password for the keystore file.
+# keystore_alias: Signing key name alias within the keystore file.
+# system_image_locales: List of Chromium locales to include in system .apks.
+BundleGenerationInfo = collections.namedtuple(
+ 'BundleGenerationInfo',
+ 'bundle_path,bundle_apks_path,aapt2_path,keystore_path,keystore_password,'
+ 'keystore_alias,system_image_locales')
+
+
+def _GenerateBundleApks(info,
+ output_path,
+ minimal=False,
+ minimal_sdk_version=None,
+ mode=None):
+ """Generate an .apks archive from a bundle on demand.
+
+ Args:
+ info: A BundleGenerationInfo instance.
+ output_path: Path of output .apks archive.
+ minimal: Create the minimal set of apks possible (english-only).
+ minimal_sdk_version: When minimal=True, use this sdkVersion.
+ mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES.
+ """
+ app_bundle_utils.GenerateBundleApks(
+ info.bundle_path,
+ output_path,
+ info.aapt2_path,
+ info.keystore_path,
+ info.keystore_password,
+ info.keystore_alias,
+ system_image_locales=info.system_image_locales,
+ mode=mode,
+ minimal=minimal,
+ minimal_sdk_version=minimal_sdk_version)
+
+
+def _InstallBundle(devices, bundle_apks, package_name, command_line_flags_file,
+ modules, fake_modules):
+ # Path to push fake modules for Chrome to pick up.
+ MODULES_SRC_DIRECTORY_PATH = '/data/local/tmp/modules'
+ # Path Chrome creates after validating fake modules. This needs to be cleared
+ # for pushed fake modules to be picked up.
+ SPLITCOMPAT_PATH = '/data/data/' + package_name + '/files/splitcompat'
+ # Chrome command line flag needed for fake modules to work.
+ FAKE_FEATURE_MODULE_INSTALL = '--fake-feature-module-install'
+
+ def ShouldWarnFakeFeatureModuleInstallFlag(device):
+ if command_line_flags_file:
+ changer = flag_changer.FlagChanger(device, command_line_flags_file)
+ return FAKE_FEATURE_MODULE_INSTALL not in changer.GetCurrentFlags()
+ return False
+
+ def ClearFakeModules(device):
+ if device.PathExists(SPLITCOMPAT_PATH, as_root=True):
+ device.RemovePath(
+ SPLITCOMPAT_PATH, force=True, recursive=True, as_root=True)
+ logging.info('Removed %s', SPLITCOMPAT_PATH)
+ else:
+ logging.info('Skipped removing nonexistent %s', SPLITCOMPAT_PATH)
+
+ def InstallFakeModules(device):
+ try:
+ temp_path = tempfile.mkdtemp()
+
+ if not fake_modules:
+ # Push empty temp_path to clear folder on device and update the cache.
+ device.PushChangedFiles([(temp_path, MODULES_SRC_DIRECTORY_PATH)],
+ delete_device_stale=True)
+ return
+
+ # Device-spec JSON is needed, so create that first.
+ device_spec_filename = os.path.join(temp_path, 'device_spec.json')
+ get_device_spec_cmd_args = [
+ 'get-device-spec', '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(),
+ '--device-id=' + device.serial, '--output=' + device_spec_filename
+ ]
+ bundletool.RunBundleTool(get_device_spec_cmd_args)
+
+ # Extract fake modules to temp directory. For now, installation
+ # requires running 'bundletool extract-apks'. Unfortunately, this leads
+ # to unneeded compression of module files.
+ extract_apks_cmd_args = [
+ 'extract-apks', '--apks=' + bundle_apks,
+ '--device-spec=' + device_spec_filename,
+ '--modules=' + ','.join(fake_modules), '--output-dir=' + temp_path
+ ]
+ bundletool.RunBundleTool(extract_apks_cmd_args)
+
+ # Push fake modules, with renames.
+ fake_module_apks = set()
+ for fake_module in fake_modules:
+ found_master = False
+
+ for filename in os.listdir(temp_path):
+ # If file matches expected format, rename it to follow conventions
+ # required by splitcompatting.
+ match = re.match(r'%s-([a-z_0-9]+)\.apk' % fake_module, filename)
+ local_path = os.path.join(temp_path, filename)
+
+ if not match:
+ continue
+
+ module_suffix = match.group(1)
+ remote = os.path.join(
+ temp_path, '%s.config.%s.apk' % (fake_module, module_suffix))
+ # Check if filename matches a master apk.
+ if 'master' in module_suffix:
+ if found_master:
+ raise Exception('Expect 1 master apk file for %s' % fake_module)
+ found_master = True
+ remote = os.path.join(temp_path, '%s.apk' % fake_module)
+
+ os.rename(local_path, remote)
+ fake_module_apks.add(os.path.basename(remote))
+
+ # Files that weren't renamed should not be pushed, remove from temp_path.
+ for filename in os.listdir(temp_path):
+ if filename not in fake_module_apks:
+ os.remove(os.path.join(temp_path, filename))
+
+ device.PushChangedFiles([(temp_path, MODULES_SRC_DIRECTORY_PATH)],
+ delete_device_stale=True)
+
+ finally:
+ shutil.rmtree(temp_path, ignore_errors=True)
+
+ def Install(device):
+ ClearFakeModules(device)
+ if fake_modules:
+ # Print warning if command line is not set up for fake modules.
+ if ShouldWarnFakeFeatureModuleInstallFlag(device):
+ msg = ('Command line has no %s: Fake modules will be ignored.' %
+ FAKE_FEATURE_MODULE_INSTALL)
+ print _Colorize(msg, colorama.Fore.YELLOW + colorama.Style.BRIGHT)
+
+ InstallFakeModules(device)
+
+ # NOTE: For now, installation requires running 'bundletool install-apks'.
+ # TODO(digit): Add proper support for bundles to devil instead, then use it.
+ install_cmd_args = [
+ 'install-apks', '--apks=' + bundle_apks,
+ '--adb=' + adb_wrapper.AdbWrapper.GetAdbPath(),
+ '--device-id=' + device.serial
+ ]
+ if modules:
+ install_cmd_args += ['--modules=' + ','.join(modules)]
+ bundletool.RunBundleTool(install_cmd_args)
+
+ # Basic checks for |modules| and |fake_modules|.
+ # * |fake_modules| cannot include 'base'.
+ # * If |fake_modules| is given, ensure |modules| includes 'base'.
+ # * They must be disjoint.
+ modules_set = set(modules) if modules else set()
+ fake_modules_set = set(fake_modules) if fake_modules else set()
+ if BASE_MODULE in fake_modules_set:
+ raise Exception('\'-f {}\' is disallowed.'.format(BASE_MODULE))
+ if fake_modules_set and BASE_MODULE not in modules_set:
+ raise Exception(
+ '\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE))
+ if fake_modules_set.intersection(modules_set):
+ raise Exception('\'-m\' and \'-f\' entries must be disjoint.')
+
+ logging.info('Installing bundle.')
+ device_utils.DeviceUtils.parallel(devices).pMap(Install)
+
+
+def _UninstallApk(devices, install_dict, package_name):
+ def uninstall(device):
+ if install_dict:
+ installer.Uninstall(device, package_name)
+ else:
+ device.Uninstall(package_name)
+ device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
+
+
+def _IsWebViewProvider(apk_helper_instance):
+ meta_data = apk_helper_instance.GetAllMetadata()
+ meta_data_keys = [pair[0] for pair in meta_data]
+ return 'com.android.webview.WebViewLibrary' in meta_data_keys
+
+
+def _SetWebViewProvider(devices, package_name):
+
+ def switch_provider(device):
+ if device.build_version_sdk < version_codes.NOUGAT:
+ logging.error('No need to switch provider on pre-Nougat devices (%s)',
+ device.serial)
+ else:
+ device.SetWebViewImplementation(package_name)
+
+ device_utils.DeviceUtils.parallel(devices).pMap(switch_provider)
+
+
+def _NormalizeProcessName(debug_process_name, package_name):
+ if not debug_process_name:
+ debug_process_name = package_name
+ elif debug_process_name.startswith(':'):
+ debug_process_name = package_name + debug_process_name
+ elif '.' not in debug_process_name:
+ debug_process_name = package_name + ':' + debug_process_name
+ return debug_process_name
+
+
+def _LaunchUrl(devices, package_name, argv=None, command_line_flags_file=None,
+ url=None, apk=None, wait_for_java_debugger=False,
+ debug_process_name=None, nokill=None):
+ if argv and command_line_flags_file is None:
+ raise Exception('This apk does not support any flags.')
+ if url:
+ # TODO(agrieve): Launch could be changed to require only package name by
+ # parsing "dumpsys package" rather than relying on the apk.
+ if not apk:
+ raise Exception('Launching with URL is not supported when using '
+ '--package-name. Use --apk-path instead.')
+ view_activity = apk.GetViewActivityName()
+ if not view_activity:
+ raise Exception('APK does not support launching with URLs.')
+
+ debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+
+ def launch(device):
+ # --persistent is required to have Settings.Global.DEBUG_APP be set, which
+ # we currently use to allow reading of flags. https://crbug.com/784947
+ if not nokill:
+ cmd = ['am', 'set-debug-app', '--persistent', debug_process_name]
+ if wait_for_java_debugger:
+ cmd[-1:-1] = ['-w']
+ # Ignore error since it will fail if apk is not debuggable.
+ device.RunShellCommand(cmd, check_return=False)
+
+ # The flags are first updated with input args.
+ if command_line_flags_file:
+ changer = flag_changer.FlagChanger(device, command_line_flags_file)
+ flags = []
+ if argv:
+ flags = shlex.split(argv)
+ try:
+ changer.ReplaceFlags(flags)
+ except device_errors.AdbShellCommandFailedError:
+ logging.exception('Failed to set flags')
+
+ if url is None:
+ # Simulate app icon click if no url is present.
+ cmd = [
+ 'am', 'start', '-p', package_name, '-c',
+ 'android.intent.category.LAUNCHER', '-a', 'android.intent.action.MAIN'
+ ]
+ device.RunShellCommand(cmd, check_return=True)
+ else:
+ launch_intent = intent.Intent(action='android.intent.action.VIEW',
+ activity=view_activity, data=url,
+ package=package_name)
+ device.StartActivity(launch_intent)
+ device_utils.DeviceUtils.parallel(devices).pMap(launch)
+ if wait_for_java_debugger:
+ print ('Waiting for debugger to attach to process: ' +
+ _Colorize(debug_process_name, colorama.Fore.YELLOW))
+
+
+def _ChangeFlags(devices, argv, command_line_flags_file):
+ if argv is None:
+ _DisplayArgs(devices, command_line_flags_file)
+ else:
+ flags = shlex.split(argv)
+ def update(device):
+ changer = flag_changer.FlagChanger(device, command_line_flags_file)
+ changer.ReplaceFlags(flags)
+ device_utils.DeviceUtils.parallel(devices).pMap(update)
+
+
+def _TargetCpuToTargetArch(target_cpu):
+ if target_cpu == 'x64':
+ return 'x86_64'
+ if target_cpu == 'mipsel':
+ return 'mips'
+ return target_cpu
+
+
+def _RunGdb(device, package_name, debug_process_name, pid, output_directory,
+ target_cpu, port, ide, verbose):
+ if not pid:
+ debug_process_name = _NormalizeProcessName(debug_process_name, package_name)
+ pid = device.GetApplicationPids(debug_process_name, at_most_one=True)
+ if not pid:
+ # Attaching gdb makes the app run so slow that it takes *minutes* to start
+ # up (as of 2018). Better to just fail than to start & attach.
+ raise Exception('App not running.')
+
+ gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
+ cmd = [
+ gdb_script_path,
+ '--package-name=%s' % package_name,
+ '--output-directory=%s' % output_directory,
+ '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
+ '--device=%s' % device.serial,
+ '--pid=%s' % pid,
+ '--port=%d' % port,
+ ]
+ if ide:
+ cmd.append('--ide')
+ # Enable verbose output of adb_gdb if it's set for this script.
+ if verbose:
+ cmd.append('--verbose')
+ if target_cpu:
+ cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
+ logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
+ print _Colorize(
+ 'All subsequent output is from adb_gdb script.', colorama.Fore.YELLOW)
+ os.execv(gdb_script_path, cmd)
+
+
+def _PrintPerDeviceOutput(devices, results, single_line=False):
+ for d, result in zip(devices, results):
+ if not single_line and d is not devices[0]:
+ sys.stdout.write('\n')
+ sys.stdout.write(
+ _Colorize('{} ({}):'.format(d, d.build_description),
+ colorama.Fore.YELLOW))
+ sys.stdout.write(' ' if single_line else '\n')
+ yield result
+
+
+def _RunMemUsage(devices, package_name, query_app=False):
+ cmd_args = ['dumpsys', 'meminfo']
+ if not query_app:
+ cmd_args.append('--local')
+
+ def mem_usage_helper(d):
+ ret = []
+ for process in sorted(_GetPackageProcesses(d, package_name)):
+ meminfo = d.RunShellCommand(cmd_args + [str(process.pid)])
+ ret.append((process.name, '\n'.join(meminfo)))
+ return ret
+
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
+ for result in _PrintPerDeviceOutput(devices, all_results):
+ if not result:
+ print 'No processes found.'
+ else:
+ for name, usage in sorted(result):
+ print _Colorize(
+ '==== Output of "dumpsys meminfo %s" ====' % name,
+ colorama.Fore.GREEN)
+ print usage
+
+
+def _DuHelper(device, path_spec, run_as=None):
+ """Runs "du -s -k |path_spec|" on |device| and returns parsed result.
+
+ Args:
+ device: A DeviceUtils instance.
+ path_spec: The list of paths to run du on. May contain shell expansions
+ (will not be escaped).
+ run_as: Package name to run as, or None to run as shell user. If not None
+ and app is not android:debuggable (run-as fails), then command will be
+ run as root.
+
+ Returns:
+ A dict of path->size in KiB containing all paths in |path_spec| that exist
+ on device. Paths that do not exist are silently ignored.
+ """
+ # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
+ # 144 /data/data/org.chromium.chrome/cache
+ # 8 /data/data/org.chromium.chrome/files
+ # <snip>
+ # du: .*: No such file or directory
+
+ # The -d flag works differently across android version, so use -s instead.
+ # Without the explicit 2>&1, stderr and stdout get combined at random :(.
+ cmd_str = 'du -s -k ' + path_spec + ' 2>&1'
+ lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
+ check_return=False)
+ output = '\n'.join(lines)
+ # run-as: Package 'com.android.chrome' is not debuggable
+ if output.startswith('run-as:'):
+ # check_return=False needed for when some paths in path_spec do not exist.
+ lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
+ check_return=False)
+ ret = {}
+ try:
+ for line in lines:
+ # du: .*: No such file or directory
+ if line.startswith('du:'):
+ continue
+ size, subpath = line.split(None, 1)
+ ret[subpath] = int(size)
+ return ret
+ except ValueError:
+ logging.error('du command was: %s', cmd_str)
+ logging.error('Failed to parse du output:\n%s', output)
+ raise
+
+
+def _RunDiskUsage(devices, package_name):
+ # Measuring dex size is a bit complicated:
+ # https://source.android.com/devices/tech/dalvik/jit-compiler
+ #
+ # For KitKat and below:
+ # dumpsys package contains:
+ # dataDir=/data/data/org.chromium.chrome
+ # codePath=/data/app/org.chromium.chrome-1.apk
+ # resourcePath=/data/app/org.chromium.chrome-1.apk
+ # nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
+ # To measure odex:
+ # ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex
+ #
+ # For Android L and M (and maybe for N+ system apps):
+ # dumpsys package contains:
+ # codePath=/data/app/org.chromium.chrome-1
+ # resourcePath=/data/app/org.chromium.chrome-1
+ # legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
+ # To measure odex:
+ # # Option 1:
+ # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex
+ # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex
+ # ls -l /data/dalvik-cache/profiles/org.chromium.chrome
+ # (these profiles all appear to be 0 bytes)
+ # # Option 2:
+ # ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
+ #
+ # For Android N+:
+ # dumpsys package contains:
+ # dataDir=/data/user/0/org.chromium.chrome
+ # codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+ # resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
+ # legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
+ # Instruction Set: arm
+ # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+ # status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
+ # ilter=quicken]
+ # Instruction Set: arm64
+ # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
+ # status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
+ # uicken]
+ # To measure odex:
+ # ls -l /data/app/.../oat/arm/base.odex
+ # ls -l /data/app/.../oat/arm/base.vdex (optional)
+ # To measure the correct odex size:
+ # cmd package compile -m speed org.chromium.chrome # For webview
+ # cmd package compile -m speed-profile org.chromium.chrome # For others
+ def disk_usage_helper(d):
+ package_output = '\n'.join(d.RunShellCommand(
+ ['dumpsys', 'package', package_name], check_return=True))
+ # Does not return error when apk is not installed.
+ if not package_output or 'Unable to find package:' in package_output:
+ return None
+
+ # Ignore system apks that have updates installed.
+ package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+ package_output, flags=re.S | re.M)
+
+ try:
+ data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
+ code_path = re.search(r'codePath=(.*)', package_output).group(1)
+ lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
+ package_output).group(1)
+ except AttributeError:
+ raise Exception('Error parsing dumpsys output: ' + package_output)
+
+ if code_path.startswith('/system'):
+ logging.warning('Measurement of system image apks can be innacurate')
+
+ compilation_filters = set()
+ # Match "compilation_filter=value", where a line break can occur at any spot
+ # (refer to examples above).
+ awful_wrapping = r'\s*'.join('compilation_filter=')
+ for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
+ compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
+ compilation_filter = ','.join(sorted(compilation_filters))
+
+ data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
+ # Measure code_cache separately since it can be large.
+ code_cache_sizes = {}
+ code_cache_dir = next(
+ (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
+ if code_cache_dir:
+ data_dir_sizes.pop(code_cache_dir)
+ code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
+ run_as=package_name)
+
+ apk_path_spec = code_path
+ if not apk_path_spec.endswith('.apk'):
+ apk_path_spec += '/*.apk'
+ apk_sizes = _DuHelper(d, apk_path_spec)
+ if lib_path.endswith('/lib'):
+ # Shows architecture subdirectory.
+ lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
+ else:
+ lib_sizes = _DuHelper(d, lib_path)
+
+ # Look at all possible locations for odex files.
+ odex_paths = []
+ for apk_path in apk_sizes:
+ mangled_apk_path = apk_path[1:].replace('/', '@')
+ apk_basename = posixpath.basename(apk_path)[:-4]
+ for ext in ('dex', 'odex', 'vdex', 'art'):
+ # Easier to check all architectures than to determine active ones.
+ for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
+ odex_paths.append(
+ '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
+ # No app could possibly have more than 6 dex files.
+ for suffix in ('', '2', '3', '4', '5'):
+ odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
+ arch, mangled_apk_path, suffix, ext))
+ # This path does not have |arch|, so don't repeat it for every arch.
+ if arch == 'arm':
+ odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
+ mangled_apk_path, suffix))
+
+ odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
+
+ return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+ compilation_filter)
+
+ def print_sizes(desc, sizes):
+ print '%s: %d KiB' % (desc, sum(sizes.itervalues()))
+ for path, size in sorted(sizes.iteritems()):
+ print ' %s: %s KiB' % (path, size)
+
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
+ for result in _PrintPerDeviceOutput(devices, all_results):
+ if not result:
+ print 'APK is not installed.'
+ continue
+
+ (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
+ compilation_filter) = result
+ total = sum(sum(sizes.itervalues()) for sizes in result[:-1])
+
+ print_sizes('Apk', apk_sizes)
+ print_sizes('App Data (non-code cache)', data_dir_sizes)
+ print_sizes('App Data (code cache)', code_cache_sizes)
+ print_sizes('Native Libs', lib_sizes)
+ show_warning = compilation_filter and 'speed' not in compilation_filter
+ compilation_filter = compilation_filter or 'n/a'
+ print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
+ if show_warning:
+ logging.warning('For a more realistic odex size, run:')
+ logging.warning(' %s compile-dex [speed|speed-profile]', sys.argv[0])
+ print 'Total: %s KiB (%.1f MiB)' % (total, total / 1024.0)
+
+
+class _LogcatProcessor(object):
+ ParsedLine = collections.namedtuple(
+ 'ParsedLine',
+ ['date', 'invokation_time', 'pid', 'tid', 'priority', 'tag', 'message'])
+
+ def __init__(self, device, package_name, deobfuscate=None, verbose=False):
+ self._device = device
+ self._package_name = package_name
+ self._verbose = verbose
+ self._deobfuscator = deobfuscate
+ self._primary_pid = None
+ self._my_pids = set()
+ self._seen_pids = set()
+ self._UpdateMyPids()
+
+ def _UpdateMyPids(self):
+ # We intentionally do not clear self._my_pids to make sure that the
+ # ProcessLine method below also includes lines from processes which may
+ # have already exited.
+ self._primary_pid = None
+ for process in _GetPackageProcesses(self._device, self._package_name):
+ # We take only the first "main" process found in order to account for
+ # possibly forked() processes.
+ if ':' not in process.name and self._primary_pid is None:
+ self._primary_pid = process.pid
+ self._my_pids.add(process.pid)
+
+ def _GetPidStyle(self, pid, dim=False):
+ if pid == self._primary_pid:
+ return colorama.Fore.WHITE
+ elif pid in self._my_pids:
+ # TODO(wnwen): Use one separate persistent color per process, pop LRU
+ return colorama.Fore.YELLOW
+ elif dim:
+ return colorama.Style.DIM
+ return ''
+
+ def _GetPriorityStyle(self, priority, dim=False):
+ # pylint:disable=no-self-use
+ if dim:
+ return ''
+ style = ''
+ if priority == 'E' or priority == 'F':
+ style = colorama.Back.RED
+ elif priority == 'W':
+ style = colorama.Back.YELLOW
+ elif priority == 'I':
+ style = colorama.Back.GREEN
+ elif priority == 'D':
+ style = colorama.Back.BLUE
+ return style + colorama.Fore.BLACK
+
+ def _ParseLine(self, line):
+ tokens = line.split(None, 6)
+ date = tokens[0]
+ invokation_time = tokens[1]
+ pid = int(tokens[2])
+ tid = int(tokens[3])
+ priority = tokens[4]
+ tag = tokens[5]
+ if len(tokens) > 6:
+ original_message = tokens[6]
+ else: # Empty log message
+ original_message = ''
+ # Example:
+ # 09-19 06:35:51.113 9060 9154 W GCoreFlp: No location...
+ # 09-19 06:01:26.174 9060 10617 I Auth : [ReflectiveChannelBinder]...
+ # Parsing "GCoreFlp:" vs "Auth :", we only want tag to contain the word,
+ # and we don't want to keep the colon for the message.
+ if tag[-1] == ':':
+ tag = tag[:-1]
+ else:
+ original_message = original_message[2:]
+ return self.ParsedLine(
+ date, invokation_time, pid, tid, priority, tag, original_message)
+
+ def _PrintParsedLine(self, parsed_line, dim=False):
+ tid_style = ''
+ # Make the main thread bright.
+ if not dim and parsed_line.pid == parsed_line.tid:
+ tid_style = colorama.Style.BRIGHT
+ pid_style = self._GetPidStyle(parsed_line.pid, dim)
+ # We have to pad before adding color as that changes the width of the tag.
+ pid_str = _Colorize('{:5}'.format(parsed_line.pid), pid_style)
+ tid_str = _Colorize('{:5}'.format(parsed_line.tid), tid_style)
+ tag = _Colorize('{:8}'.format(parsed_line.tag),
+ pid_style + ('' if dim else colorama.Style.BRIGHT))
+ priority = _Colorize(parsed_line.priority,
+ self._GetPriorityStyle(parsed_line.priority))
+ messages = [parsed_line.message]
+ if self._deobfuscator:
+ messages = self._deobfuscator.TransformLines(messages)
+ for message in messages:
+ message = _Colorize(message, pid_style)
+ sys.stdout.write('{} {} {} {} {} {}: {}\n'.format(
+ parsed_line.date, parsed_line.invokation_time, pid_str, tid_str,
+ priority, tag, message))
+
+ def ProcessLine(self, line, fast=False):
+ if not line or line.startswith('------'):
+ return
+ log = self._ParseLine(line)
+ if log.pid not in self._seen_pids:
+ self._seen_pids.add(log.pid)
+ if not fast:
+ self._UpdateMyPids()
+
+ owned_pid = log.pid in self._my_pids
+ if fast and not owned_pid:
+ return
+ if owned_pid and not self._verbose and log.tag == 'dalvikvm':
+ if _DALVIK_IGNORE_PATTERN.match(log.message):
+ return
+
+ if owned_pid or self._verbose or (
+ log.priority == 'F' or # Java crash dump
+ log.tag == 'ActivityManager' or # Android system
+ log.tag == 'DEBUG'): # Native crash dump
+ self._PrintParsedLine(log, not owned_pid)
+
+
+def _RunLogcat(device, package_name, mapping_path, verbose):
+ deobfuscate = None
+ if mapping_path:
+ try:
+ deobfuscate = deobfuscator.Deobfuscator(mapping_path)
+ except OSError:
+ sys.stderr.write('Error executing "bin/java_deobfuscate". '
+ 'Did you forget to build it?\n')
+ sys.exit(1)
+
+ try:
+ logcat_processor = _LogcatProcessor(
+ device, package_name, deobfuscate, verbose)
+ nonce = 'apk_wrappers.py nonce={}'.format(random.random())
+ device.RunShellCommand(['log', nonce])
+ fast = True
+ for line in device.adb.Logcat(logcat_format='threadtime'):
+ try:
+ logcat_processor.ProcessLine(line, fast)
+ except:
+ sys.stderr.write('Failed to process line: ' + line)
+ raise
+ if fast and nonce in line:
+ fast = False
+ except KeyboardInterrupt:
+ pass # Don't show stack trace upon Ctrl-C
+ finally:
+ if mapping_path:
+ deobfuscate.Close()
+
+
+def _GetPackageProcesses(device, package_name):
+ return [
+ p for p in device.ListProcesses(package_name)
+ if p.name == package_name or p.name.startswith(package_name + ':')]
+
+
+def _RunPs(devices, package_name):
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ all_processes = parallel_devices.pMap(
+ lambda d: _GetPackageProcesses(d, package_name)).pGet(None)
+ for processes in _PrintPerDeviceOutput(devices, all_processes):
+ if not processes:
+ print 'No processes found.'
+ else:
+ proc_map = collections.defaultdict(list)
+ for p in processes:
+ proc_map[p.name].append(str(p.pid))
+ for name, pids in sorted(proc_map.items()):
+ print name, ','.join(pids)
+
+
+def _RunShell(devices, package_name, cmd):
+ if cmd:
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ outputs = parallel_devices.RunShellCommand(
+ cmd, run_as=package_name).pGet(None)
+ for output in _PrintPerDeviceOutput(devices, outputs):
+ for line in output:
+ print line
+ else:
+ adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
+ cmd = [adb_path, '-s', devices[0].serial, 'shell']
+ # Pre-N devices do not support -t flag.
+ if devices[0].build_version_sdk >= version_codes.NOUGAT:
+ cmd += ['-t', 'run-as', package_name]
+ else:
+ print 'Upon entering the shell, run:'
+ print 'run-as', package_name
+ print
+ os.execv(adb_path, cmd)
+
+
+def _RunCompileDex(devices, package_name, compilation_filter):
+ cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
+ package_name]
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ outputs = parallel_devices.RunShellCommand(cmd, timeout=120).pGet(None)
+ for output in _PrintPerDeviceOutput(devices, outputs):
+ for line in output:
+ print line
+
+
def _RunProfile(device, package_name, host_build_directory, pprof_out_path,
                process_specifier, thread_specifier, extra_args):
  """Runs simpleperf on the device and converts the capture to pprof format.

  Blocks until the user presses Enter, then post-processes the capture.
  """
  simpleperf.PrepareDevice(device)
  device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name)
  # NamedTemporaryFile deletes the file when the context exits, so all use of
  # host_simpleperf_out_path must stay inside this block.
  with tempfile.NamedTemporaryFile() as fh:
    host_simpleperf_out_path = fh.name

    with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name,
                                  process_specifier, thread_specifier,
                                  extra_args, host_simpleperf_out_path):
      sys.stdout.write('Profiler is running; press Enter to stop...')
      sys.stdin.read(1)
      sys.stdout.write('Post-processing data...')
      sys.stdout.flush()

    simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path,
                                        host_build_directory, pprof_out_path)
  print textwrap.dedent("""
Profile data written to %(s)s.

To view profile as a call graph in browser:
pprof -web %(s)s

To print the hottest methods:
pprof -top %(s)s

pprof has many useful customization options; `pprof --help` for details.
""" % {'s': pprof_out_path})
+
+
def _GenerateAvailableDevicesMessage(devices):
  """Returns a printable listing of |devices| with their build descriptions."""
  devices_obj = device_utils.DeviceUtils.parallel(devices)
  descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None)
  parts = ['Available devices:\n']
  for device, description in zip(devices, descriptions):
    parts.append(' %s (%s)\n' % (device, description))
  return ''.join(parts)
+
+
# TODO(agrieve): add "--all" in the MultipleDevicesError message and use it here.
def _GenerateMissingAllFlagMessage(devices):
  """Error text shown when several devices are attached but none was selected."""
  header = ('More than one device available. Use --all to select all devices, '
            'or use --device to select a device by serial.\n\n')
  return header + _GenerateAvailableDevicesMessage(devices)
+
+
def _DisplayArgs(devices, command_line_flags_file):
  """Prints the current command-line flags file contents for each device."""
  def flags_helper(d):
    changer = flag_changer.FlagChanger(d, command_line_flags_file)
    return changer.GetCurrentFlags()

  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  outputs = parallel_devices.pMap(flags_helper).pGet(None)
  print 'Existing flags per-device (via /data/local/tmp/{}):'.format(
      command_line_flags_file)
  for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True):
    # Quote each flag so the printed line can be pasted back into a shell.
    quoted_flags = ' '.join(pipes.quote(f) for f in flags)
    print quoted_flags or 'No flags set.'
+
+
def _DeviceCachePath(device, output_directory):
  """Returns the path of the per-device JSON cache file in |output_directory|."""
  return os.path.join(output_directory,
                      'device_cache_%s.json' % device.serial)
+
+
def _LoadDeviceCaches(devices, output_directory):
  """Primes each device's in-memory cache from its on-disk cache file, if any.

  No-op when no output directory is configured (caches are keyed off it).
  """
  if not output_directory:
    return
  for d in devices:
    cache_path = _DeviceCachePath(d, output_directory)
    if os.path.exists(cache_path):
      logging.debug('Using device cache: %s', cache_path)
      with open(cache_path) as f:
        d.LoadCacheData(f.read())
      # Delete the cached file so that any exceptions cause it to be cleared.
      os.unlink(cache_path)
    else:
      logging.debug('No cache present for device: %s', d)
+
+
def _SaveDeviceCaches(devices, output_directory):
  """Writes each device's cache data to disk for reuse by later invocations."""
  if not output_directory:
    return
  for device in devices:
    cache_path = _DeviceCachePath(device, output_directory)
    with open(cache_path, 'w') as cache_file:
      cache_file.write(device.DumpCacheData())
      logging.info('Wrote device cache: %s', cache_path)
+
+
class _Command(object):
  """Base class for all subcommands.

  Subclasses declare their argument needs and device-selection behavior via
  the class attributes below, then implement Run().
  """
  # Subcommand name and help texts used by argparse.
  name = None
  description = None
  long_description = None
  # Argument / environment requirements declared by subclasses.
  needs_package_name = False
  needs_output_directory = False
  needs_apk_path = False
  supports_incremental = False
  accepts_command_line_flags = False
  accepts_args = False
  need_device_args = True
  all_devices_by_default = False
  # True when Run() exec()s another process, so caches must be saved early.
  calls_exec = False
  supports_multiple_devices = True

  def __init__(self, from_wrapper_script, is_bundle):
    self._parser = None
    self._from_wrapper_script = from_wrapper_script
    self.args = None
    self.apk_helper = None
    self.install_dict = None
    self.devices = None
    self.is_bundle = is_bundle
    self.bundle_generation_info = None
    # Only support incremental install from APK wrapper scripts.
    if is_bundle or not from_wrapper_script:
      self.supports_incremental = False

  def RegisterBundleGenerationInfo(self, bundle_generation_info):
    self.bundle_generation_info = bundle_generation_info

  def _RegisterExtraArgs(self, subp):
    # Hook for subclasses to add command-specific arguments.
    pass

  def RegisterArgs(self, parser):
    """Creates this command's subparser and registers the common arguments."""
    subp = parser.add_parser(
        self.name, help=self.description,
        description=self.long_description or self.description,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    self._parser = subp
    subp.set_defaults(command=self)
    if self.need_device_args:
      subp.add_argument('--all',
                        action='store_true',
                        default=self.all_devices_by_default,
                        help='Operate on all connected devices.',)
      subp.add_argument('-d',
                        '--device',
                        action='append',
                        default=[],
                        dest='devices',
                        help='Target device for script to work on. Enter '
                             'multiple times for multiple devices.')
    subp.add_argument('-v',
                      '--verbose',
                      action='count',
                      default=0,
                      dest='verbose_count',
                      help='Verbose level (multiple times for more)')
    group = subp.add_argument_group('%s arguments' % self.name)

    if self.needs_package_name:
      # Three cases to consider here, since later code assumes
      # self.args.package_name always exists, even if None:
      #
      # - Called from a bundle wrapper script, the package_name is already
      #   set through parser.set_defaults(), so don't call add_argument()
      #   to avoid overriding its value.
      #
      # - Called from an apk wrapper script. The --package-name argument
      #   should not appear, but self.args.package_name will be gleaned from
      #   the --apk-path file later.
      #
      # - Called directly, then --package-name is required on the command-line.
      #
      if not self.is_bundle:
        group.add_argument(
            '--package-name',
            help=argparse.SUPPRESS if self._from_wrapper_script else (
                "App's package name."))

    if self.needs_apk_path or self.needs_package_name:
      # Adding this argument to the subparser would override the set_defaults()
      # value set by on the parent parser (even if None).
      if not self._from_wrapper_script and not self.is_bundle:
        group.add_argument('--apk-path',
                           required=self.needs_apk_path,
                           help='Path to .apk')

    if self.supports_incremental:
      group.add_argument('--incremental',
                         action='store_true',
                         default=False,
                         help='Always install an incremental apk.')
      group.add_argument('--non-incremental',
                         action='store_true',
                         default=False,
                         help='Always install a non-incremental apk.')

    # accepts_command_line_flags and accepts_args are mutually exclusive.
    # argparse will throw if they are both set.
    if self.accepts_command_line_flags:
      group.add_argument(
          '--args', help='Command-line flags. Use = to assign args.')

    if self.accepts_args:
      group.add_argument(
          '--args', help='Extra arguments. Use = to assign args')

    if not self._from_wrapper_script and self.accepts_command_line_flags:
      # Provided by wrapper scripts.
      group.add_argument(
          '--command-line-flags-file',
          help='Name of the command-line flags file')

    self._RegisterExtraArgs(group)

  def ProcessArgs(self, args):
    """Validates parsed args, resolves the APK to use, and selects devices."""
    self.args = args
    # Ensure these keys always exist. They are set by wrapper scripts, but not
    # always added when not using wrapper scripts.
    args.__dict__.setdefault('apk_path', None)
    args.__dict__.setdefault('incremental_json', None)

    incremental_apk_path = None
    if args.incremental_json and not (self.supports_incremental and
                                      args.non_incremental):
      with open(args.incremental_json) as f:
        install_dict = json.load(f)
      incremental_apk_path = os.path.join(args.output_directory,
                                          install_dict['apk_path'])
      if not os.path.exists(incremental_apk_path):
        incremental_apk_path = None

    if self.supports_incremental:
      if args.incremental and args.non_incremental:
        self._parser.error('Must use only one of --incremental and '
                           '--non-incremental')
      elif args.non_incremental:
        if not args.apk_path:
          self._parser.error('Apk has not been built.')
      elif args.incremental:
        if not incremental_apk_path:
          self._parser.error('Incremental apk has not been built.')
        args.apk_path = None

      if args.apk_path and incremental_apk_path:
        self._parser.error('Both incremental and non-incremental apks exist. '
                           'Select using --incremental or --non-incremental')

    if ((self.needs_apk_path and not self.is_bundle) or args.apk_path or
        incremental_apk_path):
      if args.apk_path:
        self.apk_helper = apk_helper.ToHelper(args.apk_path)
      elif incremental_apk_path:
        self.install_dict = install_dict
        self.apk_helper = apk_helper.ToHelper(incremental_apk_path)
      else:
        self._parser.error('Apk is not built.')

    if self.needs_package_name and not args.package_name:
      if self.apk_helper:
        args.package_name = self.apk_helper.GetPackageName()
      elif self._from_wrapper_script:
        self._parser.error('Apk is not built.')
      else:
        self._parser.error('One of --package-name or --apk-path is required.')

    self.devices = []
    if self.need_device_args:
      # See https://crbug.com/887964 regarding bundle support in apk_helper.
      abis = None
      if not self.is_bundle and self.apk_helper is not None:
        abis = self.apk_helper.GetAbis()
      self.devices = device_utils.DeviceUtils.HealthyDevices(
          device_arg=args.devices,
          enable_device_files_cache=bool(args.output_directory),
          default_retries=0,
          abis=abis)
      # TODO(agrieve): Device cache should not depend on output directory.
      #     Maybe put int /tmp?
      _LoadDeviceCaches(self.devices, args.output_directory)

      try:
        if len(self.devices) > 1:
          if not self.supports_multiple_devices:
            self._parser.error(device_errors.MultipleDevicesError(self.devices))
          if not args.all and not args.devices:
            self._parser.error(_GenerateMissingAllFlagMessage(self.devices))
        # Save cache now if command will not get a chance to afterwards.
        if self.calls_exec:
          _SaveDeviceCaches(self.devices, args.output_directory)
      except:
        # Bare except is deliberate: save caches on any failure (including
        # KeyboardInterrupt), then re-raise.
        _SaveDeviceCaches(self.devices, args.output_directory)
        raise
+
+
class _DevicesCommand(_Command):
  name = 'devices'
  description = 'Describe attached devices.'
  all_devices_by_default = True

  def Run(self):
    """Prints the list of healthy attached devices."""
    print _GenerateAvailableDevicesMessage(self.devices)
+
+
class _PackageInfoCommand(_Command):
  name = 'package-info'
  # TODO(ntfschr): Support this by figuring out how to construct
  # self.apk_helper for bundles (http://crbug.com/952443).
  description = 'Show various attributes of this APK.'
  need_device_args = False
  needs_package_name = True
  needs_apk_path = True

  def Run(self):
    """Prints identifying metadata extracted from the APK manifest."""
    # Format all (even ints) as strings, to handle cases where APIs return None
    print 'Package name: "%s"' % self.args.package_name
    print 'versionCode: %s' % self.apk_helper.GetVersionCode()
    print 'versionName: "%s"' % self.apk_helper.GetVersionName()
    print 'minSdkVersion: %s' % self.apk_helper.GetMinSdkVersion()
    print 'targetSdkVersion: "%s"' % self.apk_helper.GetTargetSdkVersion()
    print 'Supported ABIs: %r' % self.apk_helper.GetAbis()
+
+
class _InstallCommand(_Command):
  name = 'install'
  description = 'Installs the APK or bundle to one or more devices.'
  needs_apk_path = True
  supports_incremental = True

  def _RegisterExtraArgs(self, group):
    # Module selection flags only apply to app bundles.
    if self.is_bundle:
      group.add_argument(
          '-m',
          '--module',
          action='append',
          help='Module to install. Can be specified multiple times. ' +
               'One of them has to be \'{}\''.format(BASE_MODULE))
      group.add_argument(
          '-f',
          '--fake',
          action='append',
          help='Fake bundle module install. Can be specified multiple times. '
               'Requires \'-m {0}\' to be given, and \'-f {0}\' is illegal.'.format(
                   BASE_MODULE))

  def Run(self):
    """Installs either the bundle's .apks archive or the plain APK."""
    if self.is_bundle:
      # Store .apks file beside the .aab file so that it gets cached.
      output_path = self.bundle_generation_info.bundle_apks_path
      _GenerateBundleApks(self.bundle_generation_info, output_path)
      _InstallBundle(self.devices, output_path, self.args.package_name,
                     self.args.command_line_flags_file, self.args.module,
                     self.args.fake)
    else:
      _InstallApk(self.devices, self.apk_helper, self.install_dict)
+
+
class _UninstallCommand(_Command):
  name = 'uninstall'
  description = 'Removes the APK or bundle from one or more devices.'
  needs_package_name = True

  def Run(self):
    """Uninstalls the package from every selected device."""
    _UninstallApk(self.devices, self.install_dict, self.args.package_name)
+
+
class _SetWebViewProviderCommand(_Command):
  name = 'set-webview-provider'
  description = ("Sets the device's WebView provider to this APK's "
                 "package name.")
  needs_package_name = True

  def Run(self):
    """Validates this APK is a WebView provider, then switches the device."""
    if self.is_bundle:
      # TODO(ntfschr): Support this by figuring out how to construct
      # self.apk_helper for bundles (http://crbug.com/952443).
      raise Exception(
          'Switching WebView providers not supported for bundles yet!')
    if not _IsWebViewProvider(self.apk_helper):
      raise Exception('This package does not have a WebViewLibrary meta-data '
                      'tag. Are you sure it contains a WebView implementation?')
    _SetWebViewProvider(self.devices, self.args.package_name)
+
+
class _LaunchCommand(_Command):
  name = 'launch'
  description = ('Sends a launch intent for the APK or bundle after first '
                 'writing the command-line flags file.')
  needs_package_name = True
  accepts_command_line_flags = True
  all_devices_by_default = True

  def _RegisterExtraArgs(self, group):
    group.add_argument('-w', '--wait-for-java-debugger', action='store_true',
                       help='Pause execution until debugger attaches. Applies '
                            'only to the main process. To have renderers wait, '
                            'use --args="--renderer-wait-for-java-debugger"')
    group.add_argument('--debug-process-name',
                       help='Name of the process to debug. '
                            'E.g. "privileged_process0", or "foo.bar:baz"')
    group.add_argument('--nokill', action='store_true',
                       help='Do not set the debug-app, nor set command-line '
                            'flags. Useful to load a URL without having the '
                            'app restart.')
    group.add_argument('url', nargs='?', help='A URL to launch with.')

  def Run(self):
    """Launches the app (optionally with a URL) on all selected devices."""
    if self.args.url and self.is_bundle:
      # TODO(digit): Support this, maybe by using 'dumpsys' as described
      # in the _LaunchUrl() comment.
      raise Exception('Launching with URL not supported for bundles yet!')
    _LaunchUrl(self.devices, self.args.package_name, argv=self.args.args,
               command_line_flags_file=self.args.command_line_flags_file,
               url=self.args.url, apk=self.apk_helper,
               wait_for_java_debugger=self.args.wait_for_java_debugger,
               debug_process_name=self.args.debug_process_name,
               nokill=self.args.nokill)
+
+
class _StopCommand(_Command):
  name = 'stop'
  description = 'Force-stops the app.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    """Force-stops the package on every selected device in parallel."""
    device_utils.DeviceUtils.parallel(self.devices).ForceStop(
        self.args.package_name)
+
+
class _ClearDataCommand(_Command):
  name = 'clear-data'
  # Bug fix: this attribute was misspelled "descriptions", which left the
  # inherited _Command.description as None and registered the subcommand
  # with no help text.
  description = 'Clears all app data.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    """Clears the package's application state on every selected device."""
    device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
        self.args.package_name)
+
+
class _ArgvCommand(_Command):
  name = 'argv'
  description = 'Display and optionally update command-line flags file.'
  needs_package_name = True
  accepts_command_line_flags = True
  all_devices_by_default = True

  def Run(self):
    """Displays (and, when --args is given, rewrites) the flags file."""
    _ChangeFlags(self.devices, self.args.args,
                 self.args.command_line_flags_file)
+
+
class _GdbCommand(_Command):
  name = 'gdb'
  description = 'Runs //build/android/adb_gdb with apk-specific args.'
  long_description = description + """

To attach to a process other than the APK's main process, use --pid=1234.
To list all PIDs, use the "ps" command.

If no apk process is currently running, sends a launch intent.
"""
  needs_package_name = True
  needs_output_directory = True
  # adb_gdb replaces this process, so caches must be saved before Run().
  calls_exec = True
  supports_multiple_devices = False

  def Run(self):
    """Hands off to _RunGdb for the single selected device."""
    _RunGdb(self.devices[0], self.args.package_name,
            self.args.debug_process_name, self.args.pid,
            self.args.output_directory, self.args.target_cpu, self.args.port,
            self.args.ide, bool(self.args.verbose_count))

  def _RegisterExtraArgs(self, group):
    pid_group = group.add_mutually_exclusive_group()
    pid_group.add_argument('--debug-process-name',
                           help='Name of the process to attach to. '
                                'E.g. "privileged_process0", or "foo.bar:baz"')
    pid_group.add_argument('--pid',
                           help='The process ID to attach to. Defaults to '
                                'the main process for the package.')
    group.add_argument('--ide', action='store_true',
                       help='Rather than enter a gdb prompt, set up the '
                            'gdb connection and wait for an IDE to '
                            'connect.')
    # Same default port that ndk-gdb.py uses.
    group.add_argument('--port', type=int, default=5039,
                       help='Use the given port for the GDB connection')
+
+
class _LogcatCommand(_Command):
  name = 'logcat'
  # Bug fix: help-text typos corrected ("relevant the" -> "relevant to the",
  # "spamy" -> "spammy").
  description = 'Runs "adb logcat" with filters relevant to the current APK.'
  long_description = description + """

"Relevant filters" means:
  * Log messages from processes belonging to the apk,
  * Plus log messages from log tags: ActivityManager|DEBUG,
  * Plus fatal logs from any process,
  * Minus spammy dalvikvm logs (for pre-L devices).

Colors:
  * Primary process is white
  * Other processes (gpu, renderer) are yellow
  * Non-apk processes are grey
  * UI thread has a bolded Thread-ID

Java stack traces are detected and deobfuscated (for release builds).

To disable filtering, (but keep coloring), use --verbose.
"""
  needs_package_name = True
  supports_multiple_devices = False

  def Run(self):
    """Streams filtered, optionally deobfuscated logcat for the package."""
    mapping = self.args.proguard_mapping_path
    if self.args.no_deobfuscate:
      mapping = None
    _RunLogcat(self.devices[0], self.args.package_name, mapping,
               bool(self.args.verbose_count))

  def _RegisterExtraArgs(self, group):
    if self._from_wrapper_script:
      group.add_argument('--no-deobfuscate', action='store_true',
                         help='Disables ProGuard deobfuscation of logcat.')
    else:
      # Wrapper scripts provide the mapping path; direct invocations must
      # pass it explicitly.
      group.set_defaults(no_deobfuscate=False)
      group.add_argument('--proguard-mapping-path',
                         help='Path to ProGuard map (enables deobfuscation)')
+
+
class _PsCommand(_Command):
  name = 'ps'
  description = 'Show PIDs of any APK processes currently running.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    """Prints running process names and PIDs for the package."""
    _RunPs(self.devices, self.args.package_name)
+
+
class _DiskUsageCommand(_Command):
  name = 'disk-usage'
  description = 'Show how much device storage is being consumed by the app.'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    """Prints a storage-usage breakdown for the package."""
    _RunDiskUsage(self.devices, self.args.package_name)
+
+
class _MemUsageCommand(_Command):
  name = 'mem-usage'
  description = 'Show memory usage of currently running APK processes.'
  needs_package_name = True
  all_devices_by_default = True

  def _RegisterExtraArgs(self, group):
    group.add_argument('--query-app', action='store_true',
        help='Do not add --local to "dumpsys meminfo". This will output '
             'additional metrics (e.g. Context count), but also cause memory '
             'to be used in order to gather the metrics.')

  def Run(self):
    """Prints dumpsys-meminfo-based usage for the package's processes."""
    _RunMemUsage(self.devices, self.args.package_name,
                 query_app=self.args.query_app)
+
+
class _ShellCommand(_Command):
  name = 'shell'
  description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
                 '(via run-as). Useful for inspecting the app\'s data '
                 'directory.')
  needs_package_name = True

  # These override the base-class attributes dynamically: with no explicit
  # command we exec an interactive shell (single device only).
  @property
  def calls_exec(self):
    return not self.args.cmd

  @property
  def supports_multiple_devices(self):
    return not self.args.cmd

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        'cmd', nargs=argparse.REMAINDER, help='Command to run.')

  def Run(self):
    """Runs the command (or an interactive shell) as the app's uid."""
    _RunShell(self.devices, self.args.package_name, self.args.cmd)
+
+
class _CompileDexCommand(_Command):
  name = 'compile-dex'
  description = ('Applicable only for Android N+. Forces .odex files to be '
                 'compiled with the given compilation filter. To see existing '
                 'filter, use "disk-usage" command.')
  needs_package_name = True
  all_devices_by_default = True

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        'compilation_filter',
        choices=['verify', 'quicken', 'space-profile', 'space',
                 'speed-profile', 'speed'],
        help='For WebView/Monochrome, use "speed". For other apks, use '
             '"speed-profile".')

  def Run(self):
    """Triggers ART recompilation with the chosen filter."""
    _RunCompileDex(self.devices, self.args.package_name,
                   self.args.compilation_filter)
+
+
class _ProfileCommand(_Command):
  name = 'profile'
  description = ('Run the simpleperf sampling CPU profiler on the currently-'
                 'running APK. If --args is used, the extra arguments will be '
                 'passed on to simpleperf; otherwise, the following default '
                 'arguments are used: -g -f 1000 -o /data/local/tmp/perf.data')
  needs_package_name = True
  needs_output_directory = True
  supports_multiple_devices = False
  accepts_args = True

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        '--profile-process', default='browser',
        help=('Which process to profile. This may be a process name or pid '
              'such as you would get from running `%s ps`; or '
              'it can be one of (browser, renderer, gpu).' % sys.argv[0]))
    group.add_argument(
        '--profile-thread', default=None,
        help=('(Optional) Profile only a single thread. This may be either a '
              'thread ID such as you would get by running `adb shell ps -t` '
              '(pre-Oreo) or `adb shell ps -e -T` (Oreo and later); or it may '
              'be one of (io, compositor, main, render), in which case '
              '--profile-process is also required. (Note that "render" thread '
              'refers to a thread in the browser process that manages a '
              'renderer; to profile the main thread of the renderer process, '
              'use --profile-thread=main).'))
    group.add_argument('--profile-output', default='profile.pb',
                       help='Output file for profiling data')

  def Run(self):
    """Runs simpleperf on the single selected device and writes pprof output."""
    extra_args = shlex.split(self.args.args or '')
    _RunProfile(self.devices[0], self.args.package_name,
                self.args.output_directory, self.args.profile_output,
                self.args.profile_process, self.args.profile_thread,
                extra_args)
+
+
class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand):
  """Composite command: install + launch + (optionally) logcat."""
  name = 'run'
  description = 'Install, launch, and show logcat (when targeting one device).'
  all_devices_by_default = False
  supports_multiple_devices = True

  def _RegisterExtraArgs(self, group):
    # Combine the extra args of all three parent commands.
    _InstallCommand._RegisterExtraArgs(self, group)
    _LaunchCommand._RegisterExtraArgs(self, group)
    _LogcatCommand._RegisterExtraArgs(self, group)
    group.add_argument('--no-logcat', action='store_true',
                       help='Install and launch, but do not enter logcat.')

  def Run(self):
    """Installs, launches, then tails logcat when exactly one device is used."""
    logging.warning('Installing...')
    _InstallCommand.Run(self)
    logging.warning('Sending launch intent...')
    _LaunchCommand.Run(self)
    if len(self.devices) == 1 and not self.args.no_logcat:
      logging.warning('Entering logcat...')
      _LogcatCommand.Run(self)
+
+
class _BuildBundleApks(_Command):
  name = 'build-bundle-apks'
  description = ('Build the .apks archive from an Android app bundle, and '
                 'optionally copy it to a specific destination.')
  need_device_args = False

  def _RegisterExtraArgs(self, group):
    group.add_argument(
        '--output-apks', required=True, help='Destination path for .apks file.')
    group.add_argument(
        '--minimal',
        action='store_true',
        help='Build .apks archive that targets the bundle\'s minSdkVersion and '
             'contains only english splits. It still contains optional splits.')
    group.add_argument(
        '--sdk-version',
        help='Implies --minimal. The sdkVersion to build the .apks for.')
    group.add_argument(
        '--build-mode',
        choices=app_bundle_utils.BUILD_APKS_MODES,
        help='Specify which type of APKs archive to build. "default" '
             'generates regular splits, "universal" generates an archive with a '
             'single universal APK, "system" generates an archive with a system '
             'image APK, while "system_compressed" generates a compressed system '
             'APK, with an additional stub APK for the system image.')

  def Run(self):
    """Builds the .apks archive at the user-specified destination."""
    _GenerateBundleApks(
        self.bundle_generation_info,
        self.args.output_apks,
        # --sdk-version implies --minimal.
        minimal=self.args.sdk_version is not None or self.args.minimal,
        minimal_sdk_version=self.args.sdk_version,
        mode=self.args.build_mode)
+
+
class _ManifestCommand(_Command):
  name = 'dump-manifest'
  description = 'Dump the android manifest from this bundle, as XML, to stdout.'
  need_device_args = False

  def Run(self):
    """Invokes bundletool to print the bundle's manifest."""
    bundletool.RunBundleTool([
        'dump', 'manifest', '--bundle', self.bundle_generation_info.bundle_path
    ])
+
+
# Shared commands for regular APKs and app bundles.
# Registration order here is the order subcommands appear in --help.
_COMMANDS = [
    _DevicesCommand,
    _PackageInfoCommand,
    _InstallCommand,
    _UninstallCommand,
    _SetWebViewProviderCommand,
    _LaunchCommand,
    _StopCommand,
    _ClearDataCommand,
    _ArgvCommand,
    _GdbCommand,
    _LogcatCommand,
    _PsCommand,
    _DiskUsageCommand,
    _MemUsageCommand,
    _ShellCommand,
    _CompileDexCommand,
    _ProfileCommand,
    _RunCommand,
]

# Commands specific to app bundles.
_BUNDLE_COMMANDS = [
    _BuildBundleApks,
    _ManifestCommand,
]
+
+
def _ParseArgs(parser, from_wrapper_script, is_bundle):
  """Registers all applicable subcommands on |parser| and parses sys.argv."""
  subparsers = parser.add_subparsers()
  command_list = _COMMANDS + (_BUNDLE_COMMANDS if is_bundle else [])
  commands = [clazz(from_wrapper_script, is_bundle) for clazz in command_list]

  for command in commands:
    # Commands that need the output directory are only available when invoked
    # through a generated wrapper script (which knows the directory).
    if from_wrapper_script or not command.needs_output_directory:
      command.RegisterArgs(subparsers)

  # Show extended help when no command is passed.
  argv = sys.argv[1:]
  if not argv:
    argv = ['--help']

  return parser.parse_args(argv)
+
+
def _RunInternal(parser, output_directory=None, bundle_generation_info=None):
  """Parses args, dispatches to the chosen command, and persists device caches."""
  colorama.init()
  parser.set_defaults(output_directory=output_directory)
  # A non-None output_directory implies invocation via a wrapper script.
  from_wrapper_script = bool(output_directory)
  args = _ParseArgs(parser, from_wrapper_script, bool(bundle_generation_info))
  run_tests_helper.SetLogLevel(args.verbose_count)
  args.command.ProcessArgs(args)
  if bundle_generation_info:
    args.command.RegisterBundleGenerationInfo(bundle_generation_info)
  args.command.Run()
  # Incremental install depends on the cache being cleared when uninstalling.
  if args.command.name != 'uninstall':
    _SaveDeviceCaches(args.command.devices, output_directory)
+
+
def Run(output_directory, apk_path, incremental_json, command_line_flags_file,
        target_cpu, proguard_mapping_path):
  """Entry point for generated wrapper scripts.

  Args:
    output_directory: Chromium output directory path.
    apk_path: Path to the built APK (ignored if it does not exist).
    incremental_json: Path to the incremental-install JSON (ignored if absent).
    command_line_flags_file: On-device file name holding command-line flags.
    target_cpu: Chromium target CPU name, used by the 'gdb' command.
    proguard_mapping_path: ProGuard map used to deobfuscate stack traces.
  """
  constants.SetOutputDirectory(output_directory)
  devil_chromium.Initialize(output_directory=output_directory)
  parser = argparse.ArgumentParser()
  exists_or_none = lambda p: p if p and os.path.exists(p) else None
  parser.set_defaults(
      command_line_flags_file=command_line_flags_file,
      target_cpu=target_cpu,
      apk_path=exists_or_none(apk_path),
      incremental_json=exists_or_none(incremental_json),
      proguard_mapping_path=proguard_mapping_path)
  _RunInternal(parser, output_directory=output_directory)
+
+
def RunForBundle(output_directory, bundle_path, bundle_apks_path, aapt2_path,
                 keystore_path, keystore_password, keystore_alias, package_name,
                 command_line_flags_file, proguard_mapping_path, target_cpu,
                 system_image_locales):
  """Entry point for generated app bundle wrapper scripts.

  Args:
    output_directory: Chromium output directory path.
    bundle_path: Input bundle path.
    bundle_apks_path: Output bundle .apks archive path.
    aapt2_path: Aapt2 tool path.
    keystore_path: Keystore file path.
    keystore_password: Keystore password.
    keystore_alias: Signing key name alias in keystore file.
    package_name: Application's package name.
    command_line_flags_file: Optional. Name of an on-device file that will be
      used to store command-line flags for this bundle.
    proguard_mapping_path: Input path to the Proguard mapping file, used to
      deobfuscate Java stack traces.
    target_cpu: Chromium target CPU name, used by the 'gdb' command.
    system_image_locales: List of Chromium locales that should be included in
      system image APKs.
  """
  constants.SetOutputDirectory(output_directory)
  devil_chromium.Initialize(output_directory=output_directory)
  bundle_generation_info = BundleGenerationInfo(
      bundle_path=bundle_path,
      bundle_apks_path=bundle_apks_path,
      aapt2_path=aapt2_path,
      keystore_path=keystore_path,
      keystore_password=keystore_password,
      keystore_alias=keystore_alias,
      system_image_locales=system_image_locales)

  parser = argparse.ArgumentParser()
  parser.set_defaults(
      package_name=package_name,
      command_line_flags_file=command_line_flags_file,
      proguard_mapping_path=proguard_mapping_path,
      target_cpu=target_cpu)
  _RunInternal(parser, output_directory=output_directory,
               bundle_generation_info=bundle_generation_info)
+
+
def main():
  """Entry point for direct (non-wrapper-script) invocation."""
  devil_chromium.Initialize()
  _RunInternal(argparse.ArgumentParser(), output_directory=None)


if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/apply_shared_preference_file.py b/deps/v8/build/android/apply_shared_preference_file.py
new file mode 100755
index 0000000000..b224081957
--- /dev/null
+++ b/deps/v8/build/android/apply_shared_preference_file.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manually applies a shared preference JSON file.
+
+If needed during automation, use the --shared-prefs-file in test_runner.py
+instead.
+"""
+
+import argparse
+import sys
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
+from devil.android import device_utils
+from devil.android.sdk import shared_prefs
+from pylib.utils import shared_preference_utils
+
+
def main():
  """Applies each shared-preference JSON file to every healthy device."""
  parser = argparse.ArgumentParser(
      description='Manually apply shared preference JSON files.')
  parser.add_argument('filepaths', nargs='*',
                      help='Any number of paths to shared preference JSON '
                           'files to apply.')
  args = parser.parse_args()

  all_devices = device_utils.DeviceUtils.HealthyDevices()
  if not all_devices:
    raise RuntimeError('No healthy devices attached')

  for filepath in args.filepaths:
    all_settings = shared_preference_utils.ExtractSettingsFromJson(filepath)
    for setting in all_settings:
      # Apply every setting to every attached device.
      for device in all_devices:
        shared_pref = shared_prefs.SharedPrefs(
            device, setting['package'], setting['filename'],
            use_encrypted_path=setting.get('supports_encrypted_path', False))
        shared_preference_utils.ApplySharedPreferenceSetting(
            shared_pref, setting)


if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/asan_symbolize.py b/deps/v8/build/android/asan_symbolize.py
new file mode 100755
index 0000000000..9f2e88a60d
--- /dev/null
+++ b/deps/v8/build/android/asan_symbolize.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.constants import host_paths
+
+# pylint: disable=wrong-import-order
+# Uses symbol.py from third_party/android_platform, not python's.
+with host_paths.SysPath(
+ host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
+ position=0):
+ import symbol
+
+
# Matches one ASan stack frame: prefix, "#N" position, absolute address,
# then "(library+offset)".
_RE_ASAN = re.compile(r'(.*?)(#\S*?)\s+(\S*?)\s+\((.*?)\+(.*?)\)')

# This named tuple models a parsed Asan log line.
AsanParsedLine = collections.namedtuple('AsanParsedLine',
                                        'prefix,library,pos,rel_address')

# This named tuple models an Asan log line. 'raw' is the raw content
# while 'parsed' is None or an AsanParsedLine instance.
AsanLogLine = collections.namedtuple('AsanLogLine', 'raw,parsed')
+
def _ParseAsanLogLine(line):
  """Parse line into corresponding AsanParsedLine value, if any, or None."""
  match = _RE_ASAN.match(line)
  if match is None:
    return None
  # Groups: prefix, "#N" position, absolute address (unused), library, offset.
  prefix, pos, _, library, offset = match.groups()
  return AsanParsedLine(prefix=prefix,
                        library=library,
                        pos=pos,
                        rel_address='%08x' % int(offset, 16))
+
def _FindASanLibraries():
  """Returns relative paths of all .so files under the llvm-build lib dir."""
  asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
                              'third_party', 'llvm-build',
                              'Release+Asserts', 'lib')
  found = []
  for src_dir, _, files in os.walk(asan_lib_dir):
    for name in files:
      if name.endswith('.so'):
        found.append(os.path.relpath(os.path.join(src_dir, name)))
  return found
+
+
def _TranslateLibPath(library, asan_libs):
  """Maps |library| to a matching local ASan lib, else defers to symbol."""
  basename = os.path.basename(library)
  for asan_lib in asan_libs:
    if basename == os.path.basename(asan_lib):
      return '/' + asan_lib
  # pylint: disable=no-member
  return symbol.TranslateLibPath(library)
+
+
def _PrintSymbolized(asan_input, arch):
  """Print symbolized logcat output for Asan symbols.

  Args:
    asan_input: list of input lines.
    arch: Target CPU architecture.
  """
  asan_libs = _FindASanLibraries()

  # Maps library -> [ AsanParsedLine... ]
  libraries = collections.defaultdict(list)

  asan_log_lines = []
  for line in asan_input:
    line = line.rstrip()
    parsed = _ParseAsanLogLine(line)
    if parsed:
      libraries[parsed.library].append(parsed)
    asan_log_lines.append(AsanLogLine(raw=line, parsed=parsed))

  # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
  all_symbols = collections.defaultdict(dict)

  # Batch-symbolize all addresses per library in one call.
  for library, items in libraries.iteritems():
    libname = _TranslateLibPath(library, asan_libs)
    lib_relative_addrs = set([i.rel_address for i in items])
    # pylint: disable=no-member
    info_dict = symbol.SymbolInformationForSet(libname,
                                               lib_relative_addrs,
                                               True,
                                               cpu_arch=arch)
    if info_dict:
      all_symbols[library] = info_dict

  for log_line in asan_log_lines:
    m = log_line.parsed
    if (m and m.library in all_symbols and
        m.rel_address in all_symbols[m.library]):
      # NOTE: all_symbols[lib][address] is a never-empty list of tuples.
      # NOTE: The documentation for SymbolInformationForSet() indicates
      # that usually one wants to display the last list item, not the first.
      # The code below takes the first, is this the best choice here?
      s = all_symbols[m.library][m.rel_address][0]
      print '%s%s %s %s' % (m.prefix, m.pos, s[0], s[1])
    else:
      print log_line.raw
+
+
def main():
  """Reads ASan logcat output (file or stdin) and prints it symbolized."""
  parser = optparse.OptionParser()
  parser.add_option('-l', '--logcat',
                    help='File containing adb logcat output with ASan stacks. '
                         'Use stdin if not specified.')
  parser.add_option('--output-directory',
                    help='Path to the root build directory.')
  parser.add_option('--arch', default='arm',
                    help='CPU architecture name')
  options, _ = parser.parse_args()

  if options.output_directory:
    constants.SetOutputDirectory(options.output_directory)
    # Do an up-front test that the output directory is known.
    constants.CheckOutputDirectory()

  if options.logcat:
    # Bug fix: use open() in a with-block instead of the deprecated Python 2
    # file() builtin, which leaked the file handle.
    with open(options.logcat, 'r') as logcat_file:
      lines = logcat_file.readlines()
  else:
    lines = sys.stdin.readlines()

  _PrintSymbolized(lines, options.arch)


if __name__ == "__main__":
  sys.exit(main())
diff --git a/deps/v8/build/android/binary_size/OWNERS b/deps/v8/build/android/binary_size/OWNERS
new file mode 100644
index 0000000000..c964495a78
--- /dev/null
+++ b/deps/v8/build/android/binary_size/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/binary_size/__init__.py b/deps/v8/build/android/binary_size/__init__.py
new file mode 100644
index 0000000000..a22a6ee39a
--- /dev/null
+++ b/deps/v8/build/android/binary_size/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/binary_size/apk_downloader.py b/deps/v8/build/android/binary_size/apk_downloader.py
new file mode 100755
index 0000000000..aa7d12fdc1
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apk_downloader.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+import zipfile
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+from util import build_utils
+
+sys.path.append(
+ os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools'))
+import download_from_google_storage
+import upload_to_google_storage
+
+CURRENT_MILESTONE = '67'
+DEFAULT_BUCKET = 'gs://chromium-android-tools/apks'
+DEFAULT_DOWNLOAD_PATH = os.path.join(os.path.dirname(__file__), 'apks')
+DEFAULT_BUILDER = 'Android_Builder'
+DEFAULT_APK = 'MonochromePublic.apk'
+_ALL_BUILDER_APKS = {
+ 'Android Builder': ['ChromePublic.apk', 'ChromeModernPublic.apk',
+ 'MonochromePublic.apk'],
+ 'Android arm64 Builder': ['ChromePublic.apk', 'ChromeModernPublic.apk'],
+}
+
+
+def MaybeDownloadApk(builder, milestone, apk, download_path, bucket):
+  """Returns path to the downloaded APK or None if not found.
+
+  Args:
+    builder: Builder directory name, e.g. 'Android_Builder' (used verbatim
+        as a path component, so spaces should already be underscores).
+    milestone: Milestone number as a string, e.g. '67'.
+    apk: APK file name, e.g. 'MonochromePublic.apk'.
+    download_path: Local root directory APKs are downloaded into.
+    bucket: gs:// bucket URL that stores the reference APKs.
+  """
+  apk_path = os.path.join(download_path, builder, milestone, apk)
+  # The checked-in .sha1 file next to the APK path names the blob to fetch
+  # from google storage.
+  sha1_path = apk_path + '.sha1'
+  base_url = os.path.join(bucket, builder, milestone)
+  if os.path.exists(apk_path):
+    # Already downloaded on a previous run; reuse it.
+    print '%s already exists' % apk_path
+    return apk_path
+  elif not os.path.exists(sha1_path):
+    # No .sha1 checked in for this builder/milestone/apk combination.
+    print 'Skipping %s, file not found' % sha1_path
+    return None
+  else:
+    download_from_google_storage.download_from_google_storage(
+        input_filename=sha1_path,
+        sha1_file=sha1_path,
+        base_url=base_url,
+        gsutil=download_from_google_storage.Gsutil(
+            download_from_google_storage.GSUTIL_DEFAULT_PATH),
+        num_threads=1,
+        directory=False,
+        recursive=False,
+        force=False,
+        output=apk_path,
+        ignore_errors=False,
+        verbose=True,
+        auto_platform=False,
+        extract=False)
+    return apk_path
+
+
+def _UpdateReferenceApks(milestones):
+  """Update reference APKs and creates .sha1 files ready for commit.
+
+  Will fail if perf builders were broken for the given milestone (use next
+  passing build in this case).
+
+  Args:
+    milestones: Iterable of (milestone, crrev) string pairs to upload
+        reference APKs for.
+  """
+  with build_utils.TempDir() as temp_dir:
+    for milestone, crrev in milestones:
+      for builder, apks in _ALL_BUILDER_APKS.iteritems():
+        # Builder names contain spaces; storage paths use underscores.
+        tools_builder_path = builder.replace(' ', '_')
+        zip_path = os.path.join(temp_dir, 'build_product.zip')
+        # 'git crrev-parse' (a depot_tools command) maps a crrev.com build
+        # number to the corresponding git commit hash.
+        commit = build_utils.CheckOutput(['git', 'crrev-parse', crrev]).strip()
+        # Download build product from perf builders.
+        build_utils.CheckOutput([
+            'gsutil', 'cp', 'gs://chrome-perf/%s/full-build-linux_%s.zip' % (
+                builder, commit), zip_path])
+
+        # Extract desired .apks.
+        with zipfile.ZipFile(zip_path) as z:
+          in_zip_paths = z.namelist()
+          # Archive members share one top-level directory; treat it as the
+          # in-zip root.
+          out_dir = os.path.commonprefix(in_zip_paths)
+          for apk_name in apks:
+            output_path = os.path.join(
+                DEFAULT_DOWNLOAD_PATH, tools_builder_path, milestone)
+            apk_path = os.path.join(out_dir, 'apks', apk_name)
+            zip_info = z.getinfo(apk_path)
+            # Rewrite the member name so the APK extracts directly into
+            # output_path without its in-zip directories. NOTE(review):
+            # apk_path.replace(apk_path, apk_name) is simply apk_name.
+            zip_info.filename = apk_path.replace(apk_path, apk_name)
+            z.extract(zip_info, output_path)
+            input_files = [os.path.join(output_path, apk_name)]
+            bucket_path = os.path.join(
+                DEFAULT_BUCKET, tools_builder_path, milestone)
+
+            # Upload .apks to chromium-android-tools so that they aren't
+            # automatically removed in the future.
+            upload_to_google_storage.upload_to_google_storage(
+                input_files,
+                bucket_path,
+                upload_to_google_storage.Gsutil(
+                    upload_to_google_storage.GSUTIL_DEFAULT_PATH),
+                False, # force
+                False, # use_md5
+                10, # num_threads
+                False, # skip_hashing
+                None) # gzip
+
+
+def main():
+ argparser = argparse.ArgumentParser(
+ description='Utility for downloading archived APKs used for measuring '
+ 'per-milestone patch size growth.',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ argparser.add_argument('--download-path', default=DEFAULT_DOWNLOAD_PATH,
+ help='Directory to store downloaded APKs.')
+ argparser.add_argument('--milestone', default=CURRENT_MILESTONE,
+ help='Download reference APK for this milestone.')
+ argparser.add_argument('--apk', default=DEFAULT_APK, help='APK name.')
+ argparser.add_argument('--builder', default=DEFAULT_BUILDER,
+ help='Builder name.')
+ argparser.add_argument('--bucket', default=DEFAULT_BUCKET,
+ help='Google storage bucket where APK is stored.')
+ argparser.add_argument('--update', action='append', nargs=2,
+ help='List of MILESTONE CRREV pairs to upload '
+ 'reference APKs for. Mutally exclusive with '
+ 'downloading reference APKs.')
+ args = argparser.parse_args()
+ if args.update:
+ _UpdateReferenceApks(args.update)
+ else:
+ MaybeDownloadApk(args.builder, args.milestone, args.apk,
+ args.download_path, args.bucket)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..c2629a666a
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/56/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+f6a9731abe16df80a4026843a850d3c721414b96 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..8c0ab5d5cd
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+a168708620b6370e0325a00c0bc3b4b53ad86a18 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..119dc0ec48
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/57/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+75bc1faae7eff3c3781d1e0343414c1e42d8aeef \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..28ddb43013
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+3e9673008a930aa8bb2bcd7e26f8da91a0448ec3 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..935e09d2ba
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/58/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+ec034225a5e637fc83944b5ada634aba8075d1b2 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..8ac82edc40
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/58/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+68925703102a2ff5a55e3b00e90a086dfd6d7ee6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..2e24ec0d2f
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+286671da954e55a708481042276d209f769d3af8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..4ed75c8cec
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/59/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b7d2c8299b263d33e76b9d845bfea7631cca1fc2 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..caee66cb49
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/59/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+e292bd4db30461f755188059a5ca31053b6720b8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..a51316d5b9
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+bfa2c1c1750c5d6bbe40591ac8914a3b848d4e5b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..e85b68e78c
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/60/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+c546e0cccae5b2da3834466337f7c2872889e6c8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..10cc649d7d
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/60/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+3b847e7aeafc265640d5092ef40852cf47fe743b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..b26d7382ab
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+d4f1347c5baea6ebb296b450cf54abb629030328 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..0de158d0c8
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/61/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b2644c78846d1297c61bf75b1543d74e24f777b6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..4473f6e275
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/61/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+80413a607555c33960241ef10f5897b02383fe2e \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..3c50afe5b3
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+a819a1ca4da0ae9e3accdbf38a6fad00814ab1c9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..dd379384bb
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/62/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+8023a7eeaf30a4cdbdfcc309e6f89ad99cf0e9d0 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..0a1ff4dc77
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/62/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+f726d0a312f934b94fb6b8e9e2e6a81011cdf922 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..92ec9d1edf
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+e7b3d22b0d20ceec9307cbf734bc6ee98b2433e3 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..b3f545e615
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/63/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+1ec72262d91b86f3569bdf6b6ca49a6212c629d8 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..fb8ac39faa
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/63/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+8fe3e9821006688f8caebfaae912c19338b8641a \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..92e9519e36
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+98af8f2265f68dea89eecd1d93410fd36bf233c4 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7bbf28ae22
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/64/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+d5c209e967fa2f6b98fd4e3d6882a85ad67a9d87 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..123d071a52
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/64/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+a5059a9bbe1dd9b6750bee7c2728a366bf9864e9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..696f05d0ec
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+f7cd8a90f4b0f038e3c4165c9111757bbd357e23 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..406a044a3f
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/65/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+415533c239b40780156fed15c4760b748b8752a5 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..7ecb21ee98
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/65/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+743e67eda110f303e48826d059352dac7e6837a4 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..01556e8fda
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+f0bdfb932b1136c408f7a4f4ba8054e98b128eb6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7319f7dcd9
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/66/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+4af2534cd223be64c17f2ffff04340e093690d78 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..23b3125bcb
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/66/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+ef9cce8af1cc4fb533e5a19c206cc40185602a7f \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..2127fdc963
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+b266b4168dd50384c39b57a6ac9c6c260edd225f \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..05fd194135
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/67/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+443888b240953a9c0cac582b82587e06cc9c1af9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1
new file mode 100644
index 0000000000..563969c3aa
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_Builder/67/MonochromePublic.apk.sha1
@@ -0,0 +1 @@
+7c930a47e9b48c21bd2c09cddc6e9326b8dc15ef \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..432f6aef87
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/56/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+c980519b19f3eb010fe0e54a436272e3c94656be \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..aa40702ea6
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+c1f4797decdd33465d671cf2fb5f266f4c9e1c7a \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7e7ddf5b81
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/57/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+47f902113feb297714ba22d25d7cb51754990923 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..b97041ab12
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+255104059ee2e51541d67a504ff22327b945768b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..bd8ffec678
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/58/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+0a18193a6534ccc31deaffecb817b8b6c991647a \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..1026ad71b5
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+62897dbec4761546b9c94e258c99c51dc816c1a9 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..05052bd971
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/59/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+9f2e5aee1ca7d8a88e05587137a0f4859e76296b \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..ad52e8ef8b
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+241970643e7cc8a5078946092298229bce422c5c \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..3d1dff8612
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/60/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+3dd1e9be5c64c97e3a5d93b7c43e0cddb5548003 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..13d4b6a003
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+8e30941b4fc3a33701dba26aac2d686120bb9588 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..4e72ae79e4
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/61/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+d2ed32c20b65f86a839cc01839a6f258e8fe909f \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..0f14fdca3d
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+b7d6363def07dbff34424f7df52bfe492b354569 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..9f8d745357
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/62/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+f46c29b53910ec5094860d820b8b99540706e9e4 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..a19ee6507e
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+172e8f53695780960ab50908b262c7763bbd2bad \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..739baa857c
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/63/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+4db69a57e34287ab21e8f5fae282cf861316384c \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..e0df57bb81
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+3e4b8fd75aea0a22fa769c6686dd153d72741e91 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..e65c26f9ab
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/64/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+b7b222a6650e98dda980dd9bc922763e4102d0b6 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..e3921ca31a
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+112273d8bb6942a89d47f4ada5f85cc9d5e69073 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..7c78ee8393
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/65/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+fde7bd26a11ab8d301efe1ae7115c615f03ed3d0 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..b9960e07c8
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+1abaf4fea673cd75031ee6bcb8382abc7ffe2b92 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..98445823cf
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/66/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+ce72e18b9a9028cbde71a3ab36580d6bf3c8e531 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1
new file mode 100644
index 0000000000..6d8e57c62f
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromeModernPublic.apk.sha1
@@ -0,0 +1 @@
+9ecb48a44c57c9bbbe85dcf0d9cd446529640807 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1 b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1
new file mode 100644
index 0000000000..a06d22d349
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/Android_arm64_Builder/67/ChromePublic.apk.sha1
@@ -0,0 +1 @@
+60f7ca1ebf019bf3a77b372c5f4c80592e36b942 \ No newline at end of file
diff --git a/deps/v8/build/android/binary_size/apks/README.md b/deps/v8/build/android/binary_size/apks/README.md
new file mode 100644
index 0000000000..d6e4f58de6
--- /dev/null
+++ b/deps/v8/build/android/binary_size/apks/README.md
@@ -0,0 +1,45 @@
+## Milestone Reference APKs
+
+This folder contains APKs for official (upstream) builds for each milestone.
+The primary use for these APKs is per-milestone binary size analysis.
+ * `//build/android/resource_sizes.py` uses them for calculating patch size
+ * They can be used with `tools/binary_size/diagnose_bloat.py` for analyzing
+ what grew in an APK milestone-to-milestone
+
+## Downloading Reference APKs
+
+```bash
+# Downloads ARM 32 MonochromePublic.apk for the latest milestone that we've
+# uploaded APKs for.
+build/android/binary_size/apk_downloader.py
+
+# Print usage and see all options.
+build/android/binary_size/apk_downloader.py -h
+```
+
+## Updating Reference APKs
+```bash
+# Downloads build products from perf builders and uploads the following APKs
+# for M62 and M63:
+# ARM 32 - ChromePublic.apk, ChromeModernPublic.apk, MonochromePublic.apk
+# ARM 64 - ChromePublic.apk ChromeModernPublic.apk
+build/android/binary_size/apk_downloader.py --update 63 508578 --update 62 499187
+```
+
+ * **Remember to commit the generated .sha1 files, update the
+ CURRENT_MILESTONE variable in apk_downloader.py, and update the list of
+ revisions below**
+
+## Chromium revisions for each APK
+ * [M56](https://crrev.com/433059)
+ * [M57](https://crrev.com/444943)
+ * [M58](https://crrev.com/454471)
+ * [M59](https://crrev.com/464641)
+ * [M60](https://crrev.com/474934)
+ * [M61](https://crrev.com/488528)
+ * [M62](https://crrev.com/499187)
+ * [M63](https://crrev.com/508578)
+ * [M64](https://crrev.com/520840)
+ * [M65](https://crrev.com/530369)
+ * [M66](https://crrev.com/540276)
+ * [M67](https://crrev.com/550428)
diff --git a/deps/v8/build/android/buildhooks/BUILD.gn b/deps/v8/build/android/buildhooks/BUILD.gn
new file mode 100644
index 0000000000..0ccd4ce2d8
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/BUILD.gn
@@ -0,0 +1,58 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+import("//build/config/android/config.gni")
+
+java_library("build_hooks_java") {
+ emma_never_instrument = true
+ java_files = [ "java/org/chromium/build/BuildHooks.java" ]
+
+ # Make all targets pull in the try-with-resources support files.
+ # If an apk ends up not using any such statements, ProGuard will remove
+ # them.
+ deps = [
+ "//third_party/bazel/desugar:desugar_runtime_java",
+ ]
+ srcjar_deps = [ ":base_build_hooks_config" ]
+ no_build_hooks = true
+ supports_android = true
+}
+
+java_cpp_template("base_build_hooks_config") {
+ sources = [
+ "java/templates/BuildHooksConfig.template",
+ ]
+ package_path = "org/chromium/build"
+
+ defines = []
+ if (report_java_assert) {
+ defines += [ "_REPORT_JAVA_ASSERT" ]
+ }
+}
+
+build_hooks_android_impl = "java/org/chromium/build/BuildHooksAndroidImpl.java"
+
+android_library("build_hooks_android_java") {
+ emma_never_instrument = true
+ java_files = [
+ "java/org/chromium/build/BuildHooksAndroid.java",
+ build_hooks_android_impl,
+ ]
+
+ jar_excluded_patterns = [ "*/BuildHooksAndroidImpl.class" ]
+ no_build_hooks = true
+ proguard_configs = [ "proguard/build_hooks_android_impl.flags" ]
+}
+
+# This default implementation is used if an android_apk target doesn't
+# specify a different implementation via build_hooks_android_impl_deps.
+android_library("build_hooks_android_impl_java") {
+ emma_never_instrument = true
+ java_files = [ build_hooks_android_impl ]
+ deps = [
+ ":build_hooks_android_java",
+ ]
+ no_build_hooks = true
+}
diff --git a/deps/v8/build/android/buildhooks/OWNERS b/deps/v8/build/android/buildhooks/OWNERS
new file mode 100644
index 0000000000..c964495a78
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java
new file mode 100644
index 0000000000..7364898699
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooks.java
@@ -0,0 +1,50 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+/**
+ * All Java targets that support android have a dependency on this class.
+ */
+public abstract class BuildHooks {
+ /**
+ * Defines an interface for reporting assertion error.
+ */
+ @FunctionalInterface
+ public interface ReportAssertionCallback {
+ void run(AssertionError arg);
+ }
+
+ private static ReportAssertionCallback sReportAssertionCallback;
+
+ /**
+ * This method is used to handle assert failures when asserts are enabled by
+ * //build/android/bytecode:java_bytecode_rewriter. For non-release builds, this is always
+ * enabled and assert failures will result in an assertion error being thrown. For release
+ * builds, this is only enabled when report_java_assert = true. Assert failures will result in
+ * an error report being uploaded to the crash servers only if the callback is set (so that this
+ * can be a no-op for WebView in Monochrome). This also means that asserts hit before the
+ * callback is set will be no-op's as well.
+ */
+ public static void assertFailureHandler(AssertionError assertionError) {
+ if (BuildHooksConfig.REPORT_JAVA_ASSERT) {
+ if (sReportAssertionCallback != null) {
+ sReportAssertionCallback.run(assertionError);
+ }
+ } else {
+ throw assertionError;
+ }
+ }
+
+ /**
+ * Set the callback function that handles assert failure.
+ * This should be called from attachBaseContext.
+ */
+ public static void setReportAssertionCallback(ReportAssertionCallback callback) {
+ if (!BuildHooksConfig.REPORT_JAVA_ASSERT) {
+ throw new AssertionError();
+ }
+ sReportAssertionCallback = callback;
+ }
+}
diff --git a/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java
new file mode 100644
index 0000000000..f6fef20e07
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroid.java
@@ -0,0 +1,107 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.content.res.Resources;
+
+/**
+ * All Java targets that require android have a dependency on this class. Add methods that do not
+ * require Android to {@link BuildHooks}.
+ *
+ * This class provides hooks needed when bytecode rewriting. Static convenience methods are used to
+ * minimize the amount of code required to be manually generated when bytecode rewriting.
+ *
+ * This class contains default implementations for all methods and is used when no other
+ * implementation is supplied to an android_apk target (via build_hooks_android_impl_deps).
+ */
+public abstract class BuildHooksAndroid {
+ private static BuildHooksAndroid sInstance;
+
+ private static BuildHooksAndroid get() {
+ if (sInstance == null) {
+ sInstance = constructBuildHooksAndroidImpl();
+ }
+ return sInstance;
+ }
+
+ // Creates an instance of BuildHooksAndroidImpl using reflection. Why is this necessary?
+ // The downstream version of BuildHooksAndroidImpl pulls a bunch of methods into the main dex
+ // that don't actually need to be there. This happens because there are @MainDex classes that
+    // have Context methods added (via bytecode rewriting) that call into BuildHooksAndroid.
+    // Creating the instance via reflection tricks proguard into thinking BuildHooksAndroidImpl
+ // doesn't need to be in the main dex file.
+ private static BuildHooksAndroid constructBuildHooksAndroidImpl() {
+ try {
+ // Not final to avoid inlining. Without this proguard is able to figure out that
+ // BuildHooksAndroidImpl is actually used.
+ String implClazzName = "org.chromium.build.BuildHooksAndroidImpl";
+ Class<?> implClazz = Class.forName(implClazzName);
+ return (BuildHooksAndroid) implClazz.newInstance();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public static Resources getResources(Context context) {
+ return get().getResourcesImpl(context);
+ }
+
+ protected abstract Resources getResourcesImpl(Context context);
+
+ public static AssetManager getAssets(Context context) {
+ return get().getAssetsImpl(context);
+ }
+
+ protected abstract AssetManager getAssetsImpl(Context context);
+
+ public static Resources.Theme getTheme(Context context) {
+ return get().getThemeImpl(context);
+ }
+
+ protected abstract Resources.Theme getThemeImpl(Context context);
+
+ public static void setTheme(Context context, int theme) {
+ get().setThemeImpl(context, theme);
+ }
+
+ protected abstract void setThemeImpl(Context context, int theme);
+
+ public static Context createConfigurationContext(Context context) {
+ return get().createConfigurationContextImpl(context);
+ }
+
+ protected abstract Context createConfigurationContextImpl(Context context);
+
+ public static int getIdentifier(
+ Resources resources, String name, String defType, String defPackage) {
+ return get().getIdentifierImpl(resources, name, defType, defPackage);
+ }
+
+ protected abstract int getIdentifierImpl(
+ Resources resources, String name, String defType, String defPackage);
+
+ public static boolean isEnabled() {
+ return get().isEnabledImpl();
+ }
+
+ protected abstract boolean isEnabledImpl();
+
+ public static void initCustomResources(Context context) {
+ get().initCustomResourcesImpl(context);
+ }
+
+ protected abstract void initCustomResourcesImpl(Context context);
+
+ /**
+ * Record custom resources related UMA. Requires native library to be loaded.
+ */
+ public static void maybeRecordResourceMetrics() {
+ get().maybeRecordResourceMetricsImpl();
+ }
+
+ protected abstract void maybeRecordResourceMetricsImpl();
+} \ No newline at end of file
diff --git a/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java
new file mode 100644
index 0000000000..5b9b997161
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/org/chromium/build/BuildHooksAndroidImpl.java
@@ -0,0 +1,54 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import android.content.Context;
+import android.content.res.AssetManager;
+import android.content.res.Resources;
+
+/**
+ * Instantiatable version of {@link BuildHooksAndroid} with dummy implementations.
+ */
+public class BuildHooksAndroidImpl extends BuildHooksAndroid {
+ @Override
+ protected final Resources getResourcesImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected AssetManager getAssetsImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected Resources.Theme getThemeImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected void setThemeImpl(Context context, int theme) {}
+
+ @Override
+ protected Context createConfigurationContextImpl(Context context) {
+ return null;
+ }
+
+ @Override
+ protected int getIdentifierImpl(
+ Resources resources, String name, String defType, String defPackage) {
+ return resources.getIdentifier(name, defType, defPackage);
+ }
+
+ @Override
+ protected boolean isEnabledImpl() {
+ return false;
+ }
+
+ @Override
+ protected void initCustomResourcesImpl(Context context) {}
+
+ @Override
+ protected void maybeRecordResourceMetricsImpl() {}
+}
diff --git a/deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template b/deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template
new file mode 100644
index 0000000000..bdaa550110
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/java/templates/BuildHooksConfig.template
@@ -0,0 +1,16 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+/**
+ * BuildHooks configuration. Generated on a per-target basis.
+ */
+public class BuildHooksConfig {
+#if defined(_REPORT_JAVA_ASSERT)
+ public static final boolean REPORT_JAVA_ASSERT = true;
+#else
+ public static final boolean REPORT_JAVA_ASSERT = false;
+#endif
+}
diff --git a/deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags b/deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags
new file mode 100644
index 0000000000..62dc1aa0f3
--- /dev/null
+++ b/deps/v8/build/android/buildhooks/proguard/build_hooks_android_impl.flags
@@ -0,0 +1,5 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+-keep class org.chromium.build.BuildHooksAndroidImpl
diff --git a/deps/v8/build/android/bytecode/BUILD.gn b/deps/v8/build/android/bytecode/BUILD.gn
new file mode 100644
index 0000000000..1584becd43
--- /dev/null
+++ b/deps/v8/build/android/bytecode/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+assert(current_toolchain == default_toolchain)
+
+java_binary("java_bytecode_rewriter") {
+ emma_never_instrument = true
+ java_files = [
+ "java/org/chromium/bytecode/AssertionEnablerClassAdapter.java",
+ "java/org/chromium/bytecode/ByteCodeProcessor.java",
+ "java/org/chromium/bytecode/ClassPathValidator.java",
+ "java/org/chromium/bytecode/CustomClassLoaderClassWriter.java",
+ "java/org/chromium/bytecode/CustomResourcesClassAdapter.java",
+ "java/org/chromium/bytecode/SplitCompatClassAdapter.java",
+ "java/org/chromium/bytecode/ThreadAssertionClassAdapter.java",
+ "java/org/chromium/bytecode/TypeUtils.java",
+ ]
+ main_class = "org.chromium.bytecode.ByteCodeProcessor"
+ deps = [
+ "//third_party/ow2_asm:asm_java",
+ "//third_party/ow2_asm:asm_util_java",
+ ]
+ wrapper_script_name = "helper/java_bytecode_rewriter"
+}
diff --git a/deps/v8/build/android/bytecode/OWNERS b/deps/v8/build/android/bytecode/OWNERS
new file mode 100644
index 0000000000..c964495a78
--- /dev/null
+++ b/deps/v8/build/android/bytecode/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+estevenson@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java
new file mode 100644
index 0000000000..0a903a60f9
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/AssertionEnablerClassAdapter.java
@@ -0,0 +1,109 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.chromium.bytecode.TypeUtils.ASSERTION_ERROR;
+import static org.chromium.bytecode.TypeUtils.BUILD_HOOKS;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Label;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+/**
+ * A ClassVisitor for replacing Java ASSERT statements with a function by modifying Java bytecode.
+ *
+ * We do this in two steps, first step is to enable assert.
+ * Following bytecode is generated for each class with ASSERT statements:
+ * 0: ldc #8 // class CLASSNAME
+ * 2: invokevirtual #9 // Method java/lang/Class.desiredAssertionStatus:()Z
+ * 5: ifne 12
+ * 8: iconst_1
+ * 9: goto 13
+ * 12: iconst_0
+ * 13: putstatic #2 // Field $assertionsDisabled:Z
+ * Replaces line #13 to the following:
+ * 13: pop
+ * Consequently, $assertionsDisabled is assigned the default value FALSE.
+ * This is done in the first if statement in overridden visitFieldInsn. We do this per-assert.
+ *
+ * Second step is to replace assert statement with a function:
+ * The following instructions are generated by a java assert statement:
+ * getstatic #3 // Field $assertionsDisabled:Z
+ *     ifne           118    // Jump to instruction as if assertion is not enabled
+ * ...
+ * ifne 19
+ * new #4 // class java/lang/AssertionError
+ * dup
+ * ldc #5 // String (don't have this line if no assert message given)
+ * invokespecial #6 // Method java/lang/AssertionError.
+ * athrow
+ * Replace athrow with:
+ * invokestatic #7 // Method org/chromium/base/JavaExceptionReporter.assertFailureHandler
+ * goto 118
+ * JavaExceptionReporter.assertFailureHandler is a function that handles the AssertionError,
+ * 118 is the instruction to execute as if assertion is not enabled.
+ */
+class AssertionEnablerClassAdapter extends ClassVisitor {
+ AssertionEnablerClassAdapter(ClassVisitor visitor) {
+ super(Opcodes.ASM5, visitor);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ return new RewriteAssertMethodVisitor(
+ Opcodes.ASM5, super.visitMethod(access, name, desc, signature, exceptions));
+ }
+
+ static class RewriteAssertMethodVisitor extends MethodVisitor {
+ static final String ASSERTION_DISABLED_NAME = "$assertionsDisabled";
+ static final String INSERT_INSTRUCTION_NAME = "assertFailureHandler";
+ static final String INSERT_INSTRUCTION_DESC =
+ TypeUtils.getMethodDescriptor(VOID, ASSERTION_ERROR);
+ static final boolean INSERT_INSTRUCTION_ITF = false;
+
+ boolean mStartLoadingAssert;
+ Label mGotoLabel;
+
+ public RewriteAssertMethodVisitor(int api, MethodVisitor mv) {
+ super(api, mv);
+ }
+
+ @Override
+ public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+ if (opcode == Opcodes.PUTSTATIC && name.equals(ASSERTION_DISABLED_NAME)) {
+ super.visitInsn(Opcodes.POP); // enable assert
+ } else if (opcode == Opcodes.GETSTATIC && name.equals(ASSERTION_DISABLED_NAME)) {
+ mStartLoadingAssert = true;
+ super.visitFieldInsn(opcode, owner, name, desc);
+ } else {
+ super.visitFieldInsn(opcode, owner, name, desc);
+ }
+ }
+
+ @Override
+ public void visitJumpInsn(int opcode, Label label) {
+ if (mStartLoadingAssert && opcode == Opcodes.IFNE && mGotoLabel == null) {
+ mGotoLabel = label;
+ }
+ super.visitJumpInsn(opcode, label);
+ }
+
+ @Override
+ public void visitInsn(int opcode) {
+ if (!mStartLoadingAssert || opcode != Opcodes.ATHROW) {
+ super.visitInsn(opcode);
+ } else {
+ super.visitMethodInsn(Opcodes.INVOKESTATIC, BUILD_HOOKS, INSERT_INSTRUCTION_NAME,
+ INSERT_INSTRUCTION_DESC, INSERT_INSTRUCTION_ITF);
+ super.visitJumpInsn(Opcodes.GOTO, mGotoLabel);
+ mStartLoadingAssert = false;
+ mGotoLabel = null;
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
new file mode 100644
index 0000000000..37dc192d81
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java
@@ -0,0 +1,293 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.ClassWriter.COMPUTE_FRAMES;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.zip.CRC32;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * Java application that takes in an input jar, performs a series of bytecode transformations,
+ * and generates an output jar.
+ *
+ * Two types of transformations are performed:
+ * 1) Enabling assertions via {@link AssertionEnablerClassAdapter}
+ * 2) Providing support for custom resources via {@link CustomResourcesClassAdapter}
+ */
+class ByteCodeProcessor {
+ private static final String CLASS_FILE_SUFFIX = ".class";
+ private static final String TEMPORARY_FILE_SUFFIX = ".temp";
+ private static final int BUFFER_SIZE = 16384;
+ private static boolean sVerbose;
+ private static boolean sIsPrebuilt;
+ private static boolean sShouldAssert;
+ private static boolean sShouldUseCustomResources;
+ private static boolean sShouldUseThreadAnnotations;
+ private static boolean sShouldCheckClassPath;
+ private static ClassLoader sDirectClassPathClassLoader;
+ private static ClassLoader sFullClassPathClassLoader;
+ private static Set<String> sFullClassPathJarPaths;
+ private static Set<String> sSplitCompatClassNames;
+ private static ClassPathValidator sValidator;
+
+ private static class EntryDataPair {
+ private final ZipEntry mEntry;
+ private final byte[] mData;
+
+ private EntryDataPair(ZipEntry mEntry, byte[] mData) {
+ this.mEntry = mEntry;
+ this.mData = mData;
+ }
+
+ private static EntryDataPair create(String zipPath, byte[] data) {
+ ZipEntry entry = new ZipEntry(zipPath);
+ entry.setMethod(ZipEntry.STORED);
+ entry.setTime(0);
+ entry.setSize(data.length);
+ CRC32 crc = new CRC32();
+ crc.update(data);
+ entry.setCrc(crc.getValue());
+ return new EntryDataPair(entry, data);
+ }
+ }
+
+ private static EntryDataPair processEntry(ZipEntry entry, byte[] data)
+ throws ClassPathValidator.ClassNotLoadedException {
+ // Copy all non-.class files to the output jar.
+ if (entry.isDirectory() || !entry.getName().endsWith(CLASS_FILE_SUFFIX)) {
+ return new EntryDataPair(entry, data);
+ }
+
+ ClassReader reader = new ClassReader(data);
+
+ if (sShouldCheckClassPath) {
+ sValidator.validateClassPathsAndOutput(reader, sDirectClassPathClassLoader,
+ sFullClassPathClassLoader, sFullClassPathJarPaths, sIsPrebuilt, sVerbose);
+ }
+
+ ClassWriter writer;
+ if (sShouldUseCustomResources) {
+ // Use the COMPUTE_FRAMES flag to have asm figure out the stack map frames.
+ // This is necessary because GCMBaseIntentService in android_gcm_java contains
+ // incorrect stack map frames. This option slows down processing time by 2x.
+ writer = new CustomClassLoaderClassWriter(
+ sFullClassPathClassLoader, reader, COMPUTE_FRAMES);
+ } else {
+ writer = new ClassWriter(reader, 0);
+ }
+ ClassVisitor chain = writer;
+ /* DEBUGGING:
+ To see the bytecode for a specific class:
+ if (entry.getName().contains("YourClassName")) {
+ chain = new TraceClassVisitor(chain, new PrintWriter(System.out));
+ }
+ To see objectweb.asm code that will generate bytecode for a given class:
+ java -cp "third_party/ow2_asm/lib/asm-5.0.1.jar:third_party/ow2_asm/lib/"\
+ "asm-util-5.0.1.jar:out/Debug/lib.java/jar_containing_yourclass.jar" \
+ org.objectweb.asm.util.ASMifier org.package.YourClassName
+ */
+ if (sShouldUseThreadAnnotations) {
+ chain = new ThreadAssertionClassAdapter(chain);
+ }
+ if (sShouldAssert) {
+ chain = new AssertionEnablerClassAdapter(chain);
+ }
+ if (sShouldUseCustomResources) {
+ chain = new CustomResourcesClassAdapter(
+ chain, reader.getClassName(), reader.getSuperName(), sFullClassPathClassLoader);
+ }
+ if (!sSplitCompatClassNames.isEmpty()) {
+ chain = new SplitCompatClassAdapter(
+ chain, sSplitCompatClassNames, sFullClassPathClassLoader);
+ }
+ reader.accept(chain, 0);
+ byte[] patchedByteCode = writer.toByteArray();
+ return EntryDataPair.create(entry.getName(), patchedByteCode);
+ }
+
+ private static void process(String inputJarPath, String outputJarPath)
+ throws ClassPathValidator.ClassNotLoadedException, ExecutionException,
+ InterruptedException {
+ String tempJarPath = outputJarPath + TEMPORARY_FILE_SUFFIX;
+ ExecutorService executorService =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+ try (ZipInputStream inputStream = new ZipInputStream(
+ new BufferedInputStream(new FileInputStream(inputJarPath)));
+ ZipOutputStream tempStream = new ZipOutputStream(
+ new BufferedOutputStream(new FileOutputStream(tempJarPath)))) {
+ List<Future<EntryDataPair>> list = new ArrayList<>();
+ while (true) {
+ ZipEntry entry = inputStream.getNextEntry();
+ if (entry == null) {
+ break;
+ }
+ byte[] data = readAllBytes(inputStream);
+ list.add(executorService.submit(() -> processEntry(entry, data)));
+ }
+ executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
+ // Write the zip file entries in order to preserve determinism.
+ for (Future<EntryDataPair> futurePair : list) {
+ EntryDataPair pair = futurePair.get();
+ tempStream.putNextEntry(pair.mEntry);
+ tempStream.write(pair.mData);
+ tempStream.closeEntry();
+ }
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ try {
+ Path src = Paths.get(tempJarPath);
+ Path dest = Paths.get(outputJarPath);
+ Files.move(src, dest, StandardCopyOption.REPLACE_EXISTING);
+ } catch (IOException ioException) {
+ throw new RuntimeException(ioException);
+ }
+
+ if (sValidator.hasErrors()) {
+ System.err.println("Direct classpath is incomplete. To fix, add deps on the "
+ + "GN target(s) that provide:");
+ for (Map.Entry<String, Map<String, Set<String>>> entry :
+ sValidator.getErrors().entrySet()) {
+ printValidationError(System.err, entry.getKey(), entry.getValue());
+ }
+ System.exit(1);
+ }
+ }
+
+ private static void printValidationError(
+ PrintStream out, String jarName, Map<String, Set<String>> missingClasses) {
+ out.print(" * ");
+ out.println(jarName);
+ int i = 0;
+ final int numErrorsPerJar = 2;
+ // The list of missing classes is non-exhaustive because each class that fails to validate
+ // reports only the first missing class.
+ for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+ String missingClass = entry.getKey();
+ Set<String> filesThatNeededIt = entry.getValue();
+ out.print(" * ");
+ if (i == numErrorsPerJar) {
+ out.print(String.format("And %d more...", missingClasses.size() - numErrorsPerJar));
+ break;
+ }
+ out.print(missingClass.replace('/', '.'));
+ out.print(" (needed by ");
+ out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+ if (filesThatNeededIt.size() > 1) {
+ out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+ }
+ out.println(")");
+ i++;
+ }
+ }
+
+ private static byte[] readAllBytes(InputStream inputStream) throws IOException {
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ int numRead = 0;
+ byte[] data = new byte[BUFFER_SIZE];
+ while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
+ buffer.write(data, 0, numRead);
+ }
+ return buffer.toByteArray();
+ }
+
+ /**
+ * Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
+ * given jars.
+ */
+ static ClassLoader loadJars(Collection<String> paths) {
+ URL[] jarUrls = new URL[paths.size()];
+ int i = 0;
+ for (String path : paths) {
+ try {
+ jarUrls[i++] = new File(path).toURI().toURL();
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return new URLClassLoader(jarUrls);
+ }
+
+ public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+ ExecutionException, InterruptedException {
+ // Invoke this script using //build/android/gyp/bytecode_processor.py
+ int currIndex = 0;
+ String inputJarPath = args[currIndex++];
+ String outputJarPath = args[currIndex++];
+ sVerbose = args[currIndex++].equals("--verbose");
+ sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+ sShouldAssert = args[currIndex++].equals("--enable-assert");
+ sShouldUseCustomResources = args[currIndex++].equals("--enable-custom-resources");
+ sShouldUseThreadAnnotations = args[currIndex++].equals("--enable-thread-annotations");
+ sShouldCheckClassPath = args[currIndex++].equals("--enable-check-class-path");
+ int sdkJarsLength = Integer.parseInt(args[currIndex++]);
+ List<String> sdkJarPaths =
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + sdkJarsLength));
+ currIndex += sdkJarsLength;
+
+ int directJarsLength = Integer.parseInt(args[currIndex++]);
+ ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+ directClassPathJarPaths.add(inputJarPath);
+ directClassPathJarPaths.addAll(sdkJarPaths);
+ directClassPathJarPaths.addAll(
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, currIndex + directJarsLength)));
+ currIndex += directJarsLength;
+ sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+
+ // Load list of class names that need to be fixed.
+ int splitCompatClassNamesLength = Integer.parseInt(args[currIndex++]);
+ sSplitCompatClassNames = new HashSet<>();
+ sSplitCompatClassNames.addAll(Arrays.asList(
+ Arrays.copyOfRange(args, currIndex, currIndex + splitCompatClassNamesLength)));
+ currIndex += splitCompatClassNamesLength;
+
+ // Load all jars that are on the classpath for the input jar for analyzing class hierarchy.
+ sFullClassPathJarPaths = new HashSet<>();
+ sFullClassPathJarPaths.clear();
+ sFullClassPathJarPaths.add(inputJarPath);
+ sFullClassPathJarPaths.addAll(sdkJarPaths);
+ sFullClassPathJarPaths.addAll(
+ Arrays.asList(Arrays.copyOfRange(args, currIndex, args.length)));
+ sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+ sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+ sValidator = new ClassPathValidator();
+ process(inputJarPath, outputJarPath);
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 0000000000..c35c3f6820
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,167 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. If there are some that are unable to be found, it
+ * stores a helpful error message if it knows where it might find them, and exits the program if it
+ * can't find the class with any given classpath.
+ */
+public class ClassPathValidator {
+ // Map of missing .jar -> Missing class -> Classes that failed.
+ // TreeMap so that error messages have sorted list of jars.
+ private final Map<String, Map<String, Set<String>>> mErrors = new TreeMap<>();
+
+ static class ClassNotLoadedException extends ClassNotFoundException {
+ private final String mClassName;
+
+ ClassNotLoadedException(String className, Throwable ex) {
+ super("Couldn't load " + className, ex);
+ mClassName = className;
+ }
+
+ public String getClassName() {
+ return mClassName;
+ }
+ }
+
+ private static void printAndQuit(ClassNotLoadedException e, ClassReader classReader,
+ boolean verbose) throws ClassNotLoadedException {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found on any classpath. Used by class \"" + classReader.getClassName()
+ + "\"");
+ if (verbose) {
+ throw e;
+ }
+ System.exit(1);
+ }
+
+ private static void validateClass(ClassLoader classLoader, String className)
+ throws ClassNotLoadedException {
+ if (className.startsWith("[")) {
+ // Dealing with an array type which isn't encoded nicely in the constant pool.
+ // For example, [[Lorg/chromium/Class$1;
+ className = className.substring(className.lastIndexOf('[') + 1);
+ if (className.charAt(0) == 'L' && className.endsWith(";")) {
+ className = className.substring(1, className.length() - 1);
+ } else {
+ // Bailing out if we have an non-class array type.
+ // This could be something like [B
+ return;
+ }
+ }
+ if (className.matches(".*\\bR(\\$\\w+)?$")) {
+ // Resources in R.java files are not expected to be valid at this stage in the build.
+ return;
+ }
+ if (className.matches("^libcore\\b.*")) {
+ // libcore exists on devices, but is not included in the Android sdk as it is a private
+ // API.
+ return;
+ }
+ try {
+ classLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new ClassNotLoadedException(className, e);
+ } catch (NoClassDefFoundError e) {
+ // We assume that this is caused by another class that is not going to able to be
+ // loaded, so we will skip this and let that class fail with ClassNotFoundException.
+ }
+ }
+
+ /**
+ * Given a .class file, see if every class referenced in the main class' constant pool can be
+ * loaded by the given ClassLoader.
+ *
+ * @param classReader .class file interface for reading the constant pool.
+ * @param classLoader classpath you wish to validate.
+ * @throws ClassNotLoadedException thrown if it can't load a certain class.
+ */
+ private static void validateClassPath(ClassReader classReader, ClassLoader classLoader)
+ throws ClassNotLoadedException {
+ char[] charBuffer = new char[classReader.getMaxStringLength()];
+ // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+ // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+ for (int i = 1; i < classReader.getItemCount(); i++) {
+ int offset = classReader.getItem(i);
+ // Class entries correspond to 7 in the constant pool
+ // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+ if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+ validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+ }
+ }
+ }
+
+ public void validateClassPathsAndOutput(ClassReader classReader,
+ ClassLoader directClassPathClassLoader, ClassLoader fullClassPathClassLoader,
+ Collection<String> jarsOnlyInFullClassPath, boolean isPrebuilt, boolean verbose)
+ throws ClassNotLoadedException {
+ if (isPrebuilt) {
+ // Prebuilts only need transitive dependencies checked, not direct dependencies.
+ try {
+ validateClassPath(classReader, fullClassPathClassLoader);
+ } catch (ClassNotLoadedException e) {
+ printAndQuit(e, classReader, verbose);
+ }
+ } else {
+ try {
+ validateClassPath(classReader, directClassPathClassLoader);
+ } catch (ClassNotLoadedException e) {
+ try {
+ validateClass(fullClassPathClassLoader, e.getClassName());
+ } catch (ClassNotLoadedException d) {
+ printAndQuit(d, classReader, verbose);
+ }
+ if (verbose) {
+ System.err.println("Class \"" + e.getClassName()
+ + "\" not found in direct dependencies,"
+ + " but found in indirect dependiences.");
+ }
+ // Iterating through all jars that are in the full classpath but not the direct
+ // classpath to find which one provides the class we are looking for.
+ for (String jarPath : jarsOnlyInFullClassPath) {
+ try {
+ ClassLoader smallLoader =
+ ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+ validateClass(smallLoader, e.getClassName());
+ Map<String, Set<String>> failedClassesByMissingClass = mErrors.get(jarPath);
+ if (failedClassesByMissingClass == null) {
+ // TreeMap so that error messages have sorted list of classes.
+ failedClassesByMissingClass = new TreeMap<>();
+ mErrors.put(jarPath, failedClassesByMissingClass);
+ }
+ Set<String> failedClasses =
+ failedClassesByMissingClass.get(e.getClassName());
+ if (failedClasses == null) {
+ failedClasses = new TreeSet<>();
+ failedClassesByMissingClass.put(e.getClassName(), failedClasses);
+ }
+ failedClasses.add(classReader.getClassName());
+ break;
+ } catch (ClassNotLoadedException f) {
+ }
+ }
+ }
+ }
+ }
+
+ public Map<String, Map<String, Set<String>>> getErrors() {
+ return mErrors;
+ }
+
+ public boolean hasErrors() {
+ return !mErrors.isEmpty();
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java
new file mode 100644
index 0000000000..3a52c85d56
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomClassLoaderClassWriter.java
@@ -0,0 +1,51 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassWriter;
+
+/**
+ * A ClassWriter that uses a custom class loader.
+ */
+class CustomClassLoaderClassWriter extends ClassWriter {
+ private ClassLoader mClassLoader;
+
+ public CustomClassLoaderClassWriter(ClassLoader classLoader, ClassReader reader, int flags) {
+ super(reader, flags);
+ this.mClassLoader = classLoader;
+ }
+
+ /**
+ * The only modifications from the org.objectweb.asm.ClassWriter implementations is that this
+ * method is final and it uses a custom ClassLoader.
+ *
+ * See https://github.com/llbit/ow2-asm/blob/master/src/org/objectweb/asm/ClassWriter.java.
+ */
+ @Override
+ protected final String getCommonSuperClass(final String type1, final String type2) {
+ Class<?> c, d;
+ try {
+ c = Class.forName(type1.replace('/', '.'), false, mClassLoader);
+ d = Class.forName(type2.replace('/', '.'), false, mClassLoader);
+ } catch (Exception e) {
+ throw new RuntimeException(e.toString());
+ }
+ if (c.isAssignableFrom(d)) {
+ return type1;
+ }
+ if (d.isAssignableFrom(c)) {
+ return type2;
+ }
+ if (c.isInterface() || d.isInterface()) {
+ return "java/lang/Object";
+ } else {
+ do {
+ c = c.getSuperclass();
+ } while (!c.isAssignableFrom(d));
+ return c.getName().replace('.', '/');
+ }
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java
new file mode 100644
index 0000000000..96205b8815
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/CustomResourcesClassAdapter.java
@@ -0,0 +1,302 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
+import static org.objectweb.asm.Opcodes.ACONST_NULL;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.ARETURN;
+import static org.objectweb.asm.Opcodes.ASM5;
+import static org.objectweb.asm.Opcodes.BIPUSH;
+import static org.objectweb.asm.Opcodes.GETSTATIC;
+import static org.objectweb.asm.Opcodes.IFNE;
+import static org.objectweb.asm.Opcodes.IF_ICMPGE;
+import static org.objectweb.asm.Opcodes.ILOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
+import static org.objectweb.asm.Opcodes.RETURN;
+
+import static org.chromium.bytecode.TypeUtils.ASSET_MANAGER;
+import static org.chromium.bytecode.TypeUtils.BOOLEAN;
+import static org.chromium.bytecode.TypeUtils.BUILD_HOOKS_ANDROID;
+import static org.chromium.bytecode.TypeUtils.CONFIGURATION;
+import static org.chromium.bytecode.TypeUtils.CONTEXT;
+import static org.chromium.bytecode.TypeUtils.CONTEXT_WRAPPER;
+import static org.chromium.bytecode.TypeUtils.INT;
+import static org.chromium.bytecode.TypeUtils.RESOURCES;
+import static org.chromium.bytecode.TypeUtils.STRING;
+import static org.chromium.bytecode.TypeUtils.THEME;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Label;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A ClassVisitor for providing access to custom resources via BuildHooksAndroid.
+ *
+ * The goal of this class is to provide hooks into all places where android resources
+ * are available so that they can be modified before use. This is done by rewriting the bytecode
+ * for all callable definitions of certain Context methods, specifically:
+ * - getResources
+ * - getAssets
+ * - getTheme
+ * - setTheme
+ * - createConfigurationContext
+ *
+ * Only classes at the framework boundary are rewritten since presumably all other indirect Context
+ * subclasses will end up calling their respective super methods (i.e. we bytecode rewrite
+ * BaseChromiumApplication since it extends Application, but not ContentApplication since it
+ * extends a non-framework subclass.
+ */
+class CustomResourcesClassAdapter extends ClassVisitor {
+ private static final String IS_ENABLED_METHOD = "isEnabled";
+ private static final String IS_ENABLED_DESCRIPTOR = TypeUtils.getMethodDescriptor(BOOLEAN);
+ // Cached since this is used so often.
+ private static final String GET_IDENTIFIER_DESCRIPTOR =
+ TypeUtils.getMethodDescriptor(INT, STRING, STRING, STRING);
+
+ // Existing methods are more difficult to handle, and not currently needed.
+ private static final List<String> PROHIBITED_METHODS = Arrays.asList(
+ TypeUtils.getMethodSignature("getResources", RESOURCES),
+ TypeUtils.getMethodSignature("getAssets", ASSET_MANAGER),
+ TypeUtils.getMethodSignature("getTheme", THEME),
+ TypeUtils.getMethodSignature("createConfigurationContext", CONTEXT, CONFIGURATION),
+ TypeUtils.getMethodSignature("setTheme", VOID, INT));
+
+ private boolean mShouldTransform;
+ private String mClassName;
+ private String mSuperClassName;
+ private ClassLoader mClassLoader;
+
+ CustomResourcesClassAdapter(ClassVisitor visitor, String className, String superClassName,
+ ClassLoader classLoader) {
+ super(ASM5, visitor);
+ this.mClassName = className;
+ this.mSuperClassName = superClassName;
+ this.mClassLoader = classLoader;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+ mShouldTransform = shouldTransform();
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ if (mShouldTransform) {
+ String methodSignature = name + desc;
+ if (requiresModifyingExisting(methodSignature)) {
+ throw new RuntimeException("Rewriting existing methods not supported: " + mClassName
+ + "#" + methodSignature);
+ }
+ }
+ return new RewriteGetIdentifierMethodVisitor(
+ super.visitMethod(access, name, desc, signature, exceptions));
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mShouldTransform) {
+ delegateCreateConfigurationContext();
+ delegateSetTheme();
+ delegateGet("getAssets", ASSET_MANAGER);
+ delegateGet("getTheme", THEME);
+ delegateGet("getResources", RESOURCES);
+ }
+ super.visitEnd();
+ }
+
+ private boolean requiresModifyingExisting(String methodDescriptor) {
+ return PROHIBITED_METHODS.contains(methodDescriptor);
+ }
+
+ private boolean shouldTransform() {
+ if (!isDescendantOfContext()) {
+ return false;
+ }
+ if (!superClassIsFrameworkClass()) {
+ return false;
+ }
+ return !superClassIsContextWrapper();
+ }
+
+ private boolean superClassIsFrameworkClass() {
+ return loadClass(mSuperClassName).getProtectionDomain().toString().contains("android.jar");
+ }
+
+ private boolean isDescendantOfContext() {
+ return isSubClass(mClassName, CONTEXT);
+ }
+
+ private boolean superClassIsContextWrapper() {
+ return mSuperClassName.equals(CONTEXT_WRAPPER);
+ }
+
+ private boolean isSubClass(String candidate, String other) {
+ Class<?> candidateClazz = loadClass(candidate);
+ Class<?> parentClazz = loadClass(other);
+ return parentClazz.isAssignableFrom(candidateClazz);
+ }
+
+ private Class<?> loadClass(String className) {
+ try {
+ return mClassLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Remaps Resources.getIdentifier() method calls to use BuildHooksAndroid.
+ *
+ * resourceObj.getIdentifier(String, String, String) becomes:
+ * BuildHooksAndroid.getIdentifier(resourceObj, String, String, String);
+ */
+ private static final class RewriteGetIdentifierMethodVisitor extends MethodVisitor {
+ RewriteGetIdentifierMethodVisitor(MethodVisitor mv) {
+ super(ASM5, mv);
+ }
+
+ @Override
+ public void visitMethodInsn(
+ int opcode, String owner, String name, String desc, boolean itf) {
+ String methodName = "getIdentifier";
+ if (opcode == INVOKEVIRTUAL && owner.equals(RESOURCES) && name.equals(methodName)
+ && desc.equals(GET_IDENTIFIER_DESCRIPTOR)) {
+ super.visitMethodInsn(INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName,
+ TypeUtils.getMethodDescriptor(INT, RESOURCES, STRING, STRING, STRING), itf);
+ } else {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ }
+ }
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * public Context createConfigurationContext(Configuration configuration) {
+ * // createConfigurationContext does not exist before API level 17.
+ * if (Build.VERSION.SDK_INT < 17) return null;
+ * if (!BuildHooksAndroid.isEnabled()) return super.createConfigurationContext(configuration);
+ * return BuildHooksAndroid.createConfigurationContext(
+ * super.createConfigurationContext(configuration));
+ * }
+ * </pre>
+ * }
+ */
+ private void delegateCreateConfigurationContext() {
+ String methodName = "createConfigurationContext";
+ String methodDescriptor = TypeUtils.getMethodDescriptor(CONTEXT, CONFIGURATION);
+ MethodVisitor mv = super.visitMethod(ACC_PUBLIC, methodName, methodDescriptor, null, null);
+ mv.visitCode();
+ mv.visitFieldInsn(GETSTATIC, "android/os/Build$VERSION", "SDK_INT", INT);
+ mv.visitIntInsn(BIPUSH, 17);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IF_ICMPGE, l0);
+ mv.visitInsn(ACONST_NULL);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l0);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, IS_ENABLED_METHOD, IS_ENABLED_DESCRIPTOR, false);
+ Label l1 = new Label();
+ mv.visitJumpInsn(IFNE, l1);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, methodDescriptor, false);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l1);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, methodDescriptor, false);
+ mv.visitMethodInsn(INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName,
+ TypeUtils.getMethodDescriptor(CONTEXT, CONTEXT), false);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(2, 2);
+ mv.visitEnd();
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * public void setTheme(int theme) {
+ * if (!BuildHooksAndroid.isEnabled()) {
+ * super.setTheme(theme);
+ * return;
+ * }
+ * BuildHooksAndroid.setTheme(this, theme);
+ * }
+ * </pre>
+ */
+ private void delegateSetTheme() {
+ String methodName = "setTheme";
+ String methodDescriptor = TypeUtils.getMethodDescriptor(VOID, INT);
+ String buildHooksMethodDescriptor = TypeUtils.getMethodDescriptor(VOID, CONTEXT, INT);
+ MethodVisitor mv = super.visitMethod(ACC_PUBLIC, methodName, methodDescriptor, null, null);
+ mv.visitCode();
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, IS_ENABLED_METHOD, IS_ENABLED_DESCRIPTOR, false);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IFNE, l0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ILOAD, 1);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, methodDescriptor, false);
+ mv.visitInsn(RETURN);
+ mv.visitLabel(l0);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ILOAD, 1);
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName, buildHooksMethodDescriptor, false);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(2, 2);
+ mv.visitEnd();
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * public returnType methodName() {
+ * if (!BuildHooksAndroid.isEnabled()) return super.methodName();
+ * return BuildHooksAndroid.methodName(this);
+ * }
+ * </pre>
+ */
+ private void delegateGet(String methodName, String returnType) {
+ String getMethodDescriptor = TypeUtils.getMethodDescriptor(returnType);
+ String buildHooksGetMethodDescriptor = TypeUtils.getMethodDescriptor(returnType, CONTEXT);
+ MethodVisitor mv =
+ super.visitMethod(ACC_PUBLIC, methodName, getMethodDescriptor, null, null);
+ mv.visitCode();
+ mv.visitMethodInsn(
+ INVOKESTATIC, BUILD_HOOKS_ANDROID, IS_ENABLED_METHOD, IS_ENABLED_DESCRIPTOR, false);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IFNE, l0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, methodName, getMethodDescriptor, false);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l0);
+ mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESTATIC, BUILD_HOOKS_ANDROID, methodName,
+ buildHooksGetMethodDescriptor, false);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(1, 1);
+ mv.visitEnd();
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java
new file mode 100644
index 0000000000..8d6ae69483
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/SplitCompatClassAdapter.java
@@ -0,0 +1,149 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_PROTECTED;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.RETURN;
+
+import static org.chromium.bytecode.TypeUtils.CONTEXT;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+
+import java.util.Set;
+
+/**
+ * A ClassVisitor for injecting ModuleInstaller.initActivity(activity) method call
+ * into Activity's attachBaseContext() method. The goal is to eventually invoke
+ * SplitCompat.install() method if running with the binary that has bundle support
+ * enabled. This needs to happen for activities that were not built with SplitCompat
+ * support.
+ */
+class SplitCompatClassAdapter extends ClassVisitor {
+ private static final String ANDROID_APP_ACTIVITY_CLASS_NAME = "android/app/Activity";
+ private static final String ATTACH_BASE_CONTEXT_METHOD_NAME = "attachBaseContext";
+ private static final String ATTACH_BASE_CONTEXT_DESCRIPTOR =
+ TypeUtils.getMethodDescriptor(VOID, CONTEXT);
+
+ private static final String MODULE_INSTALLER_CLASS_NAME =
+ "org/chromium/components/module_installer/ModuleInstaller";
+ private static final String INIT_ACTIVITY_METHOD_NAME = "initActivity";
+ private static final String INIT_ACTIVITY_DESCRIPTOR =
+ TypeUtils.getMethodDescriptor(VOID, CONTEXT);
+
+ private boolean mShouldTransform;
+
+ private Set<String> mClassNames;
+
+ private ClassLoader mClassLoader;
+
+ /**
+ * Creates instance of SplitCompatClassAdapter.
+ *
+ * @param visitor
+ * @param classNames Names of classes into which the attachBaseContext method will be
+ * injected. Currently, we'll only consider classes for bytecode rewriting only if
+ * they inherit directly from android.app.Activity & not already contain
+ * attachBaseContext method.
+ * @param classLoader
+ */
+ SplitCompatClassAdapter(ClassVisitor visitor, Set<String> classNames, ClassLoader classLoader) {
+ super(Opcodes.ASM5, visitor);
+
+ mShouldTransform = false;
+ mClassNames = classNames;
+ mClassLoader = classLoader;
+ }
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName,
+ String[] interfaces) {
+ super.visit(version, access, name, signature, superName, interfaces);
+
+ if (mClassNames.contains(name)) {
+ if (!isSubclassOfActivity(name)) {
+ throw new RuntimeException(name
+ + " should be transformed but does not inherit from android.app.Activity");
+ }
+
+ mShouldTransform = true;
+ }
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ int access, String name, String descriptor, String signature, String[] exceptions) {
+ // Check if current method matches attachBaseContext & we're supposed to emit code - if so,
+ // fail.
+ if (mShouldTransform && name.equals(ATTACH_BASE_CONTEXT_METHOD_NAME)) {
+ throw new RuntimeException(ATTACH_BASE_CONTEXT_METHOD_NAME + " method already exists");
+ }
+
+ return super.visitMethod(access, name, descriptor, signature, exceptions);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (mShouldTransform) {
+ // If we reached this place, it means we're rewriting a class that inherits from
+ // Activity and there was no exception thrown due to existence of attachBaseContext
+ // method - emit code.
+ emitAttachBaseContext();
+ }
+
+ super.visitEnd();
+ }
+
+ /**
+ * Generates:
+ *
+ * <pre>
+ * protected void attachBaseContext(Context base) {
+ * super.attachBaseContext(base);
+ * ModuleInstaller.initActivity(this);
+ * }
+ * </pre>
+ */
+ private void emitAttachBaseContext() {
+ MethodVisitor mv = super.visitMethod(ACC_PROTECTED, ATTACH_BASE_CONTEXT_METHOD_NAME,
+ ATTACH_BASE_CONTEXT_DESCRIPTOR, null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0); // load "this" on stack
+ mv.visitVarInsn(ALOAD, 1); // load first method parameter on stack (Context)
+ mv.visitMethodInsn(INVOKESPECIAL, ANDROID_APP_ACTIVITY_CLASS_NAME,
+ ATTACH_BASE_CONTEXT_METHOD_NAME,
+ ATTACH_BASE_CONTEXT_DESCRIPTOR); // invoke super's attach base context
+ mv.visitVarInsn(ALOAD, 0); // load "this" on stack
+ mv.visitMethodInsn(INVOKESTATIC, MODULE_INSTALLER_CLASS_NAME, INIT_ACTIVITY_METHOD_NAME,
+ INIT_ACTIVITY_DESCRIPTOR);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(2, 2); // max stack size - 2, max locals - 2
+ mv.visitEnd();
+ }
+
+ /**
+ * Checks whether passed in class inherits from android.app.Activity.
+ * @param name Name of the class to be checked.
+ * @return true if class inherits from android.app.Activity, false otherwise.
+ */
+ private boolean isSubclassOfActivity(String name) {
+ Class<?> activityClass = loadClass(ANDROID_APP_ACTIVITY_CLASS_NAME);
+ Class<?> candidateClass = loadClass(name);
+ return activityClass.isAssignableFrom(candidateClass);
+ }
+
+ private Class<?> loadClass(String className) {
+ try {
+ return mClassLoader.loadClass(className.replace('/', '.'));
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
new file mode 100644
index 0000000000..3f50b25f3e
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/ThreadAssertionClassAdapter.java
@@ -0,0 +1,83 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ASM5;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+
+import org.objectweb.asm.AnnotationVisitor;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+/**
+ * A ClassVisitor which adds calls to
+ * {@link org.chromium.base.ThreadUtils}'s assertOnUiThread/assertOnBackgroundThread when the
+ * corresponding {@link android.support.annotation.UiThread} or
+ * {@link android.support.annotation.WorkerThread} annotations are present. The function calls
+ * are placed at the start of the method.
+ */
+class ThreadAssertionClassAdapter extends ClassVisitor {
+ private static final String THREAD_UTILS_DESCRIPTOR = "org/chromium/base/ThreadUtils";
+ private static final String THREAD_UTILS_SIGNATURE = "()V";
+ private static final String UI_THREAD_ANNOTATION_DESCRIPTOR =
+ "Landroid/support/annotation/UiThread;";
+ private static final String WORKER_THREAD_ANNOTATION_DESCRIPTOR =
+ "Landroid/support/annotation/WorkerThread;";
+
+ ThreadAssertionClassAdapter(ClassVisitor visitor) {
+ super(ASM5, visitor);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(final int access, final String name, String desc,
+ String signature, String[] exceptions) {
+ return new AddAssertMethodVisitor(
+ super.visitMethod(access, name, desc, signature, exceptions));
+ }
+
+ private static class AddAssertMethodVisitor extends MethodVisitor {
+ String mAssertMethodName = "";
+
+ AddAssertMethodVisitor(MethodVisitor mv) {
+ super(ASM5, mv);
+ }
+
+ /**
+ * Call for annotations on the method. Checks if the annotation is @UiThread
+ * or @WorkerThread, and if so will set the mAssertMethodName property to the name of the
+ * method to call in order to assert that a method is running on the intented thread.
+ *
+ * @param descriptor Annotation descriptor containing its name and package.
+ */
+ @Override
+ public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) {
+ switch (descriptor) {
+ case UI_THREAD_ANNOTATION_DESCRIPTOR:
+ mAssertMethodName = "assertOnUiThread";
+ break;
+ case WORKER_THREAD_ANNOTATION_DESCRIPTOR:
+ mAssertMethodName = "assertOnBackgroundThread";
+ break;
+ default:
+ break;
+ }
+
+ return super.visitAnnotation(descriptor, visible);
+ }
+
+ /**
+ * Called to start visiting code. Will also insert the assertOnXThread methods at the start
+ * of the method if needed.
+ */
+ @Override
+ public void visitCode() {
+ super.visitCode();
+ if (!mAssertMethodName.equals("")) {
+ visitMethodInsn(INVOKESTATIC, THREAD_UTILS_DESCRIPTOR, mAssertMethodName,
+ THREAD_UTILS_SIGNATURE, false);
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
new file mode 100644
index 0000000000..ed2dc2dc24
--- /dev/null
+++ b/deps/v8/build/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.Type;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for accessing {@link Type}s Strings.
+ *
+ * Useful definitions to keep in mind when using this class:
+ * Internal name - The fully qualified name for a type with dots replaced by slashes. Not really
+ * relevant for primitive types.
+ * Type descriptor - Single letters for primitive types, "L" + internal name + ";" for class types.
+ *
+ * The methods in this class accept internal names or primitive type descriptors.
+ */
+class TypeUtils {
+ static final String ASSERTION_ERROR = "java/lang/AssertionError";
+ static final String ASSET_MANAGER = "android/content/res/AssetManager";
+ static final String BUILD_HOOKS = "org/chromium/build/BuildHooks";
+ static final String BUILD_HOOKS_ANDROID = "org/chromium/build/BuildHooksAndroid";
+ static final String CONFIGURATION = "android/content/res/Configuration";
+ static final String CONTEXT = "android/content/Context";
+ static final String CONTEXT_WRAPPER = "android/content/ContextWrapper";
+ static final String RESOURCES = "android/content/res/Resources";
+ static final String STRING = "java/lang/String";
+ static final String THEME = "android/content/res/Resources$Theme";
+
+ static final String BOOLEAN = "Z";
+ static final String INT = "I";
+ static final String VOID = "V";
+ private static final Map<String, Type> PRIMITIVE_DESCRIPTORS;
+ static {
+ PRIMITIVE_DESCRIPTORS = new HashMap<>();
+ PRIMITIVE_DESCRIPTORS.put(Type.BOOLEAN_TYPE.toString(), Type.BOOLEAN_TYPE);
+ PRIMITIVE_DESCRIPTORS.put(Type.INT_TYPE.toString(), Type.INT_TYPE);
+ PRIMITIVE_DESCRIPTORS.put(Type.VOID_TYPE.toString(), Type.VOID_TYPE);
+ }
+
+ /**
+ * Returns the full method signature with internal names.
+ *
+ * @param methodName Name of the method (ex. "getResources").
+ * @param returnType Internal name for the return type.
+ * @param argumentTypes List of internal names for argument types.
+ * @return String representation of the method signature.
+ */
+ static String getMethodSignature(
+ String methodName, String returnType, String... argumentTypes) {
+ return methodName + getMethodDescriptor(returnType, argumentTypes);
+ }
+
+ /**
+ * Builds a method descriptor suitable for use with {@link org.objectweb.asm.MethodVisitor}.
+ *
+ * @param returnType Internal name for the return type of the method (primitive or class).
+ * @param argumentTypes Internal names for the argument types (primitive or class).
+ * @return The generated method descriptor.
+ */
+ static String getMethodDescriptor(String returnType, String... argumentTypes) {
+ Type[] typedArguments = new Type[argumentTypes.length];
+ for (int i = 0; i < argumentTypes.length; ++i) {
+ // Argument list should be empty in this case, not V (void).
+ assert !Type.VOID_TYPE.toString().equals(argumentTypes[i]);
+ typedArguments[i] = convert(argumentTypes[i]);
+ }
+ return Type.getMethodDescriptor(convert(returnType), typedArguments);
+ }
+
+ /**
+ * Converts an internal name for a type to a {@link Type}.
+ *
+ * @param type Internal name for a type (primitive or class).
+ * @return The resulting Type.
+ */
+ private static Type convert(String type) {
+ if (PRIMITIVE_DESCRIPTORS.containsKey(type)) {
+ return PRIMITIVE_DESCRIPTORS.get(type);
+ }
+ return Type.getObjectType(type);
+ }
+}
diff --git a/deps/v8/build/android/chromium-debug.keystore b/deps/v8/build/android/chromium-debug.keystore
new file mode 100644
index 0000000000..67eb0aa34c
--- /dev/null
+++ b/deps/v8/build/android/chromium-debug.keystore
Binary files differ
diff --git a/deps/v8/build/android/convert_dex_profile.py b/deps/v8/build/android/convert_dex_profile.py
new file mode 100755
index 0000000000..f9fdeb6793
--- /dev/null
+++ b/deps/v8/build/android/convert_dex_profile.py
@@ -0,0 +1,557 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import re
+import subprocess
+import sys
+
# Matches the quoted class descriptor in a dexdump "Class descriptor" line,
# e.g. "'La;'".
DEX_CLASS_NAME_RE = re.compile(r'\'L(?P<class_name>[^;]+);\'')
# Matches the quoted method name in a dexdump "name" line, e.g. "'a'".
DEX_METHOD_NAME_RE = re.compile(r'\'(?P<method_name>[^\']+)\'')
DEX_METHOD_TYPE_RE = re.compile( # type descriptor method signature re
    r'\''
    r'\('
    r'(?P<method_params>[^)]*)'
    r'\)'
    r'(?P<method_return_type>[^\']+)'
    r'\'')
# Matches "line=<n>" entries in a dexdump "positions" section.
DEX_METHOD_LINE_NR_RE = re.compile(r'line=(?P<line_number>\d+)')

# Matches one method line of a profile,
# e.g. "HPLa;->a(Ljava/lang/String;)I".
PROFILE_METHOD_RE = re.compile(
    r'(?P<tags>[HSP]+)' # tags such as H/S/P
    r'(?P<class_name>L[^;]+;)' # class name in type descriptor format
    r'->(?P<method_name>[^(]+)'
    r'\((?P<method_params>[^)]*)\)'
    r'(?P<method_return_type>.+)')

# Matches a proguard class mapping line, e.g. "org.chromium.Original -> a:".
PROGUARD_CLASS_MAPPING_RE = re.compile(
    r'(?P<original_name>[^ ]+)'
    r' -> '
    r'(?P<obfuscated_name>[^:]+):')
# Matches a proguard method mapping line; field mapping lines do not match,
# e.g. "4:4:org.chromium.Original getInstance():203 -> a".
PROGUARD_METHOD_MAPPING_RE = re.compile(
    # line_start:line_end: (optional)
    r'((?P<line_start>\d+):(?P<line_end>\d+):)?'
    r'(?P<return_type>[^ ]+)' # original method return type
    # original method class name (if exists)
    r' (?:(?P<original_method_class>[a-zA-Z_\d.$]+)\.)?'
    r'(?P<original_method_name>[^.\(]+)'
    r'\((?P<params>[^\)]*)\)' # original method params
    r'(?:[^ ]*)' # original method line numbers (ignored)
    r' -> '
    r'(?P<obfuscated_name>.+)') # obfuscated method name

# Matches one (possibly array) type in type descriptor format,
# e.g. "[Ljava/lang/String;" or "I".
TYPE_DESCRIPTOR_RE = re.compile(
    r'(?P<brackets>\[*)'
    r'(?:'
    r'(?P<class_name>L[^;]+;)'
    r'|'
    r'[VZBSCIJFD]'
    r')')

# Maps Java primitive type names (dot notation) to their type descriptors.
# The empty string maps to itself so empty parameter lists pass through.
DOT_NOTATION_MAP = {
    '': '',
    'boolean': 'Z',
    'byte': 'B',
    'void': 'V',
    'short': 'S',
    'char': 'C',
    'int': 'I',
    'long': 'J',
    'float': 'F',
    'double': 'D'
}
+
class Method(object):
  """A Java method identified by name, class and (optionally) signature.

  Instances are used as dict keys and set members, so equality and hashing
  are defined. Hashing intentionally uses only (name, class_name) because
  param_types/return_type may be filled in later; equality compares all
  four fields.
  """

  def __init__(self, name, class_name, param_types=None, return_type=None):
    self.name = name
    self.class_name = class_name
    self.param_types = param_types
    self.return_type = return_type

  def __str__(self):
    return '{}->{}({}){}'.format(self.class_name, self.name,
        self.param_types or '', self.return_type or '')

  def __repr__(self):
    return 'Method<{}->{}({}){}>'.format(self.class_name, self.name,
        self.param_types or '', self.return_type or '')

  def _ComparisonKey(self):
    """Tuple of all identifying fields, used for ordering and equality."""
    return (self.class_name, self.name, self.param_types, self.return_type)

  def __cmp__(self, other):
    # Python 2 only; Python 3 uses the rich comparisons below instead.
    return cmp(self._ComparisonKey(), other._ComparisonKey())

  # Rich comparisons so equality and sorting also work under Python 3,
  # where __cmp__ is ignored (without these, __eq__ would fall back to
  # identity while __hash__ is value-based, breaking dict/set lookups).
  def __eq__(self, other):
    return self._ComparisonKey() == other._ComparisonKey()

  def __ne__(self, other):
    return not self.__eq__(other)

  def __lt__(self, other):
    return self._ComparisonKey() < other._ComparisonKey()

  def __hash__(self):
    # only hash name and class_name since other fields may not be set yet.
    return hash((self.name, self.class_name))
+
+
class Class(object):
  """A class parsed out of a dex file, with its methods and line numbers."""

  def __init__(self, name):
    # Class name in type descriptor format without the enclosing 'L' and ';'.
    self.name = name
    # List of (method, set(line numbers)) tuples.
    self._methods = []

  def AddMethod(self, method, line_numbers):
    """Registers |method| together with the source lines it is mapped to.

    Args:
      method: the Method object to add.
      line_numbers: iterable of line numbers the method covers.
    """
    self._methods.append((method, set(line_numbers)))

  def FindMethodsAtLine(self, method_name, line_start, line_end=None):
    """Searches through dex class for a method given a name and line numbers

    The dex maps methods to line numbers, this method, given a method name
    in this class as well as a start line and an optional end line (which act
    as hints as to which function in the class is being looked for), returns a
    list of possible matches (or None if none are found).

    Args:
      method_name: name of method being searched for
      line_start: start of hint range for lines in this method
      line_end: end of hint range for lines in this method (optional)

    Returns:
      A list of Method objects that could match the hints given, or None if no
      method is found.
    """
    if line_end is None:
      hint_lines = set([line_start])
    else:
      hint_lines = set(range(line_start, line_end + 1))

    named_methods = [(method, lines) for method, lines in self._methods
                     if method.name == method_name]

    # With exactly one (or no) name match, the line hints are not needed.
    if len(named_methods) == 1:
      return [named_methods[0][0]]
    if not named_methods:
      return None

    # First pass: methods mapped to at least one of the hinted lines.
    # Second pass (only if the first finds nothing): methods whose overall
    # line range merely overlaps the hint range.
    direct_hit = lambda lines: not hint_lines.isdisjoint(lines)
    range_overlap = lambda lines: (max(hint_lines) >= min(lines)
                                   and min(hint_lines) <= max(lines))
    for matches_hint in (direct_hit, range_overlap):
      found_methods = [method for method, lines in named_methods
                       if matches_hint(lines)]
      if found_methods:
        if len(found_methods) > 1:
          # Typo fix: was "ambigous".
          logging.warning(
              'ambiguous methods in dex %s at lines %s in class "%s"',
              found_methods, hint_lines, self.name)
        return found_methods

    logging.warning('No method named "%s" in class "%s" is '
                    'mapped to lines %s', method_name, self.name, hint_lines)
    return None
+
+
class Profile(object):
  """Accumulates the classes and tagged methods of a (de)obfuscated profile."""

  def __init__(self):
    # Maps a Method to the set of single-character tags (e.g. 'H', 'S', 'P')
    # seen for it across the input profile.
    self._methods = collections.defaultdict(set)
    self._classes = []

  def AddMethod(self, method, tags):
    """Records |method| with every tag character in |tags|."""
    self._methods[method].update(tags)

  def AddClass(self, cls):
    """Records a class line (a plain string) for the output profile."""
    self._classes.append(cls)

  def WriteToFile(self, path):
    """Writes sorted class lines, then sorted tagged method lines, to |path|."""
    with open(path, 'w') as output_profile:
      output_profile.writelines(
          cls + '\n' for cls in sorted(self._classes))
      for method in sorted(self._methods):
        joined_tags = ''.join(sorted(self._methods[method]))
        output_profile.write('{}{}\n'.format(joined_tags, method))
+
+
class ProguardMapping(object):
  """Bidirectional-capable lookup table built from a proguard mapping file."""

  def __init__(self):
    # Maps a Method to the set of Methods it translates to.
    self._method_mapping = collections.defaultdict(set)
    # Maps a class name to its translated name; both sides are in type
    # descriptor format.
    self._class_mapping = {}

  def AddMethodMapping(self, from_method, to_method):
    """Records that |from_method| translates to |to_method|."""
    self._method_mapping[from_method].add(to_method)

  def AddClassMapping(self, from_class, to_class):
    """Records that |from_class| translates to |to_class|."""
    self._class_mapping[from_class] = to_class

  def GetMethodMapping(self, from_method):
    """Returns the set of translations for |from_method|, or None."""
    return self._method_mapping.get(from_method)

  def GetClassMapping(self, from_class):
    """Returns the translated class name, or |from_class| if unmapped."""
    return self._class_mapping.get(from_class, from_class)

  def MapTypeDescriptor(self, type_descriptor):
    """Translates a single type descriptor (arrays keep their brackets)."""
    match = TYPE_DESCRIPTOR_RE.search(type_descriptor)
    assert match is not None
    class_name = match.group('class_name')
    if class_name is None:
      # A native (primitive) type maps to itself.
      return match.group()
    return match.group('brackets') + self.GetClassMapping(class_name)

  def MapTypeDescriptorList(self, type_descriptor_list):
    """Translates every type descriptor inside |type_descriptor_list|."""
    return TYPE_DESCRIPTOR_RE.sub(
        lambda match: self.MapTypeDescriptor(match.group()),
        type_descriptor_list)
+
+
class MalformedLineException(Exception):
  """Raised when a line of an input file cannot be parsed.

  Attributes:
    message: human-readable description of the problem.
    line_number: 0-based index of the offending line.
  """

  def __init__(self, message, line_number):
    super(MalformedLineException, self).__init__(message)
    # Store the message explicitly: BaseException.message was deprecated in
    # Python 2.6 and removed in Python 3, so __str__ cannot rely on it.
    self.message = message
    self.line_number = line_number

  def __str__(self):
    return self.message + ' at line {}'.format(self.line_number)
+
+
class MalformedProguardMappingException(MalformedLineException):
  """Raised when a proguard mapping file line cannot be parsed."""
  pass
+
+
class MalformedProfileException(MalformedLineException):
  """Raised when a profile line cannot be parsed."""
  pass
+
+
+def _RunDexDump(dexdump_path, dex_file_path):
+ return subprocess.check_output([dexdump_path, dex_file_path]).splitlines()
+
+
+def _ReadFile(file_path):
+ with open(file_path, 'r') as f:
+ return f.readlines()
+
+
def _ToTypeDescriptor(dot_notation):
  """Parses a dot notation type and returns it in type descriptor format

  eg:
    org.chromium.browser.ChromeActivity -> Lorg/chromium/browser/ChromeActivity;
    boolean -> Z
    int[] -> [I

  Args:
    dot_notation: trimmed string with a single type in dot notation format

  Returns:
    A string with the type in type descriptor format
  """
  trimmed = dot_notation.strip()
  # Each trailing '[]' pair becomes one leading '[' in descriptor format.
  array_prefix = ''
  while trimmed.endswith('[]'):
    array_prefix += '['
    trimmed = trimmed[:-2]
  primitive = DOT_NOTATION_MAP.get(trimmed)
  if primitive is not None:
    return array_prefix + primitive
  return array_prefix + 'L' + trimmed.replace('.', '/') + ';'
+
+
def _DotNotationListToTypeDescriptorList(dot_notation_list_string):
  """Parses a param list of dot notation format and returns it in type
  descriptor format

  eg:
    org.chromium.browser.ChromeActivity,boolean,int[] ->
        Lorg/chromium/browser/ChromeActivity;Z[I

  Args:
    dot_notation_list_string: single string with multiple comma separated types
      in dot notation format

  Returns:
    A string with the param list in type descriptor format
  """
  params = dot_notation_list_string.split(',')
  return ''.join(map(_ToTypeDescriptor, params))
+
+
def ProcessDex(dex_dump):
  """Parses dexdump output returning a dict of class names to Class objects

  Parses output of the dexdump command on a dex file and extracts information
  about classes and their respective methods and which line numbers a method is
  mapped to.

  Methods that are not mapped to any line number are ignored and not listed
  inside their respective Class objects.

  Args:
    dex_dump: An array of lines of dexdump output

  Returns:
    A dict that maps from class names in type descriptor format (but without the
    surrounding 'L' and ';') to Class objects.
  """
  # class_name: Class
  classes_by_name = {}
  # Line-driven state machine over the dexdump output; the flags below track
  # which section of a class dump we are currently inside.
  current_class = None
  current_method = None
  reading_positions = False
  reading_methods = False
  method_line_numbers = []
  for line in dex_dump:
    line = line.strip()
    if line.startswith('Class descriptor'):
      # New class started, no longer reading methods.
      reading_methods = False
      current_class = Class(DEX_CLASS_NAME_RE.search(line).group('class_name'))
      classes_by_name[current_class.name] = current_class
    elif (line.startswith('Direct methods')
          or line.startswith('Virtual methods')):
      # Both sections list methods in the same "name"/"type" format.
      reading_methods = True
    elif reading_methods and line.startswith('name'):
      assert current_class is not None
      # Re-add the 'L'/';' delimiters so the Method's class name is a full
      # type descriptor.
      current_method = Method(
          DEX_METHOD_NAME_RE.search(line).group('method_name'),
          "L" + current_class.name + ";")
    elif reading_methods and line.startswith('type'):
      # The "type" line carries the method's full signature.
      assert current_method is not None
      match = DEX_METHOD_TYPE_RE.search(line)
      current_method.param_types = match.group('method_params')
      current_method.return_type = match.group('method_return_type')
    elif line.startswith('positions'):
      # Start of the bytecode-offset -> source-line table for the method.
      assert reading_methods
      reading_positions = True
      method_line_numbers = []
    elif reading_positions and line.startswith('0x'):
      line_number = DEX_METHOD_LINE_NR_RE.search(line).group('line_number')
      method_line_numbers.append(int(line_number))
    elif reading_positions and line.startswith('locals'):
      # "locals" follows "positions"; methods with no line entries are dropped.
      if len(method_line_numbers) > 0:
        current_class.AddMethod(current_method, method_line_numbers)
      # finished reading method line numbers
      reading_positions = False
  return classes_by_name
+
+
def ProcessProguardMapping(proguard_mapping_lines, dex):
  """Parses a proguard mapping file

  This takes proguard mapping file lines and then uses the obfuscated dex to
  create a mapping of unobfuscated methods to obfuscated ones and vice versa.

  The dex is used because the proguard mapping file only has the name of the
  obfuscated methods but not their signature, thus the dex is read to look up
  which method with a specific name was mapped to the lines mentioned in the
  proguard mapping file.

  Args:
    proguard_mapping_lines: Array of strings, each is a line from the proguard
      mapping file (in order).
    dex: a dict of class name (in type descriptor format but without the
      enclosing 'L' and ';') to a Class object.
  Returns:
    Two dicts the first maps from obfuscated methods to a set of non-obfuscated
    ones. It also maps the obfuscated class names to original class names, both
    in type descriptor format (with the enclosing 'L' and ';')

  Raises:
    MalformedProguardMappingException: on an unparsable class mapping line.
  """
  mapping = ProguardMapping()
  reverse_mapping = ProguardMapping()
  # Method mappings without line info cannot be resolved against the dex yet;
  # they are deferred until all class mappings are known (see loop below).
  to_be_obfuscated = []
  current_class_orig = None
  current_class_obfs = None
  for index, line in enumerate(proguard_mapping_lines):
    if line.strip() == '':
      continue
    # Unindented lines are class mappings; indented ones belong to the
    # most recent class.
    if not line.startswith(' '):
      match = PROGUARD_CLASS_MAPPING_RE.search(line)
      if match is None:
        raise MalformedProguardMappingException(
            'Malformed class mapping', index)
      current_class_orig = match.group('original_name')
      current_class_obfs = match.group('obfuscated_name')
      mapping.AddClassMapping(_ToTypeDescriptor(current_class_obfs),
                              _ToTypeDescriptor(current_class_orig))
      reverse_mapping.AddClassMapping(_ToTypeDescriptor(current_class_orig),
                                      _ToTypeDescriptor(current_class_obfs))
      continue

    assert current_class_orig is not None
    assert current_class_obfs is not None
    line = line.strip()
    match = PROGUARD_METHOD_MAPPING_RE.search(line)
    # check if is a method mapping (we ignore field mappings)
    if match is not None:
      # check if this line is an inlining by reading ahead 1 line.
      # An inlined frame shares its line_start/line_end with the next line;
      # only the last (outermost) frame of such a group is kept.
      if index + 1 < len(proguard_mapping_lines):
        next_match = PROGUARD_METHOD_MAPPING_RE.search(
            proguard_mapping_lines[index+1].strip())
        if (next_match and match.group('line_start') is not None
            and next_match.group('line_start') == match.group('line_start')
            and next_match.group('line_end') == match.group('line_end')):
          continue # This is an inlining, skip

      # The method may belong to another class (e.g. an inlined frame from a
      # different class); fall back to the current class otherwise.
      original_method = Method(
          match.group('original_method_name'),
          _ToTypeDescriptor(
              match.group('original_method_class') or current_class_orig),
          _DotNotationListToTypeDescriptorList(match.group('params')),
          _ToTypeDescriptor(match.group('return_type')))

      if match.group('line_start') is not None:
        # Use the dex line table to find which same-named obfuscated method
        # this mapping refers to.
        obfs_methods = (dex[current_class_obfs.replace('.', '/')]
                        .FindMethodsAtLine(
                            match.group('obfuscated_name'),
                            int(match.group('line_start')),
                            int(match.group('line_end'))))

        if obfs_methods is None:
          continue

        for obfs_method in obfs_methods:
          mapping.AddMethodMapping(obfs_method, original_method)
          reverse_mapping.AddMethodMapping(original_method, obfs_method)
      else:
        to_be_obfuscated.append(
            (original_method, match.group('obfuscated_name')))

  # Now that every class mapping is known, resolve the deferred methods by
  # obfuscating their signature components directly.
  for original_method, obfuscated_name in to_be_obfuscated:
    obfuscated_method = Method(
        obfuscated_name,
        reverse_mapping.GetClassMapping(original_method.class_name),
        reverse_mapping.MapTypeDescriptorList(original_method.param_types),
        reverse_mapping.MapTypeDescriptor(original_method.return_type))
    mapping.AddMethodMapping(obfuscated_method, original_method)
    reverse_mapping.AddMethodMapping(original_method, obfuscated_method)
  return mapping, reverse_mapping
+
+
def ProcessProfile(input_profile, proguard_mapping):
  """Parses an android profile and uses the proguard mapping to (de)obfuscate it

  This takes the android profile lines and for each method or class for the
  profile, it uses the mapping to either obfuscate or deobfuscate (based on the
  provided mapping) and returns a Profile object that stores this information.

  Args:
    input_profile: array of lines of the input profile
    proguard_mapping: a proguard mapping that would map from the classes and
      methods in the input profile to the classes and methods
      that should be in the output profile.

  Returns:
    A Profile object that stores the information (ie list of mapped classes and
    methods + tags)

  Raises:
    MalformedProfileException: if a line is neither a class line nor a
      parsable method line.
  """
  profile = Profile()
  for index, line in enumerate(input_profile):
    line = line.strip()
    # Class lines consist solely of a type descriptor, e.g. 'Lorg/chromium/X;'
    # (method lines start with their H/S/P tags instead).
    if line.startswith('L'):
      profile.AddClass(proguard_mapping.GetClassMapping(line))
      continue
    match = PROFILE_METHOD_RE.search(line)
    if not match:
      raise MalformedProfileException("Malformed line", index)

    method = Method(
        match.group('method_name'),
        match.group('class_name'),
        match.group('method_params'),
        match.group('method_return_type'))

    # A method may map to several candidates (ambiguous dex matches); each of
    # them inherits this line's tags. Unmapped methods are skipped with a
    # warning rather than failing the whole conversion.
    mapped_methods = proguard_mapping.GetMethodMapping(method)
    if mapped_methods is None:
      logging.warning('No method matching "%s" has been found in the proguard '
                      'mapping file', method)
      continue

    for original_method in mapped_methods:
      profile.AddMethod(original_method, match.group('tags'))

  return profile
+
+
def ObfuscateProfile(nonobfuscated_profile, dex_file, proguard_mapping,
                     dexdump_path, output_filename):
  """Obfuscates a profile using a dex file and its proguard mapping.

  Args:
    nonobfuscated_profile: a profile with nonobfuscated symbols.
    dex_file: path to the dex file matching the mapping.
    proguard_mapping: a mapping from nonobfuscated to obfuscated symbols used
      in the dex file.
    dexdump_path: path to the dexdump utility.
    output_filename: output filename in which to write the obfuscated profile.
  """
  dex_info = ProcessDex(_RunDexDump(dexdump_path, dex_file))
  # Only the reverse (obfuscating) direction of the mapping is needed here.
  _, obfuscation_mapping = ProcessProguardMapping(
      _ReadFile(proguard_mapping), dex_info)
  ProcessProfile(
      _ReadFile(nonobfuscated_profile),
      obfuscation_mapping).WriteToFile(output_filename)
+
+
def main(args):
  """Command-line entry point: (de)obfuscates an android profile.

  Args:
    args: command-line argument list (excluding the program name).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--dexdump-path',
      required=True,
      help='Path to dexdump binary.')
  parser.add_argument(
      '--dex-path',
      required=True,
      help='Path to dex file corresponding to the proguard mapping file.')
  parser.add_argument(
      '--proguard-mapping-path',
      required=True,
      help='Path to input proguard mapping file corresponding to the dex file.')
  parser.add_argument(
      '--output-profile-path',
      required=True,
      help='Path to output profile.')
  parser.add_argument(
      '--input-profile-path',
      required=True,
      # Fixed copy-paste error: this is the input profile, not the output.
      help='Path to input profile.')
  parser.add_argument(
      '--verbose',
      action='store_true',
      default=False,
      help='Print verbose output.')
  obfuscation = parser.add_mutually_exclusive_group(required=True)
  obfuscation.add_argument('--obfuscate', action='store_true',
      help='Indicates to output an obfuscated profile given a deobfuscated '
      'one.')
  obfuscation.add_argument('--deobfuscate', dest='obfuscate',
      action='store_false', help='Indicates to output a deobfuscated profile '
      'given an obfuscated one.')
  options = parser.parse_args(args)

  # Warnings (e.g. ambiguous or unmatched methods) are only surfaced when
  # --verbose is given; errors are always shown.
  if options.verbose:
    log_level = logging.WARNING
  else:
    log_level = logging.ERROR
  logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)

  dex = ProcessDex(_RunDexDump(options.dexdump_path, options.dex_path))
  # The forward mapping deobfuscates; the reverse mapping obfuscates.
  proguard_mapping, reverse_proguard_mapping = ProcessProguardMapping(
      _ReadFile(options.proguard_mapping_path), dex)
  if options.obfuscate:
    profile = ProcessProfile(
        _ReadFile(options.input_profile_path),
        reverse_proguard_mapping)
  else:
    profile = ProcessProfile(
        _ReadFile(options.input_profile_path),
        proguard_mapping)
  profile.WriteToFile(options.output_profile_path)
diff --git a/deps/v8/build/android/convert_dex_profile_tests.py b/deps/v8/build/android/convert_dex_profile_tests.py
new file mode 100644
index 0000000000..0ddc5ce4a1
--- /dev/null
+++ b/deps/v8/build/android/convert_dex_profile_tests.py
@@ -0,0 +1,276 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for convert_dex_profile.
+
+Can be run from build/android/:
+ $ cd build/android
+ $ python convert_dex_profile_tests.py
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+import convert_dex_profile as cp
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import build_utils
+
+cp.logging.disable(cp.logging.CRITICAL)
+
+# There are two obfuscations used in the tests below, each with the same
+# unobfuscated profile. The first, corresponding to DEX_DUMP, PROGUARD_MAPPING,
+# and OBFUSCATED_PROFILE, has an ambiguous method a() which is mapped to both
+# getInstance and initialize. The second, corresponding to DEX_DUMP_2,
+# PROGUARD_MAPPING_2 and OBFUSCATED_PROFILE_2, removes the ambiguity.
+
+DEX_DUMP = """
+
+Class descriptor : 'La;'
+ Direct methods -
+ #0 : (in La;)
+ name : '<clinit>'
+ type : '(Ljava/lang/String;)V'
+ code -
+ catches : 1
+ 0x000f - 0x001e
+ <any> -> 0x0093
+ positions :
+ 0x0001 line=310
+ 0x0057 line=313
+ locals :
+ #1 : (in La;)
+ name : '<init>'
+ type : '()V'
+ positions :
+ locals :
+ Virtual methods -
+ #0 : (in La;)
+ name : 'a'
+ type : '(Ljava/lang/String;)I'
+ positions :
+ 0x0000 line=2
+ 0x0003 line=3
+ 0x001b line=8
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #1 : (in La;)
+ name : 'a'
+ type : '(Ljava/lang/Object;)I'
+ positions :
+ 0x0000 line=8
+ 0x0003 line=9
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #2 : (in La;)
+ name : 'b'
+ type : '()La;'
+ positions :
+ 0x0000 line=1
+ locals :
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING = \
+"""org.chromium.Original -> a:
+ org.chromium.Original sDisplayAndroidManager -> e
+ org.chromium.Original another() -> b
+ 4:4:void inlined():237:237 -> a
+ 4:4:org.chromium.Original getInstance():203 -> a
+ 5:5:void org.chromium.Original$Subclass.<init>(org.chromium.Original,byte):130:130 -> a
+ 5:5:void initialize():237 -> a
+ 5:5:org.chromium.Original getInstance():203 -> a
+ 6:6:void initialize():237:237 -> a
+ 9:9:android.content.Context org.chromium.base.ContextUtils.getApplicationContext():49:49 -> a
+ 9:9:android.content.Context getContext():219 -> a
+ 9:9:void initialize():245 -> a
+ 9:9:org.chromium.Original getInstance():203 -> a"""
+
+OBFUSCATED_PROFILE = \
+"""La;
+PLa;->b()La;
+SLa;->a(Ljava/lang/Object;)I
+HPLa;->a(Ljava/lang/String;)I"""
+
+DEX_DUMP_2 = """
+
+Class descriptor : 'La;'
+ Direct methods -
+ #0 : (in La;)
+ name : '<clinit>'
+ type : '(Ljava/lang/String;)V'
+ code -
+ catches : 1
+ 0x000f - 0x001e
+ <any> -> 0x0093
+ positions :
+ 0x0001 line=310
+ 0x0057 line=313
+ locals :
+ #1 : (in La;)
+ name : '<init>'
+ type : '()V'
+ positions :
+ locals :
+ Virtual methods -
+ #0 : (in La;)
+ name : 'a'
+ type : '(Ljava/lang/String;)I'
+ positions :
+ 0x0000 line=2
+ 0x0003 line=3
+ 0x001b line=8
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #1 : (in La;)
+ name : 'c'
+ type : '(Ljava/lang/Object;)I'
+ positions :
+ 0x0000 line=8
+ 0x0003 line=9
+ locals :
+ 0x0000 - 0x0021 reg=3 this La;
+ #2 : (in La;)
+ name : 'b'
+ type : '()La;'
+ positions :
+ 0x0000 line=1
+ locals :
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING_2 = \
+"""org.chromium.Original -> a:
+ org.chromium.Original sDisplayAndroidManager -> e
+ org.chromium.Original another() -> b
+ void initialize() -> c
+ org.chromium.Original getInstance():203 -> a
+ 4:4:void inlined():237:237 -> a"""
+
+OBFUSCATED_PROFILE_2 = \
+"""La;
+PLa;->b()La;
+HPSLa;->a()La;
+HPLa;->c()V"""
+
+UNOBFUSCATED_PROFILE = \
+"""Lorg/chromium/Original;
+PLorg/chromium/Original;->another()Lorg/chromium/Original;
+HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;
+HPLorg/chromium/Original;->initialize()V"""
+
class GenerateProfileTests(unittest.TestCase):
  """Unit tests for convert_dex_profile.

  Uses dict.items() (not the Python 2-only iteritems()) and the
  non-deprecated assertEqual so the tests also run under Python 3.
  """

  def testProcessDex(self):
    dex = cp.ProcessDex(DEX_DUMP.splitlines())
    self.assertIsNotNone(dex['a'])

    self.assertEqual(len(dex['a'].FindMethodsAtLine('<clinit>', 311, 313)), 1)
    self.assertEqual(len(dex['a'].FindMethodsAtLine('<clinit>', 309, 315)), 1)
    clinit = dex['a'].FindMethodsAtLine('<clinit>', 311, 313)[0]
    self.assertEqual(clinit.name, '<clinit>')
    self.assertEqual(clinit.return_type, 'V')
    self.assertEqual(clinit.param_types, 'Ljava/lang/String;')

    self.assertEqual(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2)
    self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None))

# pylint: disable=protected-access
  def testProcessProguardMapping(self):
    dex = cp.ProcessDex(DEX_DUMP.splitlines())
    mapping, reverse = cp.ProcessProguardMapping(
        PROGUARD_MAPPING.splitlines(), dex)

    self.assertEqual('La;', reverse.GetClassMapping('Lorg/chromium/Original;'))

    getInstance = cp.Method(
        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
    another = cp.Method(
        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
    subclassInit = cp.Method(
        '<init>', 'Lorg/chromium/Original$Subclass;',
        'Lorg/chromium/Original;B', 'V')

    mapped = mapping.GetMethodMapping(
        cp.Method('a', 'La;', 'Ljava/lang/String;', 'I'))
    self.assertEqual(len(mapped), 2)
    self.assertIn(getInstance, mapped)
    self.assertNotIn(subclassInit, mapped)
    self.assertNotIn(
        cp.Method('inlined', 'Lorg/chromium/Original;', '', 'V'), mapped)
    self.assertIn(initialize, mapped)

    mapped = mapping.GetMethodMapping(
        cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I'))
    self.assertEqual(len(mapped), 1)
    self.assertIn(getInstance, mapped)

    mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;'))
    self.assertEqual(len(mapped), 1)
    self.assertIn(another, mapped)

    # .items() works on both Python 2 and 3 (iteritems() is Python 2 only).
    for from_method, to_methods in mapping._method_mapping.items():
      for to_method in to_methods:
        self.assertIn(from_method, reverse.GetMethodMapping(to_method))
    for from_class, to_class in mapping._class_mapping.items():
      self.assertEqual(from_class, reverse.GetClassMapping(to_class))

  def testProcessProfile(self):
    dex = cp.ProcessDex(DEX_DUMP.splitlines())
    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)

    getInstance = cp.Method(
        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
    another = cp.Method(
        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')

    self.assertIn('Lorg/chromium/Original;', profile._classes)
    self.assertIn(getInstance, profile._methods)
    self.assertIn(initialize, profile._methods)
    self.assertIn(another, profile._methods)

    self.assertEqual(profile._methods[getInstance], set(['H', 'S', 'P']))
    self.assertEqual(profile._methods[initialize], set(['H', 'P']))
    self.assertEqual(profile._methods[another], set(['P']))

  def testEndToEnd(self):
    dex = cp.ProcessDex(DEX_DUMP.splitlines())
    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)

    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
    with tempfile.NamedTemporaryFile() as temp:
      profile.WriteToFile(temp.name)
      with open(temp.name, 'r') as f:
        for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())):
          self.assertEqual(a.strip(), b.strip())

  def testObfuscateProfile(self):
    with build_utils.TempDir() as temp_dir:
      # The dex dump is used as the dexfile, by passing /bin/cat as the dexdump
      # program.
      dex_path = os.path.join(temp_dir, 'dexdump')
      with open(dex_path, 'w') as dex_file:
        dex_file.write(DEX_DUMP_2)
      mapping_path = os.path.join(temp_dir, 'mapping')
      with open(mapping_path, 'w') as mapping_file:
        mapping_file.write(PROGUARD_MAPPING_2)
      unobfuscated_path = os.path.join(temp_dir, 'unobfuscated')
      with open(unobfuscated_path, 'w') as unobfuscated_file:
        unobfuscated_file.write(UNOBFUSCATED_PROFILE)
      obfuscated_path = os.path.join(temp_dir, 'obfuscated')
      cp.ObfuscateProfile(unobfuscated_path, dex_path, mapping_path, '/bin/cat',
                          obfuscated_path)
      with open(obfuscated_path) as obfuscated_file:
        obfuscated_profile = sorted(obfuscated_file.readlines())
      for a, b in zip(
          sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile):
        self.assertEqual(a.strip(), b.strip())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/devil_chromium.json b/deps/v8/build/android/devil_chromium.json
new file mode 100644
index 0000000000..6cb7608c9e
--- /dev/null
+++ b/deps/v8/build/android/devil_chromium.json
@@ -0,0 +1,130 @@
+{
+ "config_type": "BaseConfig",
+ "dependencies": {
+ "aapt": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/aapt"
+ ]
+ }
+ }
+ },
+ "adb": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/platform-tools/adb"
+ ]
+ }
+ }
+ },
+ "android_build_tools_libc++": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/lib64/libc++.so"
+ ]
+ }
+ }
+ },
+ "android_sdk": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public"
+ ]
+ }
+ }
+ },
+ "dexdump": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/dexdump"
+ ]
+ }
+ }
+ },
+ "split-select": {
+ "file_info": {
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/android_sdk/public/build-tools/27.0.3/split-select"
+ ]
+ }
+ }
+ },
+ "pymock": {
+ "file_info": {
+ "darwin_x86_64": {
+ "local_paths": [
+ "../../third_party/pymock"
+ ]
+ },
+ "linux2_x86_64": {
+ "local_paths": [
+ "../../third_party/pymock"
+ ]
+ },
+ "win32_AMD64": {
+ "local_paths": [
+ "../../third_party/pymock"
+ ]
+ }
+ }
+ },
+ "simpleperf": {
+ "file_info": {
+ "android_armeabi-v7a": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/arm/simpleperf"
+ ]
+ },
+ "android_arm64-v8a": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/arm64/simpleperf"
+ ]
+ },
+ "android_x86": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/x86/simpleperf"
+ ]
+ },
+ "android_x86_64": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/android/x86_64/simpleperf"
+ ]
+ },
+ "linux_x86": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/linux/x86/simpleperf"
+ ]
+ },
+ "linux_x86_64": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf/bin/linux/x86_64/simpleperf"
+ ]
+ }
+ }
+ },
+ "simpleperf_scripts": {
+ "file_info": {
+ "default": {
+ "local_paths": [
+ "../../third_party/android_ndk/simpleperf"
+ ]
+ }
+ }
+ },
+ "llvm-symbolizer": {
+ "file_info": {
+ "default": {
+ "local_paths": [
+ "../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer"
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/deps/v8/build/android/devil_chromium.py b/deps/v8/build/android/devil_chromium.py
new file mode 100644
index 0000000000..d42402e40a
--- /dev/null
+++ b/deps/v8/build/android/devil_chromium.py
@@ -0,0 +1,170 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configures devil for use in chromium."""
+
+import os
+import sys
+
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
+from devil import devil_env
+from devil.android.ndk import abis
+
+_DEVIL_CONFIG = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), 'devil_chromium.json'))
+
+_DEVIL_BUILD_PRODUCT_DEPS = {
+ 'chromium_commands': [
+ {
+ 'platform': 'linux2',
+ 'arch': 'x86_64',
+ 'path_components': ['lib.java', 'chromium_commands.dex.jar'],
+ }
+ ],
+ 'forwarder_device': [
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM,
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM_64,
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips',
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips64',
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86,
+ 'path_components': ['forwarder_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86_64,
+ 'path_components': ['forwarder_dist'],
+ },
+ ],
+ 'forwarder_host': [
+ {
+ 'platform': 'linux2',
+ 'arch': 'x86_64',
+ 'path_components': ['host_forwarder'],
+ },
+ ],
+ 'md5sum_device': [
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM,
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.ARM_64,
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips',
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': 'mips64',
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86,
+ 'path_components': ['md5sum_dist'],
+ },
+ {
+ 'platform': 'android',
+ 'arch': abis.X86_64,
+ 'path_components': ['md5sum_dist'],
+ },
+ ],
+ 'md5sum_host': [
+ {
+ 'platform': 'linux2',
+ 'arch': 'x86_64',
+ 'path_components': ['md5sum_bin_host'],
+ },
+ ],
+}
+
+
def Initialize(output_directory=None, custom_deps=None, adb_path=None):
  """Initializes devil with chromium's binaries and third-party libraries.

  This includes:
    - Libraries:
      - the android SDK ("android_sdk")
      - pymock ("pymock")
    - Build products:
      - host & device forwarder binaries
        ("forwarder_device" and "forwarder_host")
      - host & device md5sum binaries ("md5sum_device" and "md5sum_host")

  Args:
    output_directory: An optional path to the output directory. If not set,
      no built dependencies are configured.
    custom_deps: An optional dictionary specifying custom dependencies.
      This should be of the form:

        {
          'dependency_name': {
            'platform': 'path',
            ...
          },
          ...
        }
    adb_path: An optional path to an adb binary; when given, it overrides the
      "adb" entry of the static config for the current platform.
  """

  devil_dynamic_config = {
    'config_type': 'BaseConfig',
    'dependencies': {},
  }
  if output_directory:
    output_directory = os.path.abspath(output_directory)
    # Expand the build-product table into devil's file_info layout, keyed
    # by "<platform>_<arch>". Use .items() (not the Python 2-only
    # .iteritems()) so this also works under Python 3.
    devil_dynamic_config['dependencies'] = {
      dep_name: {
        'file_info': {
          '%s_%s' % (dep_config['platform'], dep_config['arch']): {
            'local_paths': [
              os.path.join(output_directory, *dep_config['path_components']),
            ],
          }
          for dep_config in dep_configs
        }
      }
      for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.items()
    }
  if custom_deps:
    devil_dynamic_config['dependencies'].update(custom_deps)
  if adb_path:
    devil_dynamic_config['dependencies'].update({
      'adb': {
        'file_info': {
          devil_env.GetPlatform(): {
            'local_paths': [adb_path]
          }
        }
      }
    })

  devil_env.config.Initialize(
      configs=[devil_dynamic_config], config_files=[_DEVIL_CONFIG])
diff --git a/deps/v8/build/android/devil_chromium.pydeps b/deps/v8/build/android/devil_chromium.pydeps
new file mode 100644
index 0000000000..ea8f0c2f8a
--- /dev/null
+++ b/deps/v8/build/android/devil_chromium.pydeps
@@ -0,0 +1,38 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/devil_chromium.pydeps build/android/devil_chromium.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+devil_chromium.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
diff --git a/deps/v8/build/android/diff_resource_sizes.py b/deps/v8/build/android/diff_resource_sizes.py
new file mode 100755
index 0000000000..8066844fdd
--- /dev/null
+++ b/deps/v8/build/android/diff_resource_sizes.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs resource_sizes.py on two apks and outputs the diff."""
+
+import argparse
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from pylib.constants import host_paths
+from pylib.utils import shared_preference_utils
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+ from tracing.value import convert_chart_json # pylint: disable=import-error
+
+_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__))
+with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp')):
+ from util import build_utils # pylint: disable=import-error
+
+
+# Skeleton chartjson document; main() copies this and fills in 'charts'.
+_BASE_CHART = {
+    'format_version': '0.1',
+    'benchmark_name': 'resource_sizes_diff',
+    'benchmark_description': 'APK resource size diff information',
+    'trace_rerun_options': [],
+    'charts': {},
+}
+
+# File name resource_sizes.py writes its chartjson to, also reused for this
+# script's own chartjson output.
+_CHARTJSON_FILENAME = 'results-chart.json'
+# File name for histogram-format ("histograms") output.
+_HISTOGRAMS_FILENAME = 'perf_results.json'
+
+
+def DiffResults(chartjson, base_results, diff_results):
+  """Reports the diff between the two given results.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in, or None
+      to only print results.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  # NOTE: iteritems() makes this Python 2-only, like the rest of the file.
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      # Report (diff - base) so positive values mean the diff APK is larger;
+      # units/direction/importance are carried over from the base trace.
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title, trace_title,
+          diff_results['charts'][graph_title][trace_title]['value']
+          - trace['value'],
+          trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def AddIntermediateResults(chartjson, base_results, diff_results):
+  """Copies the intermediate size results into the output chartjson.
+
+  Args:
+    chartjson: A dictionary that chartjson results will be placed in.
+    base_results: The chartjson-formatted size results of the base APK.
+    diff_results: The chartjson-formatted size results of the diff APK.
+  """
+  # Re-report each base-APK trace under a '<graph>_base_apk' chart so the raw
+  # (non-diffed) numbers remain available in the combined output.
+  for graph_title, graph in base_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_base_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+  # Both base_results and diff_results should have the same charts/traces, but
+  # loop over them separately in case they don't
+  for graph_title, graph in diff_results['charts'].iteritems():
+    for trace_title, trace in graph.iteritems():
+      perf_tests_results_helper.ReportPerfResult(
+          chartjson, graph_title + '_diff_apk', trace_title,
+          trace['value'], trace['units'], trace['improvement_direction'],
+          trace['important'])
+
+
+def _CreateArgparser():
+  """Builds the argument parser for this script.
+
+  Returns:
+    An argparse.ArgumentParser. The caller is expected to use
+    parse_known_args() and forward any unknown arguments to
+    resource_sizes.py.
+  """
+  def chromium_path(arg):
+    # Resolve GN-style '//'-rooted paths against the Chromium source root;
+    # anything else is passed through unchanged.
+    if arg.startswith('//'):
+      return os.path.join(host_paths.DIR_SOURCE_ROOT, arg[2:])
+    return arg
+
+  argparser = argparse.ArgumentParser(
+      description='Diff resource sizes of two APKs. Arguments not listed here '
+                  'will be passed on to both invocations of resource_sizes.py.')
+  argparser.add_argument('--chromium-output-directory-base',
+                         dest='out_dir_base',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the base '
+                              'APK, i.e. what the size increase/decrease will '
+                              'be measured from.')
+  argparser.add_argument('--chromium-output-directory-diff',
+                         dest='out_dir_diff',
+                         type=chromium_path,
+                         help='Location of the build artifacts for the diff '
+                              'APK.')
+  argparser.add_argument('--chartjson',
+                         action='store_true',
+                         help='DEPRECATED. Use --output-format=chartjson '
+                              'instead.')
+  argparser.add_argument('--output-format',
+                         choices=['chartjson', 'histograms'],
+                         help='Output the results to a file in the given '
+                              'format instead of printing the results.')
+  argparser.add_argument('--include-intermediate-results',
+                         action='store_true',
+                         help='Include the results from the resource_sizes.py '
+                              'runs in the chartjson output.')
+  argparser.add_argument('--output-dir',
+                         default='.',
+                         type=chromium_path,
+                         help='Directory to save chartjson to.')
+  argparser.add_argument('--base-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the base APK, i.e. what the size '
+                              'increase/decrease will be measured from.')
+  argparser.add_argument('--diff-apk',
+                         required=True,
+                         type=chromium_path,
+                         help='Path to the diff APK, i.e. the APK whose size '
+                              'increase/decrease will be measured against the '
+                              'base APK.')
+  return argparser
+
+
+def main():
+ args, unknown_args = _CreateArgparser().parse_known_args()
+ # TODO(bsheedy): Remove this once all uses of --chartjson are removed.
+ if args.chartjson:
+ args.output_format = 'chartjson'
+
+ chartjson = _BASE_CHART.copy() if args.output_format else None
+
+ with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir:
+ # Run resource_sizes.py on the two APKs
+ resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py')
+ shared_args = (['python', resource_sizes_path, '--output-format=chartjson']
+ + unknown_args)
+
+ base_args = shared_args + ['--output-dir', base_dir, args.base_apk]
+ if args.out_dir_base:
+ base_args += ['--chromium-output-directory', args.out_dir_base]
+ try:
+ subprocess.check_output(base_args, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ print e.output
+ raise
+
+ diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk]
+ if args.out_dir_diff:
+ diff_args += ['--chromium-output-directory', args.out_dir_diff]
+ try:
+ subprocess.check_output(diff_args, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ print e.output
+ raise
+
+ # Combine the separate results
+ base_file = os.path.join(base_dir, _CHARTJSON_FILENAME)
+ diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME)
+ base_results = shared_preference_utils.ExtractSettingsFromJson(base_file)
+ diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file)
+ DiffResults(chartjson, base_results, diff_results)
+ if args.include_intermediate_results:
+ AddIntermediateResults(chartjson, base_results, diff_results)
+
+ if args.output_format:
+ chartjson_path = os.path.join(os.path.abspath(args.output_dir),
+ _CHARTJSON_FILENAME)
+ logging.critical('Dumping diff chartjson to %s', chartjson_path)
+ with open(chartjson_path, 'w') as outfile:
+ json.dump(chartjson, outfile)
+
+ if args.output_format == 'histograms':
+ histogram_result = convert_chart_json.ConvertChartJson(chartjson_path)
+ if histogram_result.returncode != 0:
+ logging.error('chartjson conversion failed with error: %s',
+ histogram_result.stdout)
+ return 1
+
+ histogram_path = os.path.join(os.path.abspath(args.output_dir),
+ 'perf_results.json')
+ logging.critical('Dumping diff histograms to %s', histogram_path)
+ with open(histogram_path, 'w') as json_file:
+ json_file.write(histogram_result.stdout)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/docs/README.md b/deps/v8/build/android/docs/README.md
new file mode 100644
index 0000000000..b6f0a6e9c2
--- /dev/null
+++ b/deps/v8/build/android/docs/README.md
@@ -0,0 +1,11 @@
+# Android Build Docs
+
+* [android_app_bundles.md](android_app_bundles.md)
+* [build_config.md](build_config.md)
+* [coverage.md](coverage.md)
+* [lint.md](lint.md)
+* [life_of_a_resource.md](life_of_a_resource.md)
+* [../incremental_install/README.md](../incremental_install/README.md)
+
+See also:
+* [//build/README.md](../../README.md)
diff --git a/deps/v8/build/android/docs/android_app_bundles.md b/deps/v8/build/android/docs/android_app_bundles.md
new file mode 100644
index 0000000000..8934477195
--- /dev/null
+++ b/deps/v8/build/android/docs/android_app_bundles.md
@@ -0,0 +1,210 @@
+# Introduction
+
+This document describes how the Chromium build system supports Android app
+bundles.
+
+[TOC]
+
+# Overview of app bundles
+
+An Android app bundle is an alternative application distribution format for
+Android applications on the Google Play Store, that allows reducing the size
+of binaries sent for installation to individual devices that run on Android L
+and beyond. For more information about them, see the official Android
+[documentation](https://developer.android.com/guide/app-bundle/).
+
+For the context of this document, the most important points are:
+
+ - Unlike a regular APK (e.g. `foo.apk`), the bundle (e.g. `foo.aab`) cannot
+ be installed directly on a device.
+
+ - Instead, it must be processed into a set of installable split APKs, which
+ are stored inside a special zip archive (e.g. `foo.apks`).
+
+ - The splitting can be based on various criteria: e.g. language or screen
+ density for resources, or cpu ABI for native code.
+
+ - The bundle also uses the notion of dynamic features modules (DFMs) to
+ separate several application features. Each module has its own code, assets
+ and resources, and can be installed separately from the rest of the
+ application if needed.
+
+ - The main application itself is stored in the '`base`' module (this name
+ cannot be changed).
+
+
+# Declaring app bundles with GN templates
+
+Here's an example that shows how to declare a simple bundle that contains a
+single base module, which enables language-based splits:
+
+```gn
+
+ # First declare the first bundle module. The base module is the one
+ # that contains the main application's code, resources and assets.
+ android_app_bundle_module("foo_base_module") {
+ # Declaration are similar to android_apk here.
+ ...
+ }
+
+ # Second, declare the bundle itself.
+ android_app_bundle("foo_bundle") {
+ # Indicate the base module to use for this bundle
+ base_module_target = ":foo_base_module"
+
+ # The name of our bundle file (without any suffix). Default would
+ # be 'foo_bundle' otherwise.
+ bundle_name = "FooBundle"
+
+ # Signing your bundle is required to upload it to the Play Store
+ # but since signing is very slow, avoid doing it for non official
+ # builds. Signing the bundle is not required for local testing.
+ sign_bundle = is_official_build
+
+ # Enable language-based splits for this bundle. Which means that
+ # resources and assets specific to a given language will be placed
+ # into their own split APK in the final .apks archive.
+ enable_language_splits = true
+
+ # Proguard settings must be passed at the bundle, not module, target.
+ proguard_enabled = !is_java_debug
+ }
+```
+
+When generating the `foo_bundle` target with Ninja, you will end up with
+the following:
+
+ - The bundle file under `out/Release/apks/FooBundle.aab`
+
+ - A helper script called `out/Release/bin/foo_bundle`, which can be used
+ to install / launch / uninstall the bundle on local devices.
+
+ This works like an APK wrapper script (e.g. `foo_apk`). Use `--help`
+ to see all possible commands supported by the script.
+
+
+# Declaring dynamic feature modules with GN templates
+
+Please see
+[Dynamic Feature Modules](../../../docs/android_dynamic_feature_modules.md) for
+more details. In short, if you need more modules besides the base one, you
+will need to list all the extra ones using the extra_modules variable which
+takes a list of GN scopes, as in:
+
+```gn
+
+ android_app_bundle_module("foo_base_module") {
+ ...
+ }
+
+ android_app_bundle_module("foo_extra_module") {
+ ...
+ }
+
+ android_app_bundle("foo_bundle") {
+ base_module_target = ":foo_base_module"
+
+ extra_modules = [
+ { # NOTE: Scopes require one field per line, and no comma separators.
+ name = "my_module"
+ module_target = ":foo_extra_module"
+ }
+ ]
+
+ ...
+ }
+```
+
+Note that each extra module is identified by a unique name, which cannot
+be '`base`'.
+
+
+# Bundle signature issues
+
+Signing an app bundle is not necessary, unless you want to upload it to the
+Play Store. Since this process is very slow (it uses `jarsigner` instead of
+the much faster `apkbuilder`), you can control it with the `sign_bundle`
+variable, as described in the example above.
+
+The `.apks` archive however always contains signed split APKs. The keystore
+path/password/alias being used are the default ones, unless you use custom
+values when declaring the bundle itself, as in:
+
+```gn
+ android_app_bundle("foo_bundle") {
+ ...
+ keystore_path = "//path/to/keystore"
+ keystore_password = "K3y$t0Re-Pa$$w0rd"
+ keystore_name = "my-signing-key-name"
+ }
+```
+
+These values are not stored in the bundle itself, but in the wrapper script,
+which will use them to generate the `.apks` archive for you. This allows you
+to properly install updates on top of existing applications on any device.
+
+
+# Proguard and bundles
+
+When using an app bundle that is made of several modules, it is crucial to
+ensure that proguard, if enabled:
+
+- Keeps the obfuscated class names used by each module consistent.
+- Does not remove classes that are not used in one module, but referenced
+ by others.
+
+To achieve this, a special scheme called *synchronized proguarding* is
+performed, which consists of the following steps:
+
+- The list of unoptimized .jar files from all modules are sent to a single
+ proguard command. This generates a new temporary optimized *group* .jar file.
+
+- Each module extracts the optimized class files from the optimized *group*
+ .jar file, to generate its own, module-specific, optimized .jar.
+
+- Each module-specific optimized .jar is then sent to dex generation.
+
+This synchronized proguarding step is added by the `android_app_bundle()` GN
+template. In practice this means the following:
+
+ - `proguard_enabled` and `proguard_jar_path` must be passed to
+   `android_app_bundle` targets, but not to `android_app_bundle_module` ones.
+
+ - `proguard_configs` can be still passed to individual modules, just
+ like regular APKs. All proguard configs will be merged during the
+ synchronized proguard step.
+
+
+# Manual generation and installation of .apks archives
+
+Note that the `foo_bundle` script knows how to generate the .apks archive
+from the bundle file, and install it to local devices for you. For example,
+to install and launch a bundle, use:
+
+```sh
+ out/Release/bin/foo_bundle run
+```
+
+If you want to manually look or use the `.apks` archive, use the following
+command to generate it:
+
+```sh
+ out/Release/bin/foo_bundle build-bundle-apks \
+ --output-apks=/tmp/BundleFoo.apks
+```
+
+All split APKs within the archive will be properly signed. And you will be
+able to look at its content (with `unzip -l`), or install it manually with:
+
+```sh
+ build/android/gyp/bundletool.py install-apks \
+ --apks=/tmp/BundleFoo.apks \
+ --adb=$(which adb)
+```
+
+The task of examining the manifest is simplified by running the following,
+which dumps the application manifest as XML to stdout:
+
+```sh
+ build/android/gyp/bundletool.py dump-manifest
+```
diff --git a/deps/v8/build/android/docs/build_config.md b/deps/v8/build/android/docs/build_config.md
new file mode 100644
index 0000000000..74af651af7
--- /dev/null
+++ b/deps/v8/build/android/docs/build_config.md
@@ -0,0 +1,170 @@
+# Introduction
+
+This document describes the `.build_config` files that are used by the
+Chromium build system for Android-specific targets like APK, resources,
+and more.
+
+[TOC]
+
+# I. Overview of .build_config files:
+
+The Android build requires performing computations about dependencies in
+various targets, which are not possible with the GN build language. To address
+this, `.build_config` files are written during the build to store the needed
+per-target information as JSON files.
+
+They are always written to `$target_gen_dir/${target_name}.build_config`.
+
+Many scripts under [`build/android/gyp/`](build/android/gyp/), which are used
+during the build, can also accept parameter arguments using
+`@FileArg references`, which look like:
+
+ --some-param=@FileArg(<filename>:<key1>:<key2>:..<keyN>)
+
+This placeholder will ensure that `<filename>` is read as a JSON file, then
+return the value at `[key1][key2]...[keyN]` for the `--some-param` option.
+
+Apart from that, the scripts do not need to know anything about the structure
+of `.build_config` files (but the GN rules that invoke them do and select
+which `@FileArg()` references to use).
+
+For a concrete example, consider the following GN fragment:
+
+```gn
+# From //ui/android/BUILD.gn:
+android_resources("ui_java_resources") {
+ custom_package = "org.chromium.ui"
+ resource_dirs = [ "java/res" ]
+ deps = [
+ ":ui_strings_grd",
+ ]
+}
+```
+
+This will end up generating the following JSON file under
+`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config`:
+
+```json
+{
+ "deps_info": {
+ "deps_configs": [
+ "gen/ui/android/ui_strings_grd.build_config"
+ ],
+ "name": "ui_java_resources.build_config",
+ "package_name": "org.chromium.ui",
+ "path": "gen/ui/android/ui_java_resources.build_config",
+ "r_text": "gen/ui/android/ui_java_resources_R.txt",
+ "resources_dirs": [
+ "../../ui/android/java/res"
+ ],
+ "resources_zip": "resource_zips/ui/android/ui_java_resources.resources.zip",
+ "srcjar": "gen/ui/android/ui_java_resources.srcjar",
+ "type": "android_resources"
+ },
+ "gradle": {},
+ "resources": {
+ "dependency_zips": [
+ "resource_zips/ui/android/ui_strings_grd.resources.zip"
+ ],
+ "extra_package_names": [],
+ "extra_r_text_files": []
+ }
+}
+```
+
+NOTE: All path values in `.build_config` files are relative to your
+`$CHROMIUM_OUTPUT_DIR`.
+
+# II. Generation of .build_config files:
+
+They are generated by the GN [`write_build_config()`](gn_write_build_config)
+internal template, which ends up invoking
+[`write_build_config.py`](write_build_config_py). For our example above, this
+is with the following parameters:
+
+```
+python ../../build/android/gyp/write_build_config.py \
+ --type=android_resources \
+ --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \
+ --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config\"\] \
+ --build-config gen/ui/android/ui_java_resources.build_config \
+ --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \
+ --package-name org.chromium.ui \
+ --r-text gen/ui/android/ui_java_resources_R.txt \
+ --resource-dirs=\[\"../../ui/android/java/res\"\] \
+ --srcjar gen/ui/android/ui_java_resources.srcjar
+```
+
+Note that *most* of the content of the JSON file comes from command-line
+parameters, but not all of it.
+
+In particular, the `resources['dependency_zips']` entry was computed by
+inspecting the content of all dependencies (here, only
+`ui_string_grd.build_config`), and collecting their
+`deps_configs['resources_zip']` values.
+
+Because a target's `.build_config` file will always be generated after
+that of all of its dependencies,
+[`write_build_config.py`](write_build_config_py) can traverse the
+whole (transitive) set of direct *and* indirect dependencies for a given target
+and extract useful information out of it.
+
+This is the kind of processing that cannot be done at the GN language level,
+and is very powerful for Android builds.
+
+
+# III. Usage of .build_config files:
+
+In addition to being parsed by `write_build_config.py`, when they are listed
+in the `--deps-configs` of a given target, the `.build_config` files are used
+by other scripts under [build/android/gyp/] to build stuff.
+
+For example, the GN `android_resources` template uses it to invoke the
+[`process_resources.py`] script with the following command, in order to
+generate various related files (e.g. `ui_java_resources_R.txt`):
+
+```sh
+python ../../build/android/gyp/process_resources.py \
+ --depfile gen/ui/android/ui_java_resources_1.d \
+ --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-28/android.jar \
+ --aapt-path ../../third_party/android_sdk/public/build-tools/27.0.3/aapt \
+ --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:dependency_zips\) \
+ --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_package_names\) \
+ --extra-r-text-files=@FileArg\(gen/ui/android/ui_java_resources.build_config:resources:extra_r_text_files\) \
+ --resource-dirs=\[\"../../ui/android/java/res\"\] \
+ --debuggable \
+ --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \
+ --r-text-out gen/ui/android/ui_java_resources_R.txt \
+ --srcjar-out gen/ui/android/ui_java_resources.srcjar \
+ --non-constant-id \
+ --custom-package org.chromium.ui \
+ --shared-resources
+```
+
+Note the use of `@FileArg()` references here, to tell the script where to find
+the information it needs.
+
+
+# IV. Format of .build_config files:
+
+Thanks to `@FileArg()` references, Python build scripts under
+[`build/android/gyp/`](build/android/gyp/) do not need to know anything
+about the internal format of `.build_config` files.
+
+This format is decided between internal GN build rules and
+[`write_build_config.py`][write_build_config_py]. Since this format changes
+rather often, the format documentation is kept inside the Python script itself,
+but
+can be extracted as a Markdown file and visualized with the following commands:
+
+```sh
+# Extract .build_config format documentation
+build/android/gyp/write_build_config.py \
+ --generate-markdown-format-doc > /tmp/format.md
+
+# Launch a browser to visualize the format documentation.
+python tools/md_browser/md_browser.py -d /tmp /tmp/format.md
+```
+
+[build/android/gyp/]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/
+[gn_write_build_config]: https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?q=write_build_config&sq=package:chromium
+[write_build_config_py]: https://chromium.googlesource.com/chromium/src/build/+/master/android/gyp/write_build_config.py
diff --git a/deps/v8/build/android/docs/coverage.md b/deps/v8/build/android/docs/coverage.md
new file mode 100644
index 0000000000..14dbef6ac9
--- /dev/null
+++ b/deps/v8/build/android/docs/coverage.md
@@ -0,0 +1,56 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage data for android
+instrumentation and junit tests.
+
+[TOC]
+
+## How EMMA coverage works
+
+In order to use EMMA code coverage, we need to create build time **.em** files
+and runtime **.ec** files. Then we need to process them using the
+build/android/generate_emma_html.py script.
+
+## How to collect EMMA coverage data
+
+1. Use the following GN build arguments:
+
+```gn
+target_os = "android"
+emma_coverage = true
+emma_filter = "org.chromium.chrome.browser.ntp.*,-*Test*,-*Fake*,-*Mock*"
+```
+
+The filter syntax is as documented for the [EMMA coverage
+filters](http://emma.sourceforge.net/reference/ch02s06s02.html).
+
+Now when building, **.em** files will be created in the build directory.
+
+2. Run tests, with option `--coverage-dir <directory>`, to specify where to save
+ the .ec file. For example, you can run chrome junit tests:
+ `out/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+
+3. Turn off strict mode when running instrumentation tests by adding
+ `--strict-mode=off` because the EMMA code causes strict mode violations by
+ accessing disk.
+
+4. Use a pre-L Android OS (running Dalvik) because code coverage is not
+ supported in ART.
+
+5. The coverage results of junit and instrumentation tests will be merged
+ automatically if they are in the same directory.
+
+6. Now we have both .em and .ec files. We can create a html report using
+ `generate_emma_html.py`, for example:
+
+ ```shell
+ build/android/generate_emma_html.py \
+ --coverage-dir /tmp/coverage/ \
+ --metadata-dir out/Debug/ \
+ --output example.html
+ ```
+ Then an example.html containing coverage info will be created:
+
+ ```
+ EMMA: writing [html] report to [<your_current_directory>/example.html] ...
+ ```
diff --git a/deps/v8/build/android/docs/life_of_a_resource.md b/deps/v8/build/android/docs/life_of_a_resource.md
new file mode 100644
index 0000000000..bd1ffcd994
--- /dev/null
+++ b/deps/v8/build/android/docs/life_of_a_resource.md
@@ -0,0 +1,233 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* AndroidManifest.xml
+ * Including AndroidManifest.xml files from libraries, which get merged
+ together
+* res/ directories
+
+The steps produce the following intermediate files:
+* R.srcjar (contains R.java files)
+* R.txt
+* .resources.zip
+
+The steps produce the following files within an .apk:
+* AndroidManifest.xml (a binary xml file)
+* resources.arsc (contains all values and configuration metadata)
+* res/** (drawables and layouts)
+* classes.dex (just a small portion of classes from generated R.java files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through the
+following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other .build_config files
+
+Outputs:
+* Target-specific .build_config file
+
+write_build_config.py is run to record target metadata needed by future steps.
+For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific build\_config file
+* Target-specific Resource dirs (res/ directories)
+* resources.zip files from dependencies (used to generate the R.txt/java files)
+
+Outputs:
+* Target-specific resources.zip (containing only resources in the
+  target-specific resource dirs, no dependent resources here).
+* Target-specific R.txt
+ * Contains a list of resources and their ids (including of dependencies).
+* Target-specific R.java .srcjar
+ * See [What are R.java files and how are they generated](
+ #how-r_java-files-are-generated)
+
+prepare\_resources.py zips up the target-specific resource dirs and generates
+R.txt and R.java .srcjars. No optimizations, crunching, etc are done on the
+resources.
+
+**The following steps apply only to apk targets (not library targets).**
+
+### 3. Finalizes apk resources:
+
+Inputs:
+* Target-specific build\_config file
+* Dependencies' resources.zip files
+
+Output:
+* Packaged resources zip (named foo.ap_) containing:
+ * AndroidManifest.xml (as binary xml)
+ * resources.arsc
+ * res/**
+* Final R.txt
+ * Contains a list of resources and their ids (including of dependencies).
+* Final R.java .srcjar
+ * See [What are R.java files and how are they generated](
+ #how-r_java-files-are-generated)
+
+
+#### 3(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following happens:
+* Use a regex (defined in the apk target) to remove select resources (optional).
+* Convert png images to webp for binary size (optional).
+* Move drawables in mdpi to non-mdpi directory ([why?](http://crbug.com/289843))
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+ other resources will now use the id rather than the name for faster lookup at
+ runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts of
+ the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+ dependency).
+
+
+#### 3(b). Links resources:
+
+After each dependency is compiled into an intermediate .zip, all those zips are
+linked by the aapt2 link command which does the following:
+* Use the order of dependencies supplied so that some resources clobber each
+  other.
+* Compile the AndroidManifest.xml to binary xml (references to resources are now
+ using ids rather than the string names)
+* Create a resources.arsc file that has the name and values of string
+ resources as well as the name and path of non-string resources (ie. layouts
+ and drawables).
+* Combine the compiled resources into one packaged resources apk (a zip file
+ with an .ap\_ extension) that has all the resources related files.
+
+
+#### 3(c). Optimizes resources:
+
+This step obfuscates / strips resources names from the resources.arsc so that
+they can be looked up only by their numeric ids (assigned in the compile
+resources step). Access to resources via `Resources.getIdentifier()` no longer
+works unless resources are [whitelisted](#adding-resources-to-the-whitelist).
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each module
+has its resources compiled and linked separately (ie: it goes through the
+entire process for each module). The modules are then combined to form a
+bundle. Moreover, during "Finalizing the apk resources" step, bundle modules
+produce a `resources.proto` file instead of a `resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked resources
+of the base module are passed in. However, linking against resources currently
+works only with `resources.arsc` format. Thus, when building the base module,
+resources are compiled as both `resources.arsc` and `resources.proto`.
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stacktrace could be something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their name obfuscated/stripped during the optimize resources step. To help with
+debugging, the `R.txt` file is archived. The `R.txt` file contains a mapping
+from resource ids to resource names and can be used to get the original resource
+name from the id. In the above message the id is `0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to archived
+apks.
+
+### Adding resources to the whitelist
+
+If a resource is accessed via `getIdentifier()` it needs to be whitelisted in an
+aapt2 resources config file. The config file looks like this:
+
+```
+<resource type>/<resource name>#no_obfuscate
+```
+eg:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_path` variable. To add a resource to the whitelist, check
+where the config is for your target and add a new line for your resource. If
+none exist, create a new config file and pass its path in your target.
+
+### Webview resource ids
+
+The first two bytes of a resource id is the package id. For regular apks, this
+is `0x7f`. However, Webview is a shared library which gets loaded into other
+apks. The package id for webview resources is assigned dynamically at runtime.
+When webview is loaded it [rewrites all resources][ResourceRewriter.java] to
+have the correct package id. When deobfuscating webview resource ids, disregard
+the first two bytes in the id when looking it up in the `R.txt` file.
+
+Monochrome, when loaded as webview, rewrites the package ids of resources used
+by the webview portion to the correct value at runtime, otherwise, its resources
+have package id `0x7f` when run as a regular apk.
+
+[ResourceRewriter.java]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/glue/glue/generated_java/com/android/webview/chromium/ResourceRewriter.java
+
+## How R.java files are generated
+
+This is what a sample R.java file looks like:
+
+```java
+package org.chromium.ui;
+
+public final class R {
+ public static final class attr {
+ public static final int buttonAlignment = 0x7f030038;
+ public static final int buttonColor = 0x7f03003e;
+ public static final int layout = 0x7f030094;
+ public static final int roundedfillColor = 0x7f0300bf;
+ public static final int secondaryButtonText = 0x7f0300c4;
+ public static final int stackedMargin = 0x7f0300d4;
+ }
+ public static final class id {
+ public static final int apart = 0x7f080021;
+ public static final int dropdown_body_footer_divider = 0x7f08003d;
+ public static final int dropdown_body_list = 0x7f08003e;
+ public static final int dropdown_footer = 0x7f08003f;
+ }
+ public static final class layout {
+ public static final int dropdown_item = 0x7f0a0022;
+ public static final int dropdown_window = 0x7f0a0023;
+ }
+}
+```
+
+R.java is a list of static classes, each with multiple static fields containing
+ids. These ids are used in java code to reference resources in the apk. The
+R.java file generated via the prepare resources step above has temporary ids
+which are not marked `final`. That R.java file is only used so that javac can
+compile the java code that references R.*.
+
+The R.java generated during the finalize apk resources step has
+permanent ids. These ids are marked as `final` (except webview resources that
+need to be [rewritten at runtime](#webview-resource-ids)).
diff --git a/deps/v8/build/android/docs/lint.md b/deps/v8/build/android/docs/lint.md
new file mode 100644
index 0000000000..37f35502e5
--- /dev/null
+++ b/deps/v8/build/android/docs/lint.md
@@ -0,0 +1,91 @@
+# Lint
+
+Android's [**lint**](http://developer.android.com/tools/help/lint.html) is a static
+analysis tool that Chromium uses to catch possible issues in Java code.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium runs lint on a per-target basis for all targets using any of the
+following templates if they are marked as Chromium code (i.e.,
+`chromium_code = true`):
+
+ - `android_apk`
+ - `android_library`
+ - `instrumentation_test_apk`
+ - `unittest_apk`
+
+Chromium also runs lint on a per-target basis for all targets using any of the
+following templates if they are marked as Chromium code and they support
+Android (i.e., `supports_android = true`):
+
+ - `java_library`
+
+This is implemented in the
+[`android_lint`](https://code.google.com/p/chromium/codesearch#chromium/src/build/config/android/internal_rules.gni&q=android_lint%20file:internal_rules%5C.gni)
+gn template.
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it in code
+
+Android provides an annotation,
+[`@SuppressLint`](http://developer.android.com/reference/android/annotation/SuppressLint.html),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage
+in Chromium is typically limited to the first three.
+
+Like many suppression annotations, `@SuppressLint` takes a value that tells **lint**
+what to ignore. It can be a single `String`:
+
+```java
+@SuppressLint("NewApi")
+public void foo() {
+ a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressLint({
+ "NewApi",
+ "UseSparseArrays"
+ })
+public Map<Integer, FakeObject> bar() {
+ Map<Integer, FakeObject> shouldBeASparseArray = new HashMap<Integer, FakeObject>();
+ another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+ return shouldBeASparseArray;
+}
+```
+
+This is the preferred way of suppressing warnings in a limited scope.
+
+### Suppress it in the suppressions XML file
+
+**lint** can be given an XML configuration containing warnings or errors that
+should be ignored. Chromium's lint suppression XML file can be found in
+[`build/android/lint/suppressions.xml`](https://chromium.googlesource.com/chromium/src/+/master/build/android/lint/suppressions.xml).
+It can be updated to suppress current warnings by running:
+
+```bash
+$ python build/android/lint/suppress.py <result.xml file>
+```
+
+e.g., to suppress lint errors found in `media_java`:
+
+```bash
+$ python build/android/lint/suppress.py out/Debug/gen/media/base/android/media_java__lint/result.xml
+```
+
+**This mechanism should only be used for disabling warnings across the entire code base; class-specific lint warnings should be disabled inline.**
+
diff --git a/deps/v8/build/android/download_doclava.py b/deps/v8/build/android/download_doclava.py
new file mode 100755
index 0000000000..f9b3af635d
--- /dev/null
+++ b/deps/v8/build/android/download_doclava.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+def main():
+ # Some Windows bots inadvertently have third_party/android_sdk installed,
+ # but are unable to run download_from_google_storage because depot_tools
+ # is not in their path, so avoid failure and bail.
+ if sys.platform == 'win32':
+ return 0
+ subprocess.check_call([
+ 'download_from_google_storage',
+ '--no_resume',
+ '--no_auth',
+ '--bucket', 'chromium-doclava',
+ '--extract',
+ '-s',
+ os.path.join('src', 'buildtools', 'android', 'doclava.tar.gz.sha1')])
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/dump_apk_resource_strings.py b/deps/v8/build/android/dump_apk_resource_strings.py
new file mode 100755
index 0000000000..51e01f39f0
--- /dev/null
+++ b/deps/v8/build/android/dump_apk_resource_strings.py
@@ -0,0 +1,662 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A script to parse and dump localized strings in resource.arsc files."""
+
+import argparse
+import collections
+import contextlib
+import cProfile
+import os
+import re
+import subprocess
+import sys
+import zipfile
+
+# pylint: disable=bare-except
+
+# Assuming this script is located under build/android, try to import
+# build/android/gyp/bundletool.py to get the default path to the bundletool
+# jar file. If this fail, using --bundletool-path will be required to parse
+# bundles, allowing this script to be relocated or reused somewhere else.
+try:
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gyp'))
+ import bundletool
+
+ _DEFAULT_BUNDLETOOL_PATH = bundletool.BUNDLETOOL_JAR_PATH
+except:
+ _DEFAULT_BUNDLETOOL_PATH = None
+
+# Try to get the path of the aapt build tool from catapult/devil.
+try:
+ import devil_chromium # pylint: disable=unused-import
+ from devil.android.sdk import build_tools
+ _AAPT_DEFAULT_PATH = build_tools.GetPath('aapt')
+except:
+ _AAPT_DEFAULT_PATH = None
+
+
+def AutoIndentStringList(lines, indentation=2):
+ """Auto-indents a input list of text lines, based on open/closed braces.
+
+ For example, the following input text:
+
+ 'Foo {',
+ 'Bar {',
+ 'Zoo',
+ '}',
+ '}',
+
+ Will return the following:
+
+ 'Foo {',
+ ' Bar {',
+ ' Zoo',
+ ' }',
+ '}',
+
+ The rules are pretty simple:
+ - A line that ends with an open brace ({) increments indentation.
+ - A line that starts with a closing brace (}) decrements it.
+
+ The main idea is to make outputting structured text data trivial,
+ since it can be assumed that the final output will be passed through
+ this function to make it human-readable.
+
+ Args:
+    lines: an iterator over input text lines. They should not contain
+      line terminators (e.g. '\n').
+ Returns:
+ A new list of text lines, properly auto-indented.
+ """
+ margin = ''
+ result = []
+ # NOTE: Intentional but significant speed optimizations in this function:
+ # - |line and line[0] == <char>| instead of |line.startswith(<char>)|.
+ # - |line and line[-1] == <char>| instead of |line.endswith(<char>)|.
+ for line in lines:
+ if line and line[0] == '}':
+ margin = margin[:-indentation]
+ result.append(margin + line)
+ if line and line[-1] == '{':
+ margin += ' ' * indentation
+
+ return result
+
+
+# pylint: disable=line-too-long
+
+# NOTE: aapt dump will quote the following characters only: \n, \ and "
+# see https://android.googlesource.com/platform/frameworks/base/+/master/libs/androidfw/ResourceTypes.cpp#7270
+
+# pylint: enable=line-too-long
+
+
+def UnquoteString(s):
+ """Unquote a given string from aapt dump.
+
+ Args:
+    s: A UTF-8 encoded string that contains backslashes for quotes, as found
+ in the output of 'aapt dump resources --values'.
+ Returns:
+ The unquoted version of the input string.
+ """
+ if not '\\' in s:
+ return s
+
+ result = ''
+ start = 0
+ size = len(s)
+ while start < size:
+ pos = s.find('\\', start)
+ if pos < 0:
+ break
+
+ result += s[start:pos]
+ count = 1
+ while pos + count < size and s[pos + count] == '\\':
+ count += 1
+
+ result += '\\' * (count / 2)
+ start = pos + count
+ if count & 1:
+ if start < size:
+ ch = s[start]
+ if ch == 'n': # \n is the only non-printable character supported.
+ ch = '\n'
+ result += ch
+ start += 1
+ else:
+ result += '\\'
+
+ result += s[start:]
+ return result
+
+
+assert UnquoteString(r'foo bar') == 'foo bar'
+assert UnquoteString(r'foo\nbar') == 'foo\nbar'
+assert UnquoteString(r'foo\\nbar') == 'foo\\nbar'
+assert UnquoteString(r'foo\\\nbar') == 'foo\\\nbar'
+assert UnquoteString(r'foo\n\nbar') == 'foo\n\nbar'
+assert UnquoteString(r'foo\\bar') == r'foo\bar'
+
+
+def QuoteString(s):
+ """Quote a given string for external output.
+
+ Args:
+ s: An input UTF-8 encoded string.
+ Returns:
+ A quoted version of the string, using the same rules as 'aapt dump'.
+ """
+ # NOTE: Using repr() would escape all non-ASCII bytes in the string, which
+ # is undesirable.
+ return s.replace('\\', r'\\').replace('"', '\\"').replace('\n', '\\n')
+
+
+assert QuoteString(r'foo "bar"') == 'foo \\"bar\\"'
+assert QuoteString('foo\nbar') == 'foo\\nbar'
+
+
+def ReadStringMapFromRTxt(r_txt_path):
+ """Read all string resource IDs and names from an R.txt file.
+
+ Args:
+ r_txt_path: Input file path.
+ Returns:
+ A {res_id -> res_name} dictionary corresponding to the string resources
+ from the input R.txt file.
+ """
+ # NOTE: Typical line of interest looks like:
+ # int string AllowedDomainsForAppsTitle 0x7f130001
+ result = {}
+ prefix = 'int string '
+ with open(r_txt_path) as f:
+ for line in f:
+ line = line.rstrip()
+ if line.startswith(prefix):
+ res_name, res_id = line[len(prefix):].split(' ')
+ result[int(res_id, 0)] = res_name
+ return result
+
+
+class ResourceStringValues(object):
+ """Models all possible values for a named string."""
+
+ def __init__(self):
+ self.res_name = None
+ self.res_values = {}
+
+ def AddValue(self, res_name, res_config, res_value):
+ """Add a new value to this entry.
+
+ Args:
+ res_name: Resource name. If this is not the first time this method
+ is called with the same resource name, then |res_name| should match
+ previous parameters for sanity checking.
+ res_config: Config associated with this value. This can actually be
+ anything that can be converted to a string.
+ res_value: UTF-8 encoded string value.
+ """
+ if res_name is not self.res_name and res_name != self.res_name:
+ if self.res_name is None:
+ self.res_name = res_name
+ else:
+ # Sanity check: the resource name should be the same for all chunks.
+ # Resource ID is redefined with a different name!!
+ print 'WARNING: Resource key ignored (%s, should be %s)' % (
+ res_name, self.res_name)
+
+ if self.res_values.setdefault(res_config, res_value) is not res_value:
+ print 'WARNING: Duplicate value definition for [config %s]: %s ' \
+ '(already has %s)' % (
+ res_config, res_value, self.res_values[res_config])
+
+ def ToStringList(self, res_id):
+ """Convert entry to string list for human-friendly output."""
+ values = sorted(
+ [(str(config), value) for config, value in self.res_values.iteritems()])
+ if res_id is None:
+ # res_id will be None when the resource ID should not be part
+ # of the output.
+ result = ['name=%s count=%d {' % (self.res_name, len(values))]
+ else:
+ result = [
+ 'res_id=0x%08x name=%s count=%d {' % (res_id, self.res_name,
+ len(values))
+ ]
+ for config, value in values:
+ result.append('%-16s "%s"' % (config, QuoteString(value)))
+ result.append('}')
+ return result
+
+
+class ResourceStringMap(object):
+ """Convenience class to hold the set of all localized strings in a table.
+
+ Usage is the following:
+ 1) Create new (empty) instance.
+ 2) Call AddValue() repeatedly to add new values.
+ 3) Eventually call RemapResourceNames() to remap resource names.
+ 4) Call ToStringList() to convert the instance to a human-readable
+ list of strings that can later be used with AutoIndentStringList()
+ for example.
+ """
+
+ def __init__(self):
+ self._res_map = collections.defaultdict(ResourceStringValues)
+
+ def AddValue(self, res_id, res_name, res_config, res_value):
+ self._res_map[res_id].AddValue(res_name, res_config, res_value)
+
+ def RemapResourceNames(self, id_name_map):
+ """Rename all entries according to a given {res_id -> res_name} map."""
+ for res_id, res_name in id_name_map.iteritems():
+ if res_id in self._res_map:
+ self._res_map[res_id].res_name = res_name
+
+ def ToStringList(self, omit_ids=False):
+ """Dump content to a human-readable string list.
+
+ Note that the strings are ordered by their resource name first, and
+ resource id second.
+
+ Args:
+ omit_ids: If True, do not put resource IDs in the result. This might
+ be useful when comparing the outputs of two different builds of the
+ same APK, or two related APKs (e.g. ChromePublic.apk vs Chrome.apk)
+ where the resource IDs might be slightly different, but not the
+ string contents.
+    Returns:
+ A list of strings that can later be sent to AutoIndentStringList().
+ """
+ result = ['Resource strings (count=%d) {' % len(self._res_map)]
+ res_map = self._res_map
+
+ # A small function to compare two (res_id, values) tuples
+ # by resource name first, then resource ID.
+ def cmp_id_name(a, b):
+ result = cmp(a[1].res_name, b[1].res_name)
+ if result == 0:
+ result = cmp(a[0], b[0])
+ return result
+
+ for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name):
+ result += res_map[res_id].ToStringList(None if omit_ids else res_id)
+ result.append('} # Resource strings')
+ return result
+
+
+@contextlib.contextmanager
+def ManagedOutput(output_file):
+ """Create an output File object that will be closed on exit if necessary.
+
+ Args:
+ output_file: Optional output file path.
+ Yields:
+ If |output_file| is empty, this simply yields sys.stdout. Otherwise, this
+ opens the file path for writing text, and yields its File object. The
+ context will ensure that the object is always closed on scope exit.
+ """
+ close_output = False
+ if output_file:
+ output = open(output_file, 'wt')
+ close_output = True
+ else:
+ output = sys.stdout
+ try:
+ yield output
+ finally:
+ if close_output:
+ output.close()
+
+
+@contextlib.contextmanager
+def ManagedPythonProfiling(enable_profiling, sort_key='tottime'):
+ """Enable Python profiling if needed.
+
+ Args:
+ enable_profiling: Boolean flag. True to enable python profiling.
+ sort_key: Sorting key for the final stats dump.
+ Yields:
+ If |enable_profiling| is False, this yields False. Otherwise, this
+ yields a new Profile instance just after enabling it. The manager
+ ensures that profiling stops and prints statistics on scope exit.
+ """
+ pr = None
+ if enable_profiling:
+ pr = cProfile.Profile()
+ pr.enable()
+ try:
+ yield pr
+ finally:
+ if pr:
+ pr.disable()
+ pr.print_stats(sort=sort_key)
+
+
+def IsFilePathABundle(input_file):
+ """Return True iff |input_file| holds an Android app bundle."""
+ try:
+ with zipfile.ZipFile(input_file) as input_zip:
+ _ = input_zip.getinfo('BundleConfig.pb')
+ return True
+ except:
+ return False
+
+
+# Example output from 'bundletool dump resources --values' corresponding
+# to strings:
+#
+# 0x7F1200A0 - string/abc_action_menu_overflow_description
+# (default) - [STR] "More options"
+# locale: "ca" - [STR] "Més opcions"
+# locale: "da" - [STR] "Flere muligheder"
+# locale: "fa" - [STR] " گزینه<U+200C>های بیشتر"
+# locale: "ja" - [STR] "その他のオプション"
+# locale: "ta" - [STR] "மேலும் விருப்பங்கள்"
+# locale: "nb" - [STR] "Flere alternativer"
+# ...
+#
+# Fun fact #1: Bundletool uses <lang>-<REGION> instead of <lang>-r<REGION>
+# for locales!
+#
+# Fun fact #2: The <U+200C> is terminal output for \u200c, the output is
+# really UTF-8 encoded when it is read by this script.
+#
+# Fun fact #3: Bundletool quotes \n, \\ and \" just like aapt since 0.8.0.
+#
+_RE_BUNDLE_STRING_RESOURCE_HEADER = re.compile(
+ r'^0x([0-9A-F]+)\s\-\sstring/(\w+)$')
+assert _RE_BUNDLE_STRING_RESOURCE_HEADER.match(
+ '0x7F1200A0 - string/abc_action_menu_overflow_description')
+
+_RE_BUNDLE_STRING_DEFAULT_VALUE = re.compile(
+ r'^\s+\(default\) - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+ ' (default) - [STR] "More options"')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+ ' (default) - [STR] "More options"').group(1) == "More options"
+
+_RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile(
+ r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(
+ u' locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8'))
+
+
+def ParseBundleResources(bundle_tool_jar_path, bundle_path):
+ """Use bundletool to extract the localized strings of a given bundle.
+
+ Args:
+ bundle_tool_jar_path: Path to bundletool .jar executable.
+ bundle_path: Path to input bundle.
+ Returns:
+ A new ResourceStringMap instance populated with the bundle's content.
+ """
+ cmd_args = [
+ 'java', '-jar', bundle_tool_jar_path, 'dump', 'resources', '--bundle',
+ bundle_path, '--values'
+ ]
+ p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+ res_map = ResourceStringMap()
+ current_resource_id = None
+ current_resource_name = None
+ keep_parsing = True
+ need_value = False
+ while keep_parsing:
+ line = p.stdout.readline()
+ if not line:
+ break
+ # Do not use rstrip(), since this should only remove trailing newlines
+ # but not trailing whitespace that happen to be embedded in the string
+ # value for some reason.
+ line = line.rstrip('\n\r')
+ m = _RE_BUNDLE_STRING_RESOURCE_HEADER.match(line)
+ if m:
+ current_resource_id = int(m.group(1), 16)
+ current_resource_name = m.group(2)
+ need_value = True
+ continue
+
+ if not need_value:
+ continue
+
+ resource_config = None
+ m = _RE_BUNDLE_STRING_DEFAULT_VALUE.match(line)
+ if m:
+ resource_config = 'config (default)'
+ resource_value = m.group(1)
+ else:
+ m = _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(line)
+ if m:
+ resource_config = 'config %s' % m.group(1)
+ resource_value = m.group(2)
+
+ if resource_config is None:
+ need_value = False
+ continue
+
+ res_map.AddValue(current_resource_id, current_resource_name,
+ resource_config, UnquoteString(resource_value))
+ return res_map
+
+
+# Name of the binary resources table file inside an APK.
+RESOURCES_FILENAME = 'resources.arsc'
+
+
+def IsFilePathAnApk(input_file):
+ """Returns True iff a ZipFile instance is for a regular APK."""
+ try:
+ with zipfile.ZipFile(input_file) as input_zip:
+ _ = input_zip.getinfo(RESOURCES_FILENAME)
+ return True
+ except:
+ return False
+
+
+# pylint: disable=line-too-long
+
+# Example output from 'aapt dump resources --values' corresponding
+# to strings:
+#
+# config zh-rHK
+# resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)
+# (string8) "瀏覽首頁"
+# resource 0x7f12009d org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000c8e0 (s=0x0008 r=0x00)
+# (string8) "向上瀏覽"
+#
+
+# The following are compiled regular expressions used to recognize each
+# kind of line and extract relevant information.
+#
+_RE_AAPT_CONFIG = re.compile(r'^\s+config (.+):$')
+assert _RE_AAPT_CONFIG.match(' config (default):')
+assert _RE_AAPT_CONFIG.match(' config zh-rTW:')
+
+# Match an ISO 639-1 or ISO 639-2 locale.
+_RE_AAPT_ISO_639_LOCALE = re.compile(r'^[a-z]{2,3}(-r[A-Z]{2,3})?$')
+assert _RE_AAPT_ISO_639_LOCALE.match('de')
+assert _RE_AAPT_ISO_639_LOCALE.match('zh-rTW')
+assert _RE_AAPT_ISO_639_LOCALE.match('fil')
+assert not _RE_AAPT_ISO_639_LOCALE.match('land')
+
+_RE_AAPT_BCP47_LOCALE = re.compile(r'^b\+[a-z][a-zA-Z0-9\+]+$')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr+Latn')
+assert _RE_AAPT_BCP47_LOCALE.match('b+en+US')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+1234')
+
+_RE_AAPT_STRING_RESOURCE_HEADER = re.compile(
+ r'^\s+resource 0x([0-9a-f]+) [a-zA-Z][a-zA-Z0-9.]+:string/(\w+):.*$')
+assert _RE_AAPT_STRING_RESOURCE_HEADER.match(
+ r' resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)'
+)
+
+_RE_AAPT_STRING_RESOURCE_VALUE = re.compile(r'^\s+\(string8\) "(.*)"$')
+assert _RE_AAPT_STRING_RESOURCE_VALUE.match(r' (string8) "瀏覽首頁"')
+
+# pylint: enable=line-too-long
+
+
+def _ConvertAaptLocaleToBcp47(locale):
+ """Convert a locale name from 'aapt dump' to its BCP-47 form."""
+ if locale.startswith('b+'):
+ return '-'.join(locale[2:].split('+'))
+ lang, _, region = locale.partition('-r')
+ if region:
+ return '%s-%s' % (lang, region)
+ return lang
+
+
+assert _ConvertAaptLocaleToBcp47('(default)') == '(default)'
+assert _ConvertAaptLocaleToBcp47('en') == 'en'
+assert _ConvertAaptLocaleToBcp47('en-rUS') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('en-US') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('fil') == 'fil'
+assert _ConvertAaptLocaleToBcp47('b+sr+Latn') == 'sr-Latn'
+
+
+def ParseApkResources(aapt_path, apk_path):
+ """Use aapt to extract the localized strings of a given bundle.
+
+ Args:
+ bundle_tool_jar_path: Path to bundletool .jar executable.
+ bundle_path: Path to input bundle.
+ Returns:
+ A new ResourceStringMap instance populated with the bundle's content.
+ """
+ cmd_args = [aapt_path, 'dump', '--values', 'resources', apk_path]
+ p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+
+ res_map = ResourceStringMap()
+ current_locale = None
+ current_resource_id = None
+ current_resource_name = None
+ need_value = False
+ while True:
+ line = p.stdout.readline().rstrip()
+ if not line:
+ break
+ m = _RE_AAPT_CONFIG.match(line)
+ if m:
+ locale = None
+ aapt_locale = m.group(1)
+ if aapt_locale == '(default)':
+ locale = aapt_locale
+ elif _RE_AAPT_ISO_639_LOCALE.match(aapt_locale):
+ locale = aapt_locale
+ elif _RE_AAPT_BCP47_LOCALE.match(aapt_locale):
+ locale = aapt_locale
+ if locale is not None:
+ current_locale = _ConvertAaptLocaleToBcp47(locale)
+ continue
+
+ if current_locale is None:
+ continue
+
+ if need_value:
+ m = _RE_AAPT_STRING_RESOURCE_VALUE.match(line)
+ if not m:
+ # Should not happen
+ sys.stderr.write('WARNING: Missing value for string ID 0x%08x "%s"' %
+ (current_resource_id, current_resource_name))
+ resource_value = '<MISSING_STRING_%08x>' % current_resource_id
+ else:
+ resource_value = UnquoteString(m.group(1))
+
+ res_map.AddValue(current_resource_id, current_resource_name,
+ 'config %s' % current_locale, resource_value)
+ need_value = False
+ else:
+ m = _RE_AAPT_STRING_RESOURCE_HEADER.match(line)
+ if m:
+ current_resource_id = int(m.group(1), 16)
+ current_resource_name = m.group(2)
+ need_value = True
+
+ return res_map
+
+
+def main(args):
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument(
+ 'input_file',
+ help='Input file path. This can be either an APK, or an app bundle.')
+ parser.add_argument('--output', help='Optional output file path.')
+ parser.add_argument(
+ '--omit-ids',
+ action='store_true',
+ help='Omit resource IDs in the output. This is useful '
+ 'to compare the contents of two distinct builds of the '
+ 'same APK.')
+ parser.add_argument(
+ '--aapt-path',
+ default=_AAPT_DEFAULT_PATH,
+ help='Path to aapt executable. Optional for APKs.')
+ parser.add_argument(
+ '--r-txt-path',
+ help='Path to an optional input R.txt file used to translate resource '
+ 'IDs to string names. Useful when resources names in the input files '
+    'were obfuscated. NOTE: If ${INPUT_FILE}.R.txt exists, it will be used '
+ 'automatically by this script.')
+ parser.add_argument(
+ '--bundletool-path',
+ default=_DEFAULT_BUNDLETOOL_PATH,
+ help='Path to alternate bundletool .jar file. Only used for bundles.')
+ parser.add_argument(
+ '--profile', action='store_true', help='Enable Python profiling.')
+
+ options = parser.parse_args(args)
+
+ # Create a {res_id -> res_name} map for unobfuscation, if needed.
+ res_id_name_map = {}
+ r_txt_path = options.r_txt_path
+ if not r_txt_path:
+ candidate_r_txt_path = options.input_file + '.R.txt'
+ if os.path.exists(candidate_r_txt_path):
+ r_txt_path = candidate_r_txt_path
+
+ if r_txt_path:
+ res_id_name_map = ReadStringMapFromRTxt(r_txt_path)
+
+ # Create a helper lambda that creates a new ResourceStringMap instance
+ # based on the input file's type.
+ if IsFilePathABundle(options.input_file):
+ if not options.bundletool_path:
+ parser.error(
+ '--bundletool-path <BUNDLETOOL_JAR> is required to parse bundles.')
+
+ # use bundletool to parse the bundle resources.
+ def create_string_map():
+ return ParseBundleResources(options.bundletool_path, options.input_file)
+
+ elif IsFilePathAnApk(options.input_file):
+ if not options.aapt_path:
+ parser.error('--aapt-path <AAPT> is required to parse APKs.')
+
+ # Use aapt dump to parse the APK resources.
+ def create_string_map():
+ return ParseApkResources(options.aapt_path, options.input_file)
+
+ else:
+ parser.error('Unknown file format: %s' % options.input_file)
+
+ # Print everything now.
+ with ManagedOutput(options.output) as output:
+ with ManagedPythonProfiling(options.profile):
+ res_map = create_string_map()
+ res_map.RemapResourceNames(res_id_name_map)
+ lines = AutoIndentStringList(res_map.ToStringList(options.omit_ids))
+ for line in lines:
+ output.write(line)
+ output.write('\n')
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/emma_coverage_stats.py b/deps/v8/build/android/emma_coverage_stats.py
new file mode 100755
index 0000000000..fe1775a8a2
--- /dev/null
+++ b/deps/v8/build/android/emma_coverage_stats.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates incremental code coverage reports for Java code in Chromium.
+
+Usage:
+
+ build/android/emma_coverage_stats.py -v --out <output file path> --emma-dir
+ <EMMA file directory> --lines-for-coverage-file
+ <path to file containing lines for coverage>
+
+ Creates a JSON representation of the overall and file coverage stats and saves
+ this information to the specified output file.
+"""
+
+import argparse
+import collections
+import json
+import logging
+import os
+import re
+import sys
+from xml.etree import ElementTree
+
+import devil_chromium
+from devil.utils import run_tests_helper
+
# Line-coverage status codes for a single source row in an EMMA HTML report.
# They mirror the CSS class of each <TR> (see _EmmaHtmlParser._CSS_TO_STATUS);
# NOT_EXECUTABLE is used when a row carries no coverage class at all.
NOT_EXECUTABLE = -1
NOT_COVERED = 0
COVERED = 1
PARTIALLY_COVERED = 2

# Coverage information about a single line of code.
# lineno: int line number in the Java source file.
# source: the Java source text of the line.
# covered_status: one of the status constants above.
# fractional_line_coverage: fraction in [0, 1]; 1.0 unless PARTIALLY_COVERED.
LineCoverage = collections.namedtuple(
    'LineCoverage',
    ['lineno', 'source', 'covered_status', 'fractional_line_coverage'])
+
+
class _EmmaHtmlParser(object):
  """Encapsulates HTML file parsing operations.

  This class contains all operations related to parsing HTML files that were
  produced using the EMMA code coverage tool.

  Example HTML:

  Package links:
    <a href="_files/1.html">org.chromium.chrome</a>
    This is returned by the selector |XPATH_SELECT_PACKAGE_ELEMENTS|.

  Class links:
    <a href="1e.html">DoActivity.java</a>
    This is returned by the selector |XPATH_SELECT_CLASS_ELEMENTS|.

  Line coverage data:
    <tr class="p">
      <td class="l" title="78% line coverage (7 out of 9)">108</td>
      <td title="78% line coverage (7 out of 9 instructions)">
        if (index < 0 || index = mSelectors.size()) index = 0;</td>
    </tr>
    <tr>
      <td class="l">109</td>
      <td> </td>
    </tr>
    <tr class="c">
      <td class="l">110</td>
      <td> if (mSelectors.get(index) != null) {</td>
    </tr>
    <tr class="z">
      <td class="l">111</td>
      <td> for (int i = 0; i < mSelectors.size(); i++) {</td>
    </tr>
    Each <tr> element is returned by the selector |XPATH_SELECT_LOC|.

  We can parse this to get:
    1. Line number
    2. Line of source code
    3. Coverage status (c, z, or p)
    4. Fractional coverage value (% out of 100 if PARTIALLY_COVERED)
  """
  # NOTE: selectors and attribute lookups below use uppercase tag/attribute
  # names (BODY, TABLE, HREF, ...) because that is how the EMMA reports are
  # written (see the uppercase markup in the docstring example).

  # Selector to match all <a> elements within the rows that are in the table
  # that displays all of the different packages.
  _XPATH_SELECT_PACKAGE_ELEMENTS = './/BODY/TABLE[4]/TR/TD/A'

  # Selector to match all <a> elements within the rows that are in the table
  # that displays all of the different classes within a package.
  _XPATH_SELECT_CLASS_ELEMENTS = './/BODY/TABLE[3]/TR/TD/A'

  # Selector to match all <tr> elements within the table containing Java source
  # code in an EMMA HTML file.
  _XPATH_SELECT_LOC = './/BODY/TABLE[4]/TR'

  # Children of HTML elements are represented as a list in ElementTree. These
  # constants represent list indices corresponding to relevant child elements.

  # Child 1 contains percentage covered for a line.
  _ELEMENT_PERCENT_COVERED = 1

  # Child 1 contains the original line of source code.
  _ELEMENT_CONTAINING_SOURCE_CODE = 1

  # Child 0 contains the line number.
  _ELEMENT_CONTAINING_LINENO = 0

  # Maps CSS class names to corresponding coverage constants.
  _CSS_TO_STATUS = {'c': COVERED, 'p': PARTIALLY_COVERED, 'z': NOT_COVERED}

  # UTF-8 no break space.
  # NOTE(review): this is the two-byte UTF-8 encoding of U+00A0 held in a
  # Python 2 str; the byte-level replace in GetLineCoverage depends on it.
  # Confirm before porting this file to Python 3.
  _NO_BREAK_SPACE = '\xc2\xa0'

  def __init__(self, emma_file_base_dir):
    """Initializes _EmmaHtmlParser.

    Args:
      emma_file_base_dir: Path to the location where EMMA report files are
        stored. Should be where index.html is stored.
    """
    self._base_dir = emma_file_base_dir
    # Per-class coverage pages live under "<base_dir>/_files".
    self._emma_files_path = os.path.join(self._base_dir, '_files')
    self._index_path = os.path.join(self._base_dir, 'index.html')

  def GetLineCoverage(self, emma_file_path):
    """Returns a list of LineCoverage objects for the given EMMA HTML file.

    Args:
      emma_file_path: String representing the path to the EMMA HTML file.

    Returns:
      A list of LineCoverage objects.
    """
    line_tr_elements = self._FindElements(
        emma_file_path, self._XPATH_SELECT_LOC)
    line_coverage = []
    for tr in line_tr_elements:
      # Get the coverage status. Rows without a recognized CLASS attribute
      # (e.g. blank or comment lines) are marked NOT_EXECUTABLE.
      coverage_status = self._CSS_TO_STATUS.get(tr.get('CLASS'), NOT_EXECUTABLE)
      # Get the fractional coverage value.
      if coverage_status == PARTIALLY_COVERED:
        title_attribute = (tr[self._ELEMENT_PERCENT_COVERED].get('TITLE'))
        # Parse string that contains percent covered: "83% line coverage ...".
        percent_covered = title_attribute.split('%')[0]
        fractional_coverage = int(percent_covered) / 100.0
      else:
        fractional_coverage = 1.0

      # Get the line number.
      lineno_element = tr[self._ELEMENT_CONTAINING_LINENO]
      # Handles oddly formatted HTML (where there is an extra <a> tag).
      lineno = int(lineno_element.text or
                   lineno_element[self._ELEMENT_CONTAINING_LINENO].text)
      # Get the original line of Java source code.
      raw_source = tr[self._ELEMENT_CONTAINING_SOURCE_CODE].text
      # Encode to UTF-8 bytes so the byte sequence in _NO_BREAK_SPACE can be
      # replaced with a plain ASCII space.
      utf8_source = raw_source.encode('UTF-8')
      source = utf8_source.replace(self._NO_BREAK_SPACE, ' ')

      line = LineCoverage(lineno, source, coverage_status, fractional_coverage)
      line_coverage.append(line)

    return line_coverage

  def GetPackageNameToEmmaFileDict(self):
    """Returns a dict mapping Java packages to EMMA HTML coverage files.

    Parses the EMMA index.html file to get a list of packages, then parses each
    package HTML file to get a list of classes for that package, and creates
    a dict with this info.

    Returns:
      A dict mapping string representation of Java packages (with class
        names appended) to the corresponding file paths of EMMA HTML files.
    """
    # These <a> elements contain each package name and the path of the file
    # where all classes within said package are listed.
    package_link_elements = self._FindElements(
        self._index_path, self._XPATH_SELECT_PACKAGE_ELEMENTS)
    # Maps file path of package directory (EMMA generated) to package name.
    # Example: emma_dir/f.html: org.chromium.chrome.
    package_links = {
        os.path.join(self._base_dir, link.attrib['HREF']): link.text
        for link in package_link_elements if 'HREF' in link.attrib
    }

    package_to_emma = {}
    for package_emma_file_path, package_name in package_links.iteritems():
      # These <a> elements contain each class name in the current package and
      # the path of the file where the coverage info is stored for each class.
      coverage_file_link_elements = self._FindElements(
          package_emma_file_path, self._XPATH_SELECT_CLASS_ELEMENTS)

      for class_name_element in coverage_file_link_elements:
        emma_coverage_file_path = os.path.join(
            self._emma_files_path, class_name_element.attrib['HREF'])
        # Key is "<package>.<ClassName>.java" since class link text is the
        # Java file name (e.g. "DoActivity.java").
        full_package_name = '%s.%s' % (package_name, class_name_element.text)
        package_to_emma[full_package_name] = emma_coverage_file_path

    return package_to_emma

  # pylint: disable=no-self-use
  def _FindElements(self, file_path, xpath_selector):
    """Reads a HTML file and performs an XPath match.

    Args:
      file_path: String representing the path to the HTML file.
      xpath_selector: String representing xpath search pattern.

    Returns:
      A list of ElementTree.Elements matching the given XPath selector.
        Returns an empty list if there is no match.
    """
    with open(file_path) as f:
      # Round-trip through Latin-1 — presumably to force arbitrary report
      # bytes into valid UTF-8 before XML parsing; confirm against the
      # encoding EMMA actually emits.
      file_contents = f.read().decode('ISO-8859-1').encode('UTF-8')
      root = ElementTree.fromstring(file_contents)
      return root.findall(xpath_selector)
+
+
class _EmmaCoverageStats(object):
  """Computes code coverage stats for Java code using the coverage tool EMMA.

  This class provides an API that allows users to capture absolute code coverage
  and code coverage on a subset of lines for each Java source file. Coverage
  reports are generated in JSON format.
  """
  # Regular expression to get package name from Java package statement.
  RE_PACKAGE_MATCH_GROUP = 'package'
  RE_PACKAGE = re.compile(r'package (?P<%s>[\w.]*);' % RE_PACKAGE_MATCH_GROUP)

  def __init__(self, emma_file_base_dir, files_for_coverage):
    """Initialize _EmmaCoverageStats.

    Args:
      emma_file_base_dir: String representing the path to the base directory
        where EMMA HTML coverage files are stored, i.e. parent of index.html.
      files_for_coverage: A list of Java source code file paths to get EMMA
        coverage for.
    """
    self._emma_parser = _EmmaHtmlParser(emma_file_base_dir)
    self._source_to_emma = self._GetSourceFileToEmmaFileDict(files_for_coverage)

  def GetCoverageDict(self, lines_for_coverage):
    """Returns a dict containing detailed coverage information.

    Gets detailed coverage stats for each file specified in the
    |lines_for_coverage| dict and the total incremental number of lines covered
    and executable for all files in |lines_for_coverage|.

    Args:
      lines_for_coverage: A dict mapping Java source file paths to lists of line
        numbers.

    Returns:
      A dict containing coverage stats for the given dict of files and lines.
        Contains absolute coverage stats for each file, coverage stats for each
        file's lines specified in |lines_for_coverage|, line by line coverage
        for each file, and overall coverage stats for the lines specified in
        |lines_for_coverage|.
    """
    file_coverage = {}
    # .items()/.values() instead of the Python-2-only iteritems()/itervalues()
    # keep this module runnable under Python 3 with identical behavior.
    for file_path, line_numbers in lines_for_coverage.items():
      file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers)
      if file_coverage_dict:
        file_coverage[file_path] = file_coverage_dict
      else:
        logging.warning(
            'No code coverage data for %s, skipping.', file_path)

    covered_statuses = [s['incremental'] for s in file_coverage.values()]
    num_covered_lines = sum(s['covered'] for s in covered_statuses)
    num_total_lines = sum(s['total'] for s in covered_statuses)
    return {
        'files': file_coverage,
        'patch': {
            'incremental': {
                'covered': num_covered_lines,
                'total': num_total_lines
            }
        }
    }

  def GetCoverageDictForFile(self, file_path, line_numbers):
    """Returns a dict containing detailed coverage info for the given file.

    Args:
      file_path: The path to the Java source file that we want to create the
        coverage dict for.
      line_numbers: A list of integer line numbers to retrieve additional stats
        for.

    Returns:
      A dict containing absolute, incremental, and line by line coverage for
        a file, or None if no coverage data is known for |file_path|.
    """
    if file_path not in self._source_to_emma:
      return None
    emma_file = self._source_to_emma[file_path]
    total_line_coverage = self._emma_parser.GetLineCoverage(emma_file)
    incremental_line_coverage = [line for line in total_line_coverage
                                 if line.lineno in line_numbers]
    line_by_line_coverage = [
        {
            'line': line.source,
            'coverage': line.covered_status,
            'changed': line.lineno in line_numbers,
            'fractional_coverage': line.fractional_line_coverage,
        }
        for line in total_line_coverage
    ]
    total_covered_lines, total_lines = (
        self.GetSummaryStatsForLines(total_line_coverage))
    incremental_covered_lines, incremental_total_lines = (
        self.GetSummaryStatsForLines(incremental_line_coverage))

    file_coverage_stats = {
        'absolute': {
            'covered': total_covered_lines,
            'total': total_lines
        },
        'incremental': {
            'covered': incremental_covered_lines,
            'total': incremental_total_lines
        },
        'source': line_by_line_coverage,
    }
    return file_coverage_stats

  # pylint: disable=no-self-use
  def GetSummaryStatsForLines(self, line_coverage):
    """Gets summary stats for a given list of LineCoverage objects.

    Args:
      line_coverage: A list of LineCoverage objects.

    Returns:
      A tuple containing the number of lines that are covered and the total
        number of lines that are executable, respectively.
    """
    partially_covered_sum = 0
    covered_status_totals = {COVERED: 0, NOT_COVERED: 0, PARTIALLY_COVERED: 0}
    for line in line_coverage:
      status = line.covered_status
      # Non-executable lines are excluded from both totals.
      if status == NOT_EXECUTABLE:
        continue
      covered_status_totals[status] += 1
      if status == PARTIALLY_COVERED:
        partially_covered_sum += line.fractional_line_coverage

    # Partially covered lines contribute their fraction, so the covered count
    # may be non-integral.
    total_covered = covered_status_totals[COVERED] + partially_covered_sum
    total_lines = sum(covered_status_totals.values())
    return total_covered, total_lines

  def _GetSourceFileToEmmaFileDict(self, files):
    """Gets a dict used to correlate Java source files with EMMA HTML files.

    This method gathers the information needed to correlate EMMA HTML
    files with Java source files. EMMA XML and plain text reports do not provide
    line by line coverage data, so HTML reports must be used instead.
    Unfortunately, the HTML files that are created are given garbage names
    (i.e 1.html) so we need to manually correlate EMMA HTML files
    with the original Java source files.

    Args:
      files: A list of file names for which coverage information is desired.

    Returns:
      A dict mapping Java source file paths to EMMA HTML file paths.
    """
    # Maps Java source file paths to package names.
    # Example: /usr/code/file.java -> org.chromium.file.java.
    source_to_package = {}
    for file_path in files:
      package = self.GetPackageNameFromFile(file_path)
      if package:
        source_to_package[file_path] = package
      else:
        logging.warning("Skipping %s because it doesn\'t have a package "
                        "statement.", file_path)

    # Maps package names to EMMA report HTML files.
    # Example: org.chromium.file.java -> out/coverage/1a.html.
    package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict()
    # Finally, we have a dict mapping Java file paths to EMMA report files.
    # Example: /usr/code/file.java -> out/coverage/1a.html.
    source_to_emma = {source: package_to_emma[package]
                      for source, package in source_to_package.items()
                      if package in package_to_emma}
    return source_to_emma

  @staticmethod
  def NeedsCoverage(file_path):
    """Checks to see if the file needs to be analyzed for code coverage.

    Args:
      file_path: A string representing path to the file.

    Returns:
      True for Java files that exist, False for all others.
    """
    if os.path.splitext(file_path)[1] == '.java' and os.path.exists(file_path):
      return True
    else:
      logging.info('Skipping file %s, cannot compute code coverage.', file_path)
      return False

  @staticmethod
  def GetPackageNameFromFile(file_path):
    """Gets the full package name including the file name for a given file path.

    Args:
      file_path: String representing the path to the Java source file.

    Returns:
      A string representing the full package name with file name appended or
        None if there is no package statement in the file.
    """
    with open(file_path) as f:
      file_content = f.read()
      package_match = re.search(_EmmaCoverageStats.RE_PACKAGE, file_content)
      if package_match:
        package = package_match.group(_EmmaCoverageStats.RE_PACKAGE_MATCH_GROUP)
        file_name = os.path.basename(file_path)
        return '%s.%s' % (package, file_name)
      else:
        return None
+
+
def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir):
  """Generates a coverage report for a given set of lines.

  Writes the results of the coverage analysis to the file specified by
  |out_file_path|.

  Args:
    line_coverage_file: The path to a file which contains a dict mapping file
      names to lists of line numbers. Example: {file1: [1, 2, 3], ...} means
      that we should compute coverage information on lines 1 - 3 for file1.
    out_file_path: A string representing the location to write the JSON report.
    coverage_dir: A string representing the file path where the EMMA
      HTML coverage files are located (i.e. folder where index.html is located).
  """
  with open(line_coverage_file) as f:
    potential_files_for_coverage = json.load(f)

  # Keep only existing Java files. .items() rather than the Python-2-only
  # iteritems() keeps this module runnable under Python 3 with identical
  # behavior.
  files_for_coverage = {f: lines
                        for f, lines in potential_files_for_coverage.items()
                        if _EmmaCoverageStats.NeedsCoverage(f)}

  coverage_results = {}
  if files_for_coverage:
    code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys())
    coverage_results = code_coverage.GetCoverageDict(files_for_coverage)
  else:
    logging.info('No Java files requiring coverage were included in %s.',
                 line_coverage_file)

  # An empty dict is still written so downstream consumers always get a file.
  with open(out_file_path, 'w+') as out_status_file:
    json.dump(coverage_results, out_status_file)
+
+
def main():
  """Command-line entry point: parse arguments and emit the JSON report."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--out', required=True, type=str,
                      help='Report output file path.')
  parser.add_argument('--emma-dir', required=True, type=str,
                      help='EMMA HTML report directory.')
  parser.add_argument('--lines-for-coverage-file', required=True, type=str,
                      help='File containing a JSON object. Should contain a '
                      'dict mapping file names to lists of line numbers of '
                      'code for which coverage information is desired.')
  parser.add_argument('-v', '--verbose', action='count',
                      help='Print verbose log information.')
  arguments = parser.parse_args()

  # Configure logging verbosity first so subsequent steps can log.
  run_tests_helper.SetLogLevel(arguments.verbose)
  devil_chromium.Initialize()
  GenerateCoverageReport(arguments.lines_for_coverage_file, arguments.out,
                         arguments.emma_dir)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/emma_coverage_stats_test.py b/deps/v8/build/android/emma_coverage_stats_test.py
new file mode 100755
index 0000000000..44f6dc3586
--- /dev/null
+++ b/deps/v8/build/android/emma_coverage_stats_test.py
@@ -0,0 +1,563 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+from xml.etree import ElementTree
+
+import emma_coverage_stats
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
# Expected shape of a coverage report when no files were analyzed: no per-file
# entries and zero incremental patch totals.
EMPTY_COVERAGE_STATS_DICT = {
    'files': {},
    'patch': {
        'incremental': {
            'covered': 0, 'total': 0
        }
    }
}
+
+
class _EmmaHtmlParserTest(unittest.TestCase):
  """Tests for _EmmaHtmlParser.

  Uses modified EMMA report HTML that contains only the subset of tags needed
  for test verification.
  """

  def setUp(self):
    self.emma_dir = 'fake/dir/'
    self.parser = emma_coverage_stats._EmmaHtmlParser(self.emma_dir)
    self.simple_html = '<TR><TD CLASS="p">Test HTML</TD></TR>'
    # Trimmed-down copy of an EMMA index.html: the fourth <TABLE> holds the
    # package links that GetPackageNameToEmmaFileDict reads.
    self.index_html = (
        '<HTML>'
        '<BODY>'
        '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '<TABLE CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '<TABLE CLASS="it" CELLSPACING="0">'
        '</TABLE>'
        '<TABLE CELLSPACING="0" WIDTH="100%">'
        '<TR>'
        '<TH CLASS="f">name</TH>'
        '<TH>class, %</TH>'
        '<TH>method, %</TH>'
        '<TH>block, %</TH>'
        '<TH>line, %</TH>'
        '</TR>'
        '<TR CLASS="o">'
        '<TD><A HREF="_files/0.html"'
        '>org.chromium.chrome.browser</A></TD>'
        '<TD CLASS="h">0% (0/3)</TD>'
        '</TR>'
        '<TR>'
        '<TD><A HREF="_files/1.html"'
        '>org.chromium.chrome.browser.tabmodel</A></TD>'
        '<TD CLASS="h">0% (0/8)</TD>'
        '</TR>'
        '</TABLE>'
        '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '</BODY>'
        '</HTML>'
    )
    # Per-package class listing: the third <TABLE> holds the class links.
    self.package_1_class_list_html = (
        '<HTML>'
        '<BODY>'
        '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '<TABLE CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '<TABLE CELLSPACING="0" WIDTH="100%">'
        '<TR>'
        '<TH CLASS="f">name</TH>'
        '<TH>class, %</TH>'
        '<TH>method, %</TH>'
        '<TH>block, %</TH>'
        '<TH>line, %</TH>'
        '</TR>'
        '<TR CLASS="o">'
        '<TD><A HREF="1e.html">IntentHelper.java</A></TD>'
        '<TD CLASS="h">0% (0/3)</TD>'
        '<TD CLASS="h">0% (0/9)</TD>'
        '<TD CLASS="h">0% (0/97)</TD>'
        '<TD CLASS="h">0% (0/26)</TD>'
        '</TR>'
        '</TABLE>'
        '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '</BODY>'
        '</HTML>'
    )
    self.package_2_class_list_html = (
        '<HTML>'
        '<BODY>'
        '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '<TABLE CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '<TABLE CELLSPACING="0" WIDTH="100%">'
        '<TR>'
        '<TH CLASS="f">name</TH>'
        '<TH>class, %</TH>'
        '<TH>method, %</TH>'
        '<TH>block, %</TH>'
        '<TH>line, %</TH>'
        '</TR>'
        '<TR CLASS="o">'
        '<TD><A HREF="1f.html">ContentSetting.java</A></TD>'
        '<TD CLASS="h">0% (0/1)</TD>'
        '</TR>'
        '<TR>'
        '<TD><A HREF="20.html">DevToolsServer.java</A></TD>'
        '</TR>'
        '<TR CLASS="o">'
        '<TD><A HREF="21.html">FileProviderHelper.java</A></TD>'
        '</TR>'
        '<TR>'
        '<TD><A HREF="22.html">ContextualMenuBar.java</A></TD>'
        '</TR>'
        '<TR CLASS="o">'
        '<TD><A HREF="23.html">AccessibilityUtil.java</A></TD>'
        '</TR>'
        '<TR>'
        '<TD><A HREF="24.html">NavigationPopup.java</A></TD>'
        '</TR>'
        '</TABLE>'
        '<TABLE CLASS="hdft" CELLSPACING="0" WIDTH="100%">'
        '</TABLE>'
        '</BODY>'
        '</HTML>'
    )
    # Single source rows covering each CSS class the parser recognizes.
    self.partially_covered_tr_html = (
        '<TR CLASS="p">'
        '<TD CLASS="l" TITLE="78% line coverage (7 out of 9)">108</TD>'
        '<TD TITLE="78% line coverage (7 out of 9 instructions)">'
        'if (index &lt; 0 || index = mSelectors.size()) index = 0;</TD>'
        '</TR>'
    )
    self.covered_tr_html = (
        '<TR CLASS="c">'
        '<TD CLASS="l">110</TD>'
        '<TD> if (mSelectors.get(index) != null) {</TD>'
        '</TR>'
    )
    self.not_executable_tr_html = (
        '<TR>'
        '<TD CLASS="l">109</TD>'
        '<TD> </TD>'
        '</TR>'
    )
    # Line-number cell wrapped in an extra <A> anchor, as EMMA sometimes emits.
    self.tr_with_extra_a_tag = (
        '<TR CLASS="z">'
        '<TD CLASS="l">'
        '<A name="1f">54</A>'
        '</TD>'
        '<TD> }</TD>'
        '</TR>'
    )

  def testInit(self):
    emma_dir = self.emma_dir
    parser = emma_coverage_stats._EmmaHtmlParser(emma_dir)
    self.assertEqual(parser._base_dir, emma_dir)
    self.assertEqual(parser._emma_files_path, 'fake/dir/_files')
    self.assertEqual(parser._index_path, 'fake/dir/index.html')

  def testFindElements_basic(self):
    read_values = [self.simple_html]
    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
                                   file_path='fake', xpath_selector='.//TD')
    self.assertIs(type(found), list)
    self.assertIs(type(found[0]), ElementTree.Element)
    self.assertEqual(found[0].text, 'Test HTML')

  def testFindElements_multipleElements(self):
    multiple_trs = self.not_executable_tr_html + self.covered_tr_html
    read_values = ['<div>' + multiple_trs + '</div>']
    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
                                   file_path='fake', xpath_selector='.//TR')
    # assertEqual, not the deprecated assertEquals alias used elsewhere.
    self.assertEqual(2, len(found))

  def testFindElements_noMatch(self):
    read_values = [self.simple_html]
    found, _ = MockOpenForFunction(self.parser._FindElements, read_values,
                                   file_path='fake', xpath_selector='.//TR')
    self.assertEqual(found, [])

  def testFindElements_badFilePath(self):
    with self.assertRaises(IOError):
      with mock.patch('os.path.exists', return_value=False):
        self.parser._FindElements('fake', xpath_selector='//tr')

  def testGetPackageNameToEmmaFileDict_basic(self):
    expected_dict = {
        'org.chromium.chrome.browser.AccessibilityUtil.java':
        'fake/dir/_files/23.html',
        'org.chromium.chrome.browser.ContextualMenuBar.java':
        'fake/dir/_files/22.html',
        'org.chromium.chrome.browser.tabmodel.IntentHelper.java':
        'fake/dir/_files/1e.html',
        'org.chromium.chrome.browser.ContentSetting.java':
        'fake/dir/_files/1f.html',
        'org.chromium.chrome.browser.DevToolsServer.java':
        'fake/dir/_files/20.html',
        'org.chromium.chrome.browser.NavigationPopup.java':
        'fake/dir/_files/24.html',
        'org.chromium.chrome.browser.FileProviderHelper.java':
        'fake/dir/_files/21.html'}

    read_values = [self.index_html, self.package_1_class_list_html,
                   self.package_2_class_list_html]
    return_dict, mock_open = MockOpenForFunction(
        self.parser.GetPackageNameToEmmaFileDict, read_values)

    self.assertDictEqual(return_dict, expected_dict)
    self.assertEqual(mock_open.call_count, 3)
    calls = [mock.call('fake/dir/index.html'),
             mock.call('fake/dir/_files/1.html'),
             mock.call('fake/dir/_files/0.html')]
    mock_open.assert_has_calls(calls)

  def testGetPackageNameToEmmaFileDict_noPackageElements(self):
    self.parser._FindElements = mock.Mock(return_value=[])
    return_dict = self.parser.GetPackageNameToEmmaFileDict()
    self.assertDictEqual({}, return_dict)

  def testGetLineCoverage_status_basic(self):
    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
    self.assertEqual(line_coverage[0].covered_status,
                     emma_coverage_stats.COVERED)

  def testGetLineCoverage_status_statusMissing(self):
    line_coverage = self.GetLineCoverageWithFakeElements(
        [self.not_executable_tr_html])
    self.assertEqual(line_coverage[0].covered_status,
                     emma_coverage_stats.NOT_EXECUTABLE)

  def testGetLineCoverage_fractionalCoverage_basic(self):
    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
    self.assertEqual(line_coverage[0].fractional_line_coverage, 1.0)

  def testGetLineCoverage_fractionalCoverage_partial(self):
    line_coverage = self.GetLineCoverageWithFakeElements(
        [self.partially_covered_tr_html])
    self.assertEqual(line_coverage[0].fractional_line_coverage, 0.78)

  def testGetLineCoverage_lineno_basic(self):
    line_coverage = self.GetLineCoverageWithFakeElements([self.covered_tr_html])
    self.assertEqual(line_coverage[0].lineno, 110)

  def testGetLineCoverage_lineno_withAlternativeHtml(self):
    line_coverage = self.GetLineCoverageWithFakeElements(
        [self.tr_with_extra_a_tag])
    self.assertEqual(line_coverage[0].lineno, 54)

  def testGetLineCoverage_source(self):
    self.parser._FindElements = mock.Mock(
        return_value=[ElementTree.fromstring(self.covered_tr_html)])
    line_coverage = self.parser.GetLineCoverage('fake_path')
    self.assertEqual(line_coverage[0].source,
                     ' if (mSelectors.get(index) != null) {')

  def testGetLineCoverage_multipleElements(self):
    line_coverage = self.GetLineCoverageWithFakeElements(
        [self.covered_tr_html, self.partially_covered_tr_html,
         self.tr_with_extra_a_tag])
    self.assertEqual(len(line_coverage), 3)

  def GetLineCoverageWithFakeElements(self, html_elements):
    """Wraps GetLineCoverage so mock HTML can easily be used.

    Args:
      html_elements: List of strings each representing an HTML element.

    Returns:
      A list of LineCoverage objects.
    """
    elements = [ElementTree.fromstring(string) for string in html_elements]
    with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
                    return_value=elements):
      return self.parser.GetLineCoverage('fake_path')
+
+
+class _EmmaCoverageStatsTest(unittest.TestCase):
+ """Tests for _EmmaCoverageStats."""
+
+ def setUp(self):
+ self.good_source_to_emma = {
+ '/path/to/1/File1.java': '/emma/1.html',
+ '/path/2/File2.java': '/emma/2.html',
+ '/path/2/File3.java': '/emma/3.html'
+ }
+ self.line_coverage = [
+ emma_coverage_stats.LineCoverage(
+ 1, '', emma_coverage_stats.COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 2, '', emma_coverage_stats.COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 4, '', emma_coverage_stats.NOT_COVERED, 1.0),
+ emma_coverage_stats.LineCoverage(
+ 5, '', emma_coverage_stats.PARTIALLY_COVERED, 0.85),
+ emma_coverage_stats.LineCoverage(
+ 6, '', emma_coverage_stats.PARTIALLY_COVERED, 0.20)
+ ]
+ self.lines_for_coverage = [1, 3, 5, 6]
+ with mock.patch('emma_coverage_stats._EmmaHtmlParser._FindElements',
+ return_value=[]):
+ self.simple_coverage = emma_coverage_stats._EmmaCoverageStats(
+ 'fake_dir', {})
+
+ def testInit(self):
+ coverage_stats = self.simple_coverage
+ self.assertIsInstance(coverage_stats._emma_parser,
+ emma_coverage_stats._EmmaHtmlParser)
+ self.assertIsInstance(coverage_stats._source_to_emma, dict)
+
+ def testNeedsCoverage_withExistingJavaFile(self):
+ test_file = '/path/to/file/File.java'
+ with mock.patch('os.path.exists', return_value=True):
+ self.assertTrue(
+ emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+ def testNeedsCoverage_withNonJavaFile(self):
+ test_file = '/path/to/file/File.c'
+ with mock.patch('os.path.exists', return_value=True):
+ self.assertFalse(
+ emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+ def testNeedsCoverage_fileDoesNotExist(self):
+ test_file = '/path/to/file/File.java'
+ with mock.patch('os.path.exists', return_value=False):
+ self.assertFalse(
+ emma_coverage_stats._EmmaCoverageStats.NeedsCoverage(test_file))
+
+ def testGetPackageNameFromFile_basic(self):
+ test_file_text = """// Test Copyright
+ package org.chromium.chrome.browser;
+ import android.graphics.RectF;"""
+ result_package, _ = MockOpenForFunction(
+ emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+ [test_file_text], file_path='/path/to/file/File.java')
+ self.assertEqual(result_package, 'org.chromium.chrome.browser.File.java')
+
+ def testGetPackageNameFromFile_noPackageStatement(self):
+ result_package, _ = MockOpenForFunction(
+ emma_coverage_stats._EmmaCoverageStats.GetPackageNameFromFile,
+ ['not a package statement'], file_path='/path/to/file/File.java')
+ self.assertIsNone(result_package)
+
+ def testGetSummaryStatsForLines_basic(self):
+ covered, total = self.simple_coverage.GetSummaryStatsForLines(
+ self.line_coverage)
+ self.assertEqual(covered, 3.05)
+ self.assertEqual(total, 5)
+
+ def testGetSourceFileToEmmaFileDict(self):
+ package_names = {
+ '/path/to/1/File1.java': 'org.fake.one.File1.java',
+ '/path/2/File2.java': 'org.fake.File2.java',
+ '/path/2/File3.java': 'org.fake.File3.java'
+ }
+ package_to_emma = {
+ 'org.fake.one.File1.java': '/emma/1.html',
+ 'org.fake.File2.java': '/emma/2.html',
+ 'org.fake.File3.java': '/emma/3.html'
+ }
+ with mock.patch('os.path.exists', return_value=True):
+ coverage_stats = self.simple_coverage
+ coverage_stats._emma_parser.GetPackageNameToEmmaFileDict = mock.MagicMock(
+ return_value=package_to_emma)
+ coverage_stats.GetPackageNameFromFile = lambda x: package_names[x]
+ result_dict = coverage_stats._GetSourceFileToEmmaFileDict(
+ package_names.keys())
+ self.assertDictEqual(result_dict, self.good_source_to_emma)
+
+ def testGetCoverageDictForFile(self):
+ line_coverage = self.line_coverage
+ self.simple_coverage._emma_parser.GetLineCoverage = lambda x: line_coverage
+ self.simple_coverage._source_to_emma = {'/fake/src': 'fake/emma'}
+ lines = self.lines_for_coverage
+ expected_dict = {
+ 'absolute': {
+ 'covered': 3.05,
+ 'total': 5
+ },
+ 'incremental': {
+ 'covered': 2.05,
+ 'total': 3
+ },
+ 'source': [
+ {
+ 'line': line_coverage[0].source,
+ 'coverage': line_coverage[0].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[0].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[1].source,
+ 'coverage': line_coverage[1].covered_status,
+ 'changed': False,
+ 'fractional_coverage': line_coverage[1].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[2].source,
+ 'coverage': line_coverage[2].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[2].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[3].source,
+ 'coverage': line_coverage[3].covered_status,
+ 'changed': False,
+ 'fractional_coverage': line_coverage[3].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[4].source,
+ 'coverage': line_coverage[4].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[4].fractional_line_coverage,
+ },
+ {
+ 'line': line_coverage[5].source,
+ 'coverage': line_coverage[5].covered_status,
+ 'changed': True,
+ 'fractional_coverage': line_coverage[5].fractional_line_coverage,
+ }
+ ]
+ }
+ result_dict = self.simple_coverage.GetCoverageDictForFile(
+ '/fake/src', lines)
+ self.assertDictEqual(result_dict, expected_dict)
+
+ def testGetCoverageDictForFile_emptyCoverage(self):
+ expected_dict = {
+ 'absolute': {'covered': 0, 'total': 0},
+ 'incremental': {'covered': 0, 'total': 0},
+ 'source': []
+ }
+ self.simple_coverage._emma_parser.GetLineCoverage = lambda x: []
+ self.simple_coverage._source_to_emma = {'fake_dir': 'fake/emma'}
+ result_dict = self.simple_coverage.GetCoverageDictForFile('fake_dir', {})
+ self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDictForFile_missingCoverage(self):
+    """None is returned for a file with no source-to-EMMA mapping."""
+    self.simple_coverage._source_to_emma = {}
+    result_dict = self.simple_coverage.GetCoverageDictForFile('fake_file', {})
+    self.assertIsNone(result_dict)
+
+  def testGetCoverageDict_basic(self):
+    """Verifies the aggregate dict produced for two files with known lines."""
+    # Maps file path -> lines changed by the (fake) patch.
+    files_for_coverage = {
+        '/path/to/1/File1.java': [1, 3, 4],
+        '/path/2/File2.java': [1, 2]
+    }
+    self.simple_coverage._source_to_emma = {
+        '/path/to/1/File1.java': 'emma_1',
+        '/path/2/File2.java': 'emma_2'
+    }
+    # Per-file line coverage the stubbed parser will hand back.
+    coverage_info = {
+        'emma_1': [
+            emma_coverage_stats.LineCoverage(
+                1, '', emma_coverage_stats.COVERED, 1.0),
+            emma_coverage_stats.LineCoverage(
+                2, '', emma_coverage_stats.PARTIALLY_COVERED, 0.5),
+            emma_coverage_stats.LineCoverage(
+                3, '', emma_coverage_stats.NOT_EXECUTABLE, 1.0),
+            emma_coverage_stats.LineCoverage(
+                4, '', emma_coverage_stats.COVERED, 1.0)
+        ],
+        'emma_2': [
+            emma_coverage_stats.LineCoverage(
+                1, '', emma_coverage_stats.NOT_COVERED, 1.0),
+            emma_coverage_stats.LineCoverage(
+                2, '', emma_coverage_stats.COVERED, 1.0)
+        ]
+    }
+    expected_dict = {
+        'files': {
+            '/path/2/File2.java': {
+                'absolute': {'covered': 1, 'total': 2},
+                'incremental': {'covered': 1, 'total': 2},
+                'source': [{'changed': True, 'coverage': 0,
+                            'line': '', 'fractional_coverage': 1.0},
+                           {'changed': True, 'coverage': 1,
+                            'line': '', 'fractional_coverage': 1.0}]
+            },
+            '/path/to/1/File1.java': {
+                'absolute': {'covered': 2.5, 'total': 3},
+                'incremental': {'covered': 2, 'total': 2},
+                'source': [{'changed': True, 'coverage': 1,
+                            'line': '', 'fractional_coverage': 1.0},
+                           {'changed': False, 'coverage': 2,
+                            'line': '', 'fractional_coverage': 0.5},
+                           {'changed': True, 'coverage': -1,
+                            'line': '', 'fractional_coverage': 1.0},
+                           {'changed': True, 'coverage': 1,
+                            'line': '', 'fractional_coverage': 1.0}]
+            }
+        },
+        'patch': {'incremental': {'covered': 3, 'total': 4}}
+    }
+    # Return the relevant coverage info for each file.
+    self.simple_coverage._emma_parser.GetLineCoverage = (
+        lambda x: coverage_info[x])
+    result_dict = self.simple_coverage.GetCoverageDict(files_for_coverage)
+    self.assertDictEqual(result_dict, expected_dict)
+
+  def testGetCoverageDict_noCoverage(self):
+    """Empty input yields the canonical empty-stats dict."""
+    result_dict = self.simple_coverage.GetCoverageDict({})
+    self.assertDictEqual(result_dict, EMPTY_COVERAGE_STATS_DICT)
+
+
+class EmmaCoverageStatsGenerateCoverageReport(unittest.TestCase):
+  """Tests for GenerateCoverageReport."""
+
+  def testGenerateCoverageReport_missingJsonFile(self):
+    """An IOError propagates when the line-coverage file does not exist."""
+    with self.assertRaises(IOError):
+      with mock.patch('os.path.exists', return_value=False):
+        emma_coverage_stats.GenerateCoverageReport('', '', '')
+
+  def testGenerateCoverageReport_invalidJsonFile(self):
+    """A ValueError propagates when the line-coverage file is not JSON."""
+    with self.assertRaises(ValueError):
+      with mock.patch('os.path.exists', return_value=True):
+        MockOpenForFunction(emma_coverage_stats.GenerateCoverageReport, [''],
+                            line_coverage_file='', out_file_path='',
+                            coverage_dir='')
+
+
+def MockOpenForFunction(func, side_effects, **kwargs):
+  """Allows easy mock open and read for callables that open multiple files.
+
+  Will mock the python open function in a way such that each time read() is
+  called on an open file, the next element in |side_effects| is returned. This
+  makes it easier to test functions that call open() multiple times.
+
+  Args:
+    func: The callable to invoke once mock files are setup.
+    side_effects: A list of return values for each file to return once read.
+      Length of list should be equal to the number of calls to open in |func|.
+    **kwargs: Keyword arguments to be passed to |func|.
+
+  Returns:
+    A tuple containing the return value of |func| and the MagicMock object used
+    to mock all calls to open respectively.
+  """
+  mock_open = mock.mock_open()
+  # Each successive open() yields a file whose read() returns the next entry.
+  mock_open.side_effect = [mock.mock_open(read_data=side_effect).return_value
+                           for side_effect in side_effects]
+  # NOTE: '__builtin__' is the Python 2 builtins module; under Python 3 the
+  # patch target would need to be 'builtins.open'.
+  with mock.patch('__builtin__.open', mock_open):
+    return func(**kwargs), mock_open
+
+
+if __name__ == '__main__':
+  # Suppress logging messages; buffer=True captures stdout/stderr emitted by
+  # passing tests so only failing tests print output.
+  unittest.main(buffer=True)
diff --git a/deps/v8/build/android/empty/.keep b/deps/v8/build/android/empty/.keep
new file mode 100644
index 0000000000..1fd038b8cf
--- /dev/null
+++ b/deps/v8/build/android/empty/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/deps/v8/build/android/empty_proguard.flags b/deps/v8/build/android/empty_proguard.flags
new file mode 100644
index 0000000000..53484fe815
--- /dev/null
+++ b/deps/v8/build/android/empty_proguard.flags
@@ -0,0 +1 @@
+# Used for apk targets that do not need proguard. See build/java_apk.gypi.
diff --git a/deps/v8/build/android/envsetup.sh b/deps/v8/build/android/envsetup.sh
new file mode 100755
index 0000000000..49041a4e70
--- /dev/null
+++ b/deps/v8/build/android/envsetup.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Adds Android SDK tools and related helpers to PATH, useful for development.
+# Not used on bots, nor required for any commands to succeed.
+# Use like: source build/android/envsetup.sh
+
+# Make sure we're being sourced.
+# (When executed directly under bash, BASH_SOURCE equals $0; when sourced,
+# they differ.)
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+  # The Chromium source root is two levels up from build/android/.
+  local CHROME_SRC="$(readlink -f "${SCRIPT_DIR}/../../")"
+  local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_sdk/public"
+
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/
+  export PATH=$PATH:${CHROME_SRC}/build/android
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
diff --git a/deps/v8/build/android/generate_emma_html.py b/deps/v8/build/android/generate_emma_html.py
new file mode 100755
index 0000000000..dab3992c45
--- /dev/null
+++ b/deps/v8/build/android/generate_emma_html.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates EMMA coverage files to produce html output."""
+
+import fnmatch
+import json
+import optparse
+import os
+import sys
+
+import devil_chromium
+from devil.utils import cmd_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+
+def _GetFilesWithExt(root_dir, ext):
+  """Gets all files with a given extension.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    ext: Extension to look for (without the leading dot; a '*.' prefix is
+      added to form the fnmatch pattern).
+
+  Returns:
+    A list of paths to files that match (absolute only if |root_dir| is
+    absolute, since os.walk yields paths rooted at |root_dir| as given).
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*.' + ext)
+    files.extend([os.path.join(root, basename)
+                  for basename in basenames])
+
+  return files
+
+
+def main():
+  """Merges EMMA .ec/.em files and invokes EMMA to emit an HTML report.
+
+  Returns:
+    0 on success, non-zero if report generation failed.
+  """
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--output', help='HTML output filename.')
+  option_parser.add_option('--coverage-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage data (.ec) files.'))
+  option_parser.add_option('--metadata-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage metadata (.em) files.'))
+  option_parser.add_option('--cleanup', action='store_true',
+                           help=('If set, removes coverage files generated at '
+                                 'runtime.'))
+  options, _ = option_parser.parse_args()
+
+  devil_chromium.Initialize()
+
+  # All three options are mandatory; error() exits the process.
+  if not (options.coverage_dir and options.metadata_dir and options.output):
+    option_parser.error('One or more mandatory options are missing.')
+
+  coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
+  metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
+  # Filter out zero-length files. These are created by emma_instr.py when a
+  # target has no classes matching the coverage filter.
+  metadata_files = [f for f in metadata_files if os.path.getsize(f)]
+  # NOTE: print statements => this script targets Python 2.
+  print 'Found coverage files: %s' % str(coverage_files)
+  print 'Found metadata files: %s' % str(metadata_files)
+
+  # Each .em file has a sibling '<name>_sources.txt' holding a JSON list of
+  # source paths.
+  sources = []
+  for f in metadata_files:
+    sources_file = os.path.splitext(f)[0] + '_sources.txt'
+    with open(sources_file, 'r') as sf:
+      sources.extend(json.load(sf))
+
+  # Source paths should be passed to EMMA in a way that the relative file paths
+  # reflect the class package name.
+  PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium', 'com/chrome']
+  fixed_source_paths = set()
+
+  for path in sources:
+    for partial in PARTIAL_PACKAGE_NAMES:
+      if partial in path:
+        # Truncate the path where the package directory begins.
+        fixed_path = os.path.join(
+            host_paths.DIR_SOURCE_ROOT, path[:path.index(partial)])
+        fixed_source_paths.add(fixed_path)
+        break
+
+  sources = list(fixed_source_paths)
+
+  # EMMA expects each input file preceded by its own '-in' flag.
+  input_args = []
+  for f in coverage_files + metadata_files:
+    input_args.append('-in')
+    input_args.append(f)
+
+  output_args = ['-Dreport.html.out.file', options.output,
+                 '-Dreport.html.out.encoding', 'UTF-8']
+  source_args = ['-sp', ','.join(sources)]
+
+  exit_code = cmd_helper.RunCmd(
+      ['java', '-cp',
+       os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
+       'emma', 'report', '-r', 'html']
+      + input_args + output_args + source_args)
+
+  if options.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  # Command tends to exit with status 0 when it actually failed.
+  if not exit_code and not os.path.exists(options.output):
+    exit_code = 1
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  # Propagate the report-generation exit code to the shell.
+  sys.exit(main())
diff --git a/deps/v8/build/android/gradle/AndroidManifest.xml b/deps/v8/build/android/gradle/AndroidManifest.xml
new file mode 100644
index 0000000000..f3e50e0c93
--- /dev/null
+++ b/deps/v8/build/android/gradle/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright 2018 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+
+<!--
+ This is a dummy manifest which is required by Android Studio's _all target.
+ No <uses-sdk> is allowed due to https://crbug.com/841529.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.dummy">
+</manifest>
diff --git a/deps/v8/build/android/gradle/OWNERS b/deps/v8/build/android/gradle/OWNERS
new file mode 100644
index 0000000000..d1f94845f4
--- /dev/null
+++ b/deps/v8/build/android/gradle/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/gradle/android.jinja b/deps/v8/build/android/gradle/android.jinja
new file mode 100644
index 0000000000..4a7da2961e
--- /dev/null
+++ b/deps/v8/build/android/gradle/android.jinja
@@ -0,0 +1,114 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{# Template for the generated build.gradle of android library/apk modules. #}
+{# expand_sourceset emits one named sourceSet block (manifest, java dirs #}
+{# and excludes, jniLibs, res) when the given variable group is defined. #}
+{% macro expand_sourceset(variables, prefix) %}
+{% if variables is defined %}
+    {{ prefix }} {
+{% if variables.android_manifest is defined %}
+      manifest.srcFile "{{ variables.android_manifest }}"
+{% endif %}
+{% if variables.java_dirs is defined %}
+      java.srcDirs = [
+{% for path in variables.java_dirs %}
+        "{{ path }}",
+{% endfor %}
+      ]
+{% endif %}
+{% if variables.java_excludes is defined %}
+      java.filter.exclude(
+{% for path in variables.java_excludes %}
+        "{{ path }}",
+{% endfor %}
+      )
+{% endif %}
+{% if variables.jni_libs is defined %}
+      jniLibs.srcDirs = [
+{% for path in variables.jni_libs %}
+        "{{ path }}",
+{% endfor %}
+      ]
+{% endif %}
+{% if variables.res_dirs is defined %}
+      res.srcDirs = [
+{% for path in variables.res_dirs %}
+        "{{ path }}",
+{% endfor %}
+      ]
+{% endif %}
+    }
+{% endif %}
+{% endmacro %}
+// Generated by //build/android/generate_gradle.py
+
+{% if template_type in ('android_library', 'android_junit') %}
+apply plugin: "com.android.library"
+{% elif template_type == 'android_apk' %}
+apply plugin: "com.android.application"
+{% endif %}
+
+android {
+  compileSdkVersion "{{ compile_sdk_version }}"
+
+  defaultConfig {
+    vectorDrawables.useSupportLibrary = true
+    minSdkVersion 19
+    targetSdkVersion {{ target_sdk_version }}
+  }
+
+  compileOptions {
+    sourceCompatibility JavaVersion.VERSION_1_8
+    targetCompatibility JavaVersion.VERSION_1_8
+  }
+
+{% if native is defined %}
+  externalNativeBuild {
+    cmake {
+      path "CMakeLists.txt"
+    }
+  }
+{% endif %}
+
+  sourceSets {
+{# Reset all default srcDirs first so only GN-provided paths are used. #}
+{% for name in ['main', 'test', 'androidTest', 'debug', 'release'] %}
+    {{ name }} {
+      aidl.srcDirs = []
+      assets.srcDirs = []
+      java.srcDirs = []
+      jni.srcDirs = []
+      renderscript.srcDirs = []
+      res.srcDirs = []
+      resources.srcDirs = []
+    }
+{% endfor %}
+
+{{ expand_sourceset(main, 'main') }}
+{{ expand_sourceset(test, 'test') }}
+{% if android_test is defined %}
+{% for t in android_test %}
+{{ expand_sourceset(t, 'androidTest') }}
+{% endfor %}
+{% endif %}
+  }
+}
+
+{% include 'dependencies.jinja' %}
+
+{# Disable gradle tasks whose outputs the GN build already provides. #}
+afterEvaluate {
+  def tasksToDisable = tasks.findAll {
+    return (it.name.equals('generateDebugSources') // causes unwanted AndroidManifest.java
+        || it.name.equals('generateReleaseSources')
+        || it.name.endsWith('BuildConfig') // causes unwanted BuildConfig.java
+        || it.name.equals('preDebugAndroidTestBuild')
+{% if not use_gradle_process_resources %}
+        || it.name.endsWith('Assets')
+        || it.name.endsWith('Resources')
+        || it.name.endsWith('ResValues')
+{% endif %}
+        || it.name.endsWith('Aidl')
+        || it.name.endsWith('Renderscript')
+        || it.name.endsWith('Shaders'))
+  }
+  tasksToDisable.each { Task task ->
+    task.enabled = false
+  }
+}
diff --git a/deps/v8/build/android/gradle/cmake.jinja b/deps/v8/build/android/gradle/cmake.jinja
new file mode 100644
index 0000000000..996a5218f6
--- /dev/null
+++ b/deps/v8/build/android/gradle/cmake.jinja
@@ -0,0 +1,26 @@
+{# Copyright 2018 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{# Template for the CMakeLists.txt used by Android Studio's native view. #}
+# Generated by //build/android/generate_gradle.py
+
+cmake_minimum_required(VERSION 3.4.1)
+
+project(chrome C CXX)
+
+{% if native.includes is defined %}
+include_directories(
+{% for path in native.includes %}
+  {{ path }}
+{% endfor %}
+)
+{% endif %}
+
+{# One add_library per native target that has sources. #}
+{# NOTE: iteritems() requires rendering with a Python 2 Jinja environment. #}
+{% for name, target in native.targets.iteritems() %}
+{% if target.sources is defined %}
+add_library("{{ name }}"
+{% for path in target.sources %}
+  {{ path }}
+{% endfor %}
+)
+{% endif %}
+{% endfor %}
diff --git a/deps/v8/build/android/gradle/dependencies.jinja b/deps/v8/build/android/gradle/dependencies.jinja
new file mode 100644
index 0000000000..87bc312853
--- /dev/null
+++ b/deps/v8/build/android/gradle/dependencies.jinja
@@ -0,0 +1,28 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{# expand_deps emits one dependency line per prebuilt jar and per project #}
+{# dependency, using |prefix| as the gradle configuration name. #}
+{% macro expand_deps(variables, prefix) %}
+{% if variables is defined %}
+{% if variables.prebuilts is defined %}
+{% for path in variables.prebuilts %}
+    {{ prefix }} files("{{ path }}")
+{% endfor %}
+{% endif %}
+{% if variables.java_project_deps is defined %}
+{% for proj in variables.java_project_deps %}
+    {{ prefix }} project(":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% if variables.android_project_deps is defined %}
+{% for proj in variables.android_project_deps %}
+    {{ prefix }} project(path: ":{{ proj }}")
+{% endfor %}
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+dependencies {
+{{ expand_deps(main, 'implementation') }}
+{{ expand_deps(test, 'testImplementation') }}
+{{ expand_deps(android_test, 'androidTestImplementation') }}
+}
diff --git a/deps/v8/build/android/gradle/generate_gradle.py b/deps/v8/build/android/gradle/generate_gradle.py
new file mode 100755
index 0000000000..f2bcec5e80
--- /dev/null
+++ b/deps/v8/build/android/gradle/generate_gradle.py
@@ -0,0 +1,974 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates an Android Studio project from a GN target."""
+
+import argparse
+import codecs
+import collections
+import glob
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import zipfile
+
+_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(_BUILD_ANDROID)
+import devil_chromium
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp'))
+import jinja_template
+from util import build_utils
+
+_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
+                                 'depot_tools')
+# Placeholder manifest used for targets that do not provide their own.
+_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gradle',
+    'AndroidManifest.xml')
+_FILE_DIR = os.path.dirname(__file__)
+# Subdirectory names created inside each generated project directory.
+_SRCJARS_SUBDIR = 'extracted-srcjars'
+_JNI_LIBS_SUBDIR = 'symlinked-libs'
+_ARMEABI_SUBDIR = 'armeabi'
+_RES_SUBDIR = 'extracted-res'
+_GRADLE_BUILD_FILE = 'build.gradle'
+_CMAKE_FILE = 'CMakeLists.txt'
+# This needs to come first alphabetically among all modules.
+_MODULE_ALL = '_all'
+_SRC_INTERNAL = os.path.join(
+    os.path.dirname(host_paths.DIR_SOURCE_ROOT), 'src-internal')
+
+# Projects generated when no explicit targets are given on the command line.
+_DEFAULT_TARGETS = [
+    '//android_webview/test/embedded_test_server:aw_net_test_support_apk',
+    '//android_webview/test:webview_instrumentation_apk',
+    '//android_webview/test:webview_instrumentation_test_apk',
+    '//base:base_junit_tests',
+    '//chrome/android:chrome_junit_tests',
+    '//chrome/android:chrome_public_apk',
+    '//chrome/android:chrome_public_test_apk',
+    '//content/public/android:content_junit_tests',
+    '//content/shell/android:content_shell_apk',
+    # Below must be included even with --all since they are libraries.
+    '//base/android/jni_generator:jni_processor',
+    '//tools/android/errorprone_plugin:errorprone_plugin_java',
+]
+
+_EXCLUDED_PREBUILT_JARS = [
+    # Android Studio already provides Desugar runtime.
+    # Including it would cause linking error because of a duplicate class.
+    'lib.java/third_party/bazel/desugar/Desugar-runtime.jar'
+]
+
+
+def _TemplatePath(name):
+  """Returns the path of the '<name>.jinja' template beside this script."""
+  return os.path.join(_FILE_DIR, '{}.jinja'.format(name))
+
+
+def _RebasePath(path_or_list, new_cwd=None, old_cwd=None):
+  """Makes the given path(s) relative to new_cwd, or absolute if not specified.
+
+  If new_cwd is not specified, absolute paths are returned.
+  If old_cwd is not specified, constants.GetOutDirectory() is assumed.
+  """
+  if path_or_list is None:
+    return []
+  # NOTE: basestring is Python 2 only; this script targets Python 2.
+  if not isinstance(path_or_list, basestring):
+    # Recurse element-wise when given a list of paths.
+    return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list]
+  if old_cwd is None:
+    old_cwd = constants.GetOutDirectory()
+  old_cwd = os.path.abspath(old_cwd)
+  if new_cwd:
+    new_cwd = os.path.abspath(new_cwd)
+    return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd)
+  return os.path.abspath(os.path.join(old_cwd, path_or_list))
+
+
+def _IsSubpathOf(child, parent):
+  """Returns whether |child| is a subpath of |parent|."""
+  # relpath starts with '..' exactly when |child| lies outside |parent|.
+  return not os.path.relpath(child, parent).startswith(os.pardir)
+
+
+def _WriteFile(path, data):
+  """Writes |data| to |path|, constructing parent directories if necessary."""
+  logging.info('Writing %s', path)
+  dirname = os.path.dirname(path)
+  if not os.path.exists(dirname):
+    os.makedirs(dirname)
+  # Write as UTF-8 text.
+  with codecs.open(path, 'w', 'utf-8') as output_file:
+    output_file.write(data)
+
+
+def _ReadPropertiesFile(path):
+  """Parses a 'key=value'-per-line properties file into a dict."""
+  with open(path) as f:
+    # Keep only lines containing '='; split on the first '=' only.
+    return dict(l.rstrip().split('=', 1) for l in f if '=' in l)
+
+
+def _RunGnGen(output_dir, args=None):
+  """Runs 'gn gen <output_dir>' (with optional extra |args|)."""
+  cmd = [
+      os.path.join(_DEPOT_TOOLS_PATH, 'gn'),
+      'gen',
+      output_dir,
+  ]
+  if args:
+    cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _RunNinja(output_dir, args, j):
+  """Builds |args| targets in |output_dir| with ninja using |j| jobs."""
+  cmd = [
+      os.path.join(_DEPOT_TOOLS_PATH, 'ninja'),
+      '-C',
+      output_dir,
+      '-j{}'.format(j),
+  ]
+  cmd.extend(args)
+  logging.info('Running: %r', cmd)
+  subprocess.check_call(cmd)
+
+
+def _QueryForAllGnTargets(output_dir):
+  """Returns GN labels for every target that has a __build_config target."""
+  # Query ninja rather than GN since it's faster.
+  cmd = [
+      os.path.join(_DEPOT_TOOLS_PATH, 'ninja'),
+      '-C',
+      output_dir,
+      '-t',
+      'targets',
+  ]
+  logging.info('Running: %r', cmd)
+  ninja_output = build_utils.CheckOutput(cmd)
+  ret = []
+  SUFFIX_LEN = len('__build_config_crbug_908819')
+  for line in ninja_output.splitlines():
+    # Each line is '<target>: <rule>'; keep the target part.
+    ninja_target = line.rsplit(':', 1)[0]
+    # Ignore root aliases by ensuring a ':' exists.
+    if ':' in ninja_target and ninja_target.endswith(
+        '__build_config_crbug_908819'):
+      ret.append('//' + ninja_target[:-SUFFIX_LEN])
+  return ret
+
+
+class _ProjectEntry(object):
+  """Helper class for project entries."""
+
+  # Maps gn_target -> _ProjectEntry so each target has a single instance.
+  _cached_entries = {}
+
+  def __init__(self, gn_target):
+    # Use _ProjectEntry.FromGnTarget instead for caching.
+    self._gn_target = gn_target
+    self._build_config = None  # Lazily-parsed .build_config JSON.
+    self._java_files = None  # Lazily-read list of java sources.
+    self._all_entries = None  # Lazily-computed dependency closure.
+    self.android_test_entries = []
+
+  @classmethod
+  def FromGnTarget(cls, gn_target):
+    """Returns the cached entry for |gn_target|, expanding '//foo' to '//foo:foo'."""
+    assert gn_target.startswith('//'), gn_target
+    if ':' not in gn_target:
+      gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target))
+    if gn_target not in cls._cached_entries:
+      cls._cached_entries[gn_target] = cls(gn_target)
+    return cls._cached_entries[gn_target]
+
+  @classmethod
+  def FromBuildConfigPath(cls, path):
+    """Derives the GN target from a 'gen/<dir>/<name>.build_config' path."""
+    prefix = 'gen/'
+    suffix = '.build_config'
+    assert path.startswith(prefix) and path.endswith(suffix), path
+    subdir = path[len(prefix):-len(suffix)]
+    gn_target = '//%s:%s' % (os.path.split(subdir))
+    return cls.FromGnTarget(gn_target)
+
+  def __hash__(self):
+    # Hash/equality delegate to the GN target so entries work in sets/dicts.
+    return hash(self._gn_target)
+
+  def __eq__(self, other):
+    return self._gn_target == other.GnTarget()
+
+  def GnTarget(self):
+    return self._gn_target
+
+  def NinjaTarget(self):
+    # Ninja target names drop the leading '//'.
+    return self._gn_target[2:]
+
+  def GnBuildConfigTarget(self):
+    return '%s__build_config_crbug_908819' % self._gn_target
+
+  def NinjaBuildConfigTarget(self):
+    return '%s__build_config_crbug_908819' % self.NinjaTarget()
+
+  def GradleSubdir(self):
+    """Returns the output subdirectory."""
+    ninja_target = self.NinjaTarget()
+    # Support targets at the root level. e.g. //:foo
+    if ninja_target[0] == ':':
+      ninja_target = ninja_target[1:]
+    return ninja_target.replace(':', os.path.sep)
+
+  def ProjectName(self):
+    """Returns the Gradle project name."""
+    return self.GradleSubdir().replace(os.path.sep, '.')
+
+  def BuildConfig(self):
+    """Reads and returns the project's .build_config JSON."""
+    if not self._build_config:
+      path = os.path.join('gen', self.GradleSubdir() + '.build_config')
+      with open(_RebasePath(path)) as jsonfile:
+        self._build_config = json.load(jsonfile)
+    return self._build_config
+
+  def DepsInfo(self):
+    """Returns the 'deps_info' section of the .build_config."""
+    return self.BuildConfig()['deps_info']
+
+  def Gradle(self):
+    """Returns the 'gradle' section of the .build_config."""
+    return self.BuildConfig()['gradle']
+
+  def Javac(self):
+    """Returns the 'javac' section of the .build_config."""
+    return self.BuildConfig()['javac']
+
+  def GetType(self):
+    """Returns the target type from its .build_config."""
+    return self.DepsInfo()['type']
+
+  def IsValid(self):
+    """Returns whether this target's type is one this generator handles."""
+    return self.GetType() in (
+        'android_apk',
+        'java_library',
+        "java_annotation_processor",
+        'java_binary',
+        'junit_binary',
+    )
+
+  def ResZips(self):
+    return self.DepsInfo().get('owned_resources_zips', [])
+
+  def ResDirs(self):
+    return self.DepsInfo().get('owned_resources_dirs', [])
+
+  def JavaFiles(self):
+    """Returns (and caches) the list of java sources for this target."""
+    if self._java_files is None:
+      java_sources_file = self.DepsInfo().get('java_sources_file')
+      java_files = []
+      if java_sources_file:
+        java_sources_file = _RebasePath(java_sources_file)
+        java_files = build_utils.ReadSourcesList(java_sources_file)
+      self._java_files = java_files
+    return self._java_files
+
+  def GeneratedJavaFiles(self):
+    # Sources list paths are output-dir relative; generated files are the
+    # ones that do not escape it via '..'.
+    return [p for p in self.JavaFiles() if not p.startswith('..')]
+
+  def PrebuiltJars(self):
+    """Returns dependent prebuilt jars, minus the excluded ones."""
+    all_jars = self.Gradle().get('dependent_prebuilt_jars', [])
+    return [i for i in all_jars if i not in _EXCLUDED_PREBUILT_JARS]
+
+  def AllEntries(self):
+    """Returns a list of all entries that the current entry depends on.
+
+    This includes the entry itself to make iterating simpler."""
+    if self._all_entries is None:
+      logging.debug('Generating entries for %s', self.GnTarget())
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in self.Gradle()['dependent_android_projects']]
+      deps.extend(_ProjectEntry.FromBuildConfigPath(p)
+                  for p in self.Gradle()['dependent_java_projects'])
+      all_entries = set()
+      for dep in deps:
+        all_entries.update(dep.AllEntries())
+      all_entries.add(self)
+      self._all_entries = list(all_entries)
+    return self._all_entries
+
+
+class _ProjectContextGenerator(object):
+  """Helper class to generate gradle build files"""
+  def __init__(self, project_dir, build_vars, use_gradle_process_resources,
+               jinja_processor, split_projects, channel):
+    self.project_dir = project_dir
+    self.build_vars = build_vars
+    self.use_gradle_process_resources = use_gradle_process_resources
+    self.jinja_processor = jinja_processor
+    self.split_projects = split_projects
+    self.channel = channel
+    # Running totals accumulated across all Generate() calls.
+    self.processed_java_dirs = set()
+    self.processed_prebuilts = set()
+    self.processed_res_dirs = set()
+
+  def _GenJniLibs(self, root_entry):
+    """Symlinks native libraries; returns the jniLibs dirs (possibly empty)."""
+    libraries = []
+    for entry in self._GetEntries(root_entry):
+      libraries += entry.BuildConfig().get('native', {}).get('libraries', [])
+    if libraries:
+      return _CreateJniLibsDir(constants.GetOutDirectory(),
+                               self.EntryOutputDir(root_entry), libraries)
+    return []
+
+  def _GenJavaDirs(self, root_entry):
+    """Returns (java_dirs, excludes) covering all entries' java files."""
+    java_files = []
+    for entry in self._GetEntries(root_entry):
+      java_files += entry.JavaFiles()
+    java_dirs, excludes = _ComputeJavaSourceDirsAndExcludes(
+        constants.GetOutDirectory(), java_files)
+    return java_dirs, excludes
+
+  def _GenCustomManifest(self, entry):
+    """Returns the path to the generated AndroidManifest.xml.
+
+    Gradle uses package id from manifest when generating R.class. So, we need
+    to generate a custom manifest if we let gradle process resources. We cannot
+    simply set android.defaultConfig.applicationId because it is not supported
+    for library targets."""
+    resource_packages = entry.Javac().get('resource_packages')
+    if not resource_packages:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+                    'unknown package. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+    elif len(resource_packages) > 1:
+      logging.debug('Target ' + entry.GnTarget() + ' includes resources from '
+                    'multiple packages. Unable to process with gradle.')
+      return _DEFAULT_ANDROID_MANIFEST_PATH
+
+    variables = {'package': resource_packages[0]}
+    data = self.jinja_processor.Render(_TemplatePath('manifest'), variables)
+    output_file = os.path.join(
+        self.EntryOutputDir(entry), 'AndroidManifest.xml')
+    _WriteFile(output_file, data)
+
+    return output_file
+
+  def _Relativize(self, entry, paths):
+    """Rebases |paths| to be relative to |entry|'s output directory."""
+    return _RebasePath(paths, self.EntryOutputDir(entry))
+
+  def _Srcjars(self, entry):
+    """Returns |entry|'s bundled srcjars (plus resource srcjars when gradle
+    is not processing resources itself)."""
+    srcjars = _RebasePath(entry.Gradle().get('bundled_srcjars', []))
+    if not self.use_gradle_process_resources:
+      srcjars += _RebasePath(entry.DepsInfo().get('owned_resource_srcjars', []))
+    return srcjars
+
+  def _GetEntries(self, entry):
+    """Returns just |entry| in split-project mode, else its full closure."""
+    if self.split_projects:
+      return [entry]
+    return entry.AllEntries()
+
+  def EntryOutputDir(self, entry):
+    """Returns the project directory generated for |entry|."""
+    return os.path.join(self.project_dir, entry.GradleSubdir())
+
+  def AllSrcjars(self, root_entry):
+    """Returns the set of srcjars across all relevant entries."""
+    srcjars = []
+    for entry in self._GetEntries(root_entry):
+      srcjars += self._Srcjars(entry)
+    return set(srcjars)
+
+  def AllResZips(self, root_entry):
+    """Returns the set of resource zips across all relevant entries."""
+    res_zips = []
+    for entry in self._GetEntries(root_entry):
+      res_zips += entry.ResZips()
+    return set(_RebasePath(res_zips))
+
+  def GeneratedInputs(self, root_entry, fast=None):
+    """Returns the set of generated files the project depends on.
+
+    When |fast| is truthy, srcjars and res zips are skipped."""
+    generated_inputs = set()
+    if not fast:
+      generated_inputs.update(self.AllResZips(root_entry))
+      generated_inputs.update(self.AllSrcjars(root_entry))
+    for entry in self._GetEntries(root_entry):
+      generated_inputs.update(entry.GeneratedJavaFiles())
+      generated_inputs.update(entry.PrebuiltJars())
+    return generated_inputs
+
+  def GeneratedZips(self, root_entry, fast=None):
+    """Returns (zip_path, extraction_dir) tuples for srcjars and res zips."""
+    entry_output_dir = self.EntryOutputDir(root_entry)
+    tuples = []
+    if not fast:
+      tuples.extend((s, os.path.join(entry_output_dir, _SRCJARS_SUBDIR))
+                    for s in self.AllSrcjars(root_entry))
+      tuples.extend((s, os.path.join(entry_output_dir, _RES_SUBDIR))
+                    for s in self.AllResZips(root_entry))
+    return tuples
+
+  def GenerateManifest(self, root_entry):
+    """Returns the manifest path, generating one if the target lacks it."""
+    android_manifest = root_entry.DepsInfo().get('android_manifest')
+    if not android_manifest:
+      android_manifest = self._GenCustomManifest(root_entry)
+    return self._Relativize(root_entry, android_manifest)
+
+  def Generate(self, root_entry):
+    """Returns the jinja variables dict used to render build.gradle."""
+    # TODO(agrieve): Add an option to use interface jars and see if that speeds
+    # things up at all.
+    variables = {}
+    java_dirs, excludes = self._GenJavaDirs(root_entry)
+    java_dirs.append(
+        os.path.join(self.EntryOutputDir(root_entry), _SRCJARS_SUBDIR))
+    self.processed_java_dirs.update(java_dirs)
+    java_dirs.sort()
+    variables['java_dirs'] = self._Relativize(root_entry, java_dirs)
+    variables['java_excludes'] = excludes
+    variables['jni_libs'] = self._Relativize(
+        root_entry, set(self._GenJniLibs(root_entry)))
+    prebuilts = set(
+        p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars())
+    self.processed_prebuilts.update(prebuilts)
+    variables['prebuilts'] = self._Relativize(root_entry, prebuilts)
+    res_dirs = set(
+        p for e in self._GetEntries(root_entry) for p in e.ResDirs())
+    # Do not add generated resources for the all module since it creates many
+    # duplicates, and currently resources are only used for editing.
+    self.processed_res_dirs.update(res_dirs)
+    res_dirs.add(
+        os.path.join(self.EntryOutputDir(root_entry), _RES_SUBDIR))
+    variables['res_dirs'] = self._Relativize(root_entry, res_dirs)
+    if self.split_projects:
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_android_projects']]
+      variables['android_project_deps'] = [d.ProjectName() for d in deps]
+      deps = [_ProjectEntry.FromBuildConfigPath(p)
+              for p in root_entry.Gradle()['dependent_java_projects']]
+      variables['java_project_deps'] = [d.ProjectName() for d in deps]
+    return variables
+
+
+def _ComputeJavaSourceDirs(java_files):
+  """Returns a dict mapping each detected source root to its given files."""
+  found_roots = {}
+  for path in java_files:
+    path_root = path
+    # Recognize these tokens as top-level.
+    while True:
+      path_root = os.path.dirname(path_root)
+      basename = os.path.basename(path_root)
+      assert basename, 'Failed to find source dir for ' + path
+      if basename in ('java', 'src'):
+        break
+      if basename in ('javax', 'org', 'com'):
+        # The parent of a root package dir is the source root.
+        path_root = os.path.dirname(path_root)
+        break
+    if path_root not in found_roots:
+      found_roots[path_root] = []
+    found_roots[path_root].append(path)
+  return found_roots
+
+
+def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir):
+  """Returns exclude patterns to exclude unwanted files but keep wanted files.
+
+  - Shortens exclude list by globbing if possible.
+  - Exclude patterns are relative paths from the parent directory.
+  """
+  excludes = []
+  files_to_include = set(wanted_files)
+  files_to_exclude = set(unwanted_files)
+  while files_to_exclude:
+    unwanted_file = files_to_exclude.pop()
+    target_exclude = os.path.join(
+        os.path.dirname(unwanted_file), '*.java')
+    found_files = set(glob.glob(target_exclude))
+    valid_files = found_files & files_to_include
+    if valid_files:
+      # The directory glob would also drop wanted files: exclude this file only.
+      excludes.append(os.path.relpath(unwanted_file, parent_dir))
+    else:
+      # Safe to exclude the whole directory with a single glob pattern.
+      excludes.append(os.path.relpath(target_exclude, parent_dir))
+      files_to_exclude -= found_files
+  return excludes
+
+
+def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files):
+  """Computes the list of java source directories and exclude patterns.
+
+  1. Computes the root java source directories from the list of files.
+  2. Compute exclude patterns that exclude all extra files only.
+  3. Returns the list of java source directories and exclude patterns.
+  """
+  java_dirs = []
+  excludes = []
+  if java_files:
+    java_files = _RebasePath(java_files)
+    computed_dirs = _ComputeJavaSourceDirs(java_files)
+    java_dirs = computed_dirs.keys()
+    all_found_java_files = set()
+
+    # NOTE: iteritems() is Python 2 only; this script targets Python 2.
+    for directory, files in computed_dirs.iteritems():
+      found_java_files = build_utils.FindInDirectory(directory, '*.java')
+      all_found_java_files.update(found_java_files)
+      unwanted_java_files = set(found_java_files) - set(files)
+      if unwanted_java_files:
+        logging.debug('Directory requires excludes: %s', directory)
+        excludes.extend(
+            _ComputeExcludeFilters(files, unwanted_java_files, directory))
+
+    missing_java_files = set(java_files) - all_found_java_files
+    # Warn only about non-generated files that are missing.
+    missing_java_files = [p for p in missing_java_files
+                          if not p.startswith(output_dir)]
+    if missing_java_files:
+      logging.warning(
+          'Some java files were not found: %s', missing_java_files)
+
+  return java_dirs, excludes
+
+
def _CreateRelativeSymlink(target_path, link_path):
  """Symlinks link_path -> target_path using a relative link target."""
  relative_target = os.path.relpath(target_path, os.path.dirname(link_path))
  logging.debug('Creating symlink %s -> %s', link_path, relative_target)
  os.symlink(relative_target, link_path)
+
+
def _CreateJniLibsDir(output_dir, entry_output_dir, so_files):
  """Creates a jniLibs directory of symlinked .so files when needed.

  Returns a single-element list with the jniLibs dir, or [] when there are
  no .so files.
  """
  if not so_files:
    return []
  symlink_dir = os.path.join(entry_output_dir, _JNI_LIBS_SUBDIR)
  # Start from a clean tree so stale symlinks do not linger.
  shutil.rmtree(symlink_dir, True)
  abi_dir = os.path.join(symlink_dir, _ARMEABI_SUBDIR)
  if not os.path.exists(abi_dir):
    os.makedirs(abi_dir)
  for name in so_files:
    _CreateRelativeSymlink(os.path.join(output_dir, name),
                           os.path.join(abi_dir, name))
  return [symlink_dir]
+
+
def _GenerateLocalProperties(sdk_dir):
  """Returns the contents of the generated local.properties file."""
  lines = [
      '# Generated by //build/android/gradle/generate_gradle.py',
      'sdk.dir=%s' % sdk_dir,
      '',  # Trailing newline.
  ]
  return '\n'.join(lines)
+
+
def _GenerateBaseVars(generator, build_vars):
  """Builds the template variables shared by every generated build.gradle."""
  sdk_version = build_vars['android_sdk_version']
  # Preview SDK versions are letters (e.g. 'Q') and must be quoted in
  # gradle files; numeric versions are used bare.
  if sdk_version.isalpha():
    sdk_version = '"{}"'.format(sdk_version)
  return {
      'compile_sdk_version':
          'android-%s' % build_vars['compile_sdk_version'],
      'target_sdk_version': sdk_version,
      'use_gradle_process_resources':
          generator.use_gradle_process_resources,
      'channel': generator.channel,
  }
+
+
def _GenerateGradleFile(entry, generator, build_vars, jinja_processor):
  """Returns the data for a project's build.gradle.

  Returns None for targets that should not get their own gradle project
  (prebuilts and GN types with no gradle equivalent).
  """
  deps_info = entry.DepsInfo()
  variables = _GenerateBaseVars(generator, build_vars)
  # junit targets put their sources in the 'test' source set; everything
  # else uses 'main'.
  sourceSetName = 'main'

  if deps_info['type'] == 'android_apk':
    target_type = 'android_apk'
  elif deps_info['type'] in ('java_library', 'java_annotation_processor'):
    is_prebuilt = deps_info.get('is_prebuilt', False)
    gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False)
    if is_prebuilt or gradle_treat_as_prebuilt:
      # Prebuilts are consumed as jars by other projects, not generated.
      return None
    elif deps_info['requires_android']:
      target_type = 'android_library'
    else:
      target_type = 'java_library'
  elif deps_info['type'] == 'java_binary':
    target_type = 'java_binary'
    variables['main_class'] = deps_info.get('main_class')
  elif deps_info['type'] == 'junit_binary':
    target_type = 'android_junit'
    sourceSetName = 'test'
  else:
    # No gradle equivalent (e.g. group, action); skip.
    return None

  variables['target_name'] = os.path.splitext(deps_info['name'])[0]
  variables['template_type'] = target_type
  # NOTE: when sourceSetName is 'main' the generated dict below replaces
  # this placeholder; for junit targets 'main' keeps only the manifest
  # entry added two lines down.
  variables['main'] = {}
  variables[sourceSetName] = generator.Generate(entry)
  variables['main']['android_manifest'] = generator.GenerateManifest(entry)

  if entry.android_test_entries:
    variables['android_test'] = []
    for e in entry.android_test_entries:
      test_entry = generator.Generate(e)
      test_entry['android_manifest'] = generator.GenerateManifest(e)
      variables['android_test'].append(test_entry)
      # Drop list values already present in 'main' so the test source set
      # only lists what it adds on top of the apk under test.
      for key, value in test_entry.iteritems():
        if isinstance(value, list):
          test_entry[key] = sorted(set(value) - set(variables['main'][key]))

  # Template name is the prefix of the target type (android/java).
  return jinja_processor.Render(
      _TemplatePath(target_type.split('_')[0]), variables)
+
+
def _IsTestDir(path):
  """True if the path passes through a directory that holds test sources."""
  test_markers = ('javatests/', 'junit/', 'test/', 'testing/')
  return any(marker in path for marker in test_markers)
+
+
# Example target name: //chrome/android:monochrome
def _GetNative(relative_func, target_names):
  """Collects native sources and include dirs for the given GN targets.

  Reads the gn --ide=json output (project.json) from the build directory
  and returns a dict of per-target .cc sources plus the union of include
  dirs, all passed through relative_func.
  """
  out_dir = constants.GetOutDirectory()
  with open(os.path.join(out_dir, 'project.json'), 'r') as project_file:
    project = json.load(project_file)
  all_targets = project['targets']
  root_dir = project['build_settings']['root_path']

  def _Rebase(paths):
    # Paths arrive as //-prefixed source-absolute paths.
    return relative_func(
        sorted(os.path.join(root_dir, p[2:]) for p in paths))

  targets = {}
  includes = set()
  for target_name in target_names:
    info = all_targets[target_name]
    includes.update(info.get('include_dirs', []))
    cc_sources = [s for s in info.get('sources', []) if s.endswith('.cc')]
    if cc_sources:
      # CMake does not like forward slashes or colons for the target name.
      safe_name = target_name.replace('/', '.').replace(':', '-')
      targets[safe_name] = {'sources': _Rebase(cc_sources)}
  return {'targets': targets, 'includes': _Rebase(includes)}
+
+
def _GenerateModuleAll(gradle_output_dir, generator, build_vars,
                       jinja_processor, native_targets):
  """Writes the build.gradle (and optional CMakeLists) for the _all module.

  The _all pseudo-module aggregates every processed java dir, prebuilt and
  resource dir into a single Android Studio project. See
  //docs/android_studio.md for more details.
  """
  module_dir = os.path.join(gradle_output_dir, _MODULE_ALL)

  def _Rel(paths):
    return _RebasePath(paths, module_dir)

  variables = _GenerateBaseVars(generator, build_vars)
  target_type = 'android_apk'
  variables['target_name'] = _MODULE_ALL
  variables['template_type'] = target_type
  java_dirs = sorted(generator.processed_java_dirs)
  variables['main'] = {
      'android_manifest': _Rel(_DEFAULT_ANDROID_MANIFEST_PATH),
      'java_dirs': _Rel([d for d in java_dirs if not _IsTestDir(d)]),
      'prebuilts': _Rel(sorted(generator.processed_prebuilts)),
      'java_excludes': ['**/*.java'],
      'res_dirs': _Rel(sorted(generator.processed_res_dirs)),
  }
  variables['android_test'] = [{
      'java_dirs': _Rel([d for d in java_dirs if _IsTestDir(d)]),
      'java_excludes': ['**/*.java'],
  }]
  if native_targets:
    variables['native'] = _GetNative(
        relative_func=_Rel, target_names=native_targets)
  data = jinja_processor.Render(
      _TemplatePath(target_type.split('_')[0]), variables)
  _WriteFile(os.path.join(module_dir, _GRADLE_BUILD_FILE), data)
  if native_targets:
    cmake_data = jinja_processor.Render(_TemplatePath('cmake'), variables)
    _WriteFile(os.path.join(module_dir, _CMAKE_FILE), cmake_data)
+
+
def _GenerateRootGradle(jinja_processor, channel):
  """Renders the top-level build.gradle for the generated project."""
  variables = {'channel': channel}
  return jinja_processor.Render(_TemplatePath('root'), variables)
+
+
def _GenerateSettingsGradle(project_entries):
  """Returns the data for settings.gradle.

  project_entries is a list of (project name, subdirectory) tuples.
  """
  # Use the checkout's parent directory name as the Studio project name.
  root_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT))
  lines = [
      '// Generated by //build/android/gradle/generate_gradle.py',
      'rootProject.name = "%s"' % root_name,
      'rootProject.projectDir = settingsDir',
      '',
  ]
  for name, subdir in project_entries:
    # Example target:
    # android_webview:android_webview_java__build_config_crbug_908819
    lines.append('include ":%s"' % name)
    lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' %
                 (name, subdir))
  return '\n'.join(lines)
+
+
def _ExtractFile(zip_path, extracted_path):
  """Extracts the archive at zip_path into the extracted_path directory."""
  logging.info('Extracting %s to %s', zip_path, extracted_path)
  archive = zipfile.ZipFile(zip_path)
  try:
    archive.extractall(extracted_path)
  finally:
    archive.close()
+
+
def _ExtractZips(entry_output_dir, zip_tuples):
  """Extracts all srcjars to the directory given by the tuples."""
  # Clear every destination once up front; several zips may share one.
  for extracted_path in set(t[1] for t in zip_tuples):
    assert _IsSubpathOf(extracted_path, entry_output_dir)
    shutil.rmtree(extracted_path, True)
  for zip_path, extracted_path in zip_tuples:
    _ExtractFile(zip_path, extracted_path)
+
+
def _FindAllProjectEntries(main_entries):
  """Returns all _ProjectEntry instances reachable from the given roots.

  Performs a depth-first walk over deps_configs.
  """
  found = set()
  pending = list(main_entries)
  while pending:
    entry = pending.pop()
    if entry not in found:
      found.add(entry)
      for config_path in entry.DepsInfo()['deps_configs']:
        pending.append(_ProjectEntry.FromBuildConfigPath(config_path))
  return list(found)
+
+
def _CombineTestEntries(entries):
  """Folds test apks into the androidTest source set of their target.

  - Speeds up android studio
  - Adds proper dependency between test and apk_under_test
  - Doesn't work for junit yet due to resulting circular dependencies
    (e.g. base_junit_tests > base_junit_test_support > base_java)
  """
  tests_by_apk = collections.defaultdict(list)
  combined = []
  for entry in entries:
    gradle = entry.Gradle()
    if (entry.GnTarget().endswith('_test_apk__apk') and
        'apk_under_test' in gradle):
      tests_by_apk[gradle['apk_under_test']].append(entry)
    else:
      combined.append(entry)
  for entry in combined:
    name = entry.DepsInfo()['name']
    if name in tests_by_apk:
      entry.android_test_entries = tests_by_apk.pop(name)
  # Add unmatched test entries as individual targets.
  for leftovers in tests_by_apk.values():
    combined.extend(leftovers)
  return combined
+
+
def main():
  """Parses arguments, builds prerequisites and writes the gradle project.

  Side effects: runs gn/ninja in the build directory, writes .gradle files
  under --project-dir, may copy the Android SDK, and extracts srcjars.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--output-directory',
                      help='Path to the root build directory.')
  parser.add_argument('-v',
                      '--verbose',
                      dest='verbose_count',
                      default=0,
                      action='count',
                      help='Verbose level')
  parser.add_argument('--target',
                      dest='targets',
                      action='append',
                      help='GN target to generate project for. Replaces set of '
                      'default targets. May be repeated.')
  parser.add_argument('--extra-target',
                      dest='extra_targets',
                      action='append',
                      help='GN target to generate project for, in addition to '
                      'the default ones. May be repeated.')
  parser.add_argument('--project-dir',
                      help='Root of the output project.',
                      default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle'))
  parser.add_argument('--all',
                      action='store_true',
                      help='Include all .java files reachable from any '
                      'apk/test/binary target. On by default unless '
                      '--split-projects is used (--split-projects can '
                      'slow down Studio given too many targets).')
  parser.add_argument('--use-gradle-process-resources',
                      action='store_true',
                      help='Have gradle generate R.java rather than ninja')
  parser.add_argument('--split-projects',
                      action='store_true',
                      help='Split projects by their gn deps rather than '
                      'combining all the dependencies of each target')
  parser.add_argument('--fast',
                      action='store_true',
                      help='Skip generating R.java and other generated files.')
  parser.add_argument('-j',
                      default=1000 if os.path.exists(_SRC_INTERNAL) else 50,
                      help='Value for number of parallel jobs for ninja')
  parser.add_argument('--native-target',
                      dest='native_targets',
                      action='append',
                      help='GN native targets to generate for. May be '
                      'repeated.')
  parser.add_argument('--compile-sdk-version',
                      type=int,
                      default=0,
                      help='Override compileSdkVersion for android sdk docs. '
                      'Useful when sources for android_sdk_version is '
                      'not available in Android Studio.')
  parser.add_argument(
      '--sdk-path',
      default=os.path.expanduser('~/Android/Sdk'),
      help='The path to use as the SDK root, overrides the '
      'default at ~/Android/Sdk.')
  version_group = parser.add_mutually_exclusive_group()
  version_group.add_argument('--beta',
                      action='store_true',
                      help='Generate a project that is compatible with '
                      'Android Studio Beta.')
  version_group.add_argument('--canary',
                      action='store_true',
                      help='Generate a project that is compatible with '
                      'Android Studio Canary.')
  args = parser.parse_args()
  if args.output_directory:
    constants.SetOutputDirectory(args.output_directory)
  constants.CheckOutputDirectory()
  output_dir = constants.GetOutDirectory()
  devil_chromium.Initialize(output_directory=output_dir)
  run_tests_helper.SetLogLevel(args.verbose_count)

  if args.use_gradle_process_resources:
    assert args.split_projects, (
        'Gradle resources does not work without --split-projects.')

  _gradle_output_dir = os.path.abspath(
      args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir))
  logging.warning('Creating project at: %s', _gradle_output_dir)

  # Generate for "all targets" by default when not using --split-projects (too
  # slow), and when no --target has been explicitly set. "all targets" means all
  # java targets that are depended on by an apk or java_binary (leaf
  # java_library targets will not be included).
  args.all = args.all or (not args.split_projects and not args.targets)

  targets_from_args = set(args.targets or _DEFAULT_TARGETS)
  if args.extra_targets:
    targets_from_args.update(args.extra_targets)

  if args.all:
    if args.native_targets:
      # Native targets are only described by the json project description.
      _RunGnGen(output_dir, ['--ide=json'])
    elif not os.path.exists(os.path.join(output_dir, 'build.ninja')):
      _RunGnGen(output_dir)
    else:
      # Faster than running "gn gen" in the no-op case.
      _RunNinja(output_dir, ['build.ninja'], args.j)
    # Query ninja for all __build_config_crbug_908819 targets.
    targets = _QueryForAllGnTargets(output_dir)
  else:
    assert not args.native_targets, 'Native editing requires --all.'
    # Map test apk targets onto the apk they wrap.
    targets = [re.sub(r'_test_apk$', '_test_apk__apk', t)
               for t in targets_from_args]
    # Necessary after "gn clean"
    if not os.path.exists(os.path.join(output_dir, 'build_vars.txt')):
      _RunGnGen(output_dir)

  build_vars = _ReadPropertiesFile(os.path.join(output_dir, 'build_vars.txt'))
  jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR)
  # Channel picks the matching gradle template flavor for the Studio version.
  if args.beta:
    channel = 'beta'
  elif args.canary:
    channel = 'canary'
  else:
    channel = 'stable'
  if args.compile_sdk_version:
    build_vars['compile_sdk_version'] = args.compile_sdk_version
  else:
    build_vars['compile_sdk_version'] = build_vars['android_sdk_version']
  generator = _ProjectContextGenerator(_gradle_output_dir, build_vars,
      args.use_gradle_process_resources, jinja_processor, args.split_projects,
      channel)

  main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets]

  logging.warning('Building .build_config files...')
  _RunNinja(
      output_dir, [e.NinjaBuildConfigTarget() for e in main_entries], args.j)

  if args.all:
    # There are many unused libraries, so restrict to those that are actually
    # used by apks/binaries/tests or that are explicitly mentioned in --targets.
    main_entries = [e for e in main_entries if (
        e.GetType() in ('android_apk', 'java_binary', 'junit_binary') or
        e.GnTarget() in targets_from_args or
        e.GnTarget().endswith('_test_apk__apk'))]

  if args.split_projects:
    main_entries = _FindAllProjectEntries(main_entries)

  logging.info('Generating for %d targets.', len(main_entries))

  entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()]
  logging.info('Creating %d projects for targets.', len(entries))

  logging.warning('Writing .gradle files...')
  project_entries = []
  # When only one entry will be generated we want it to have a valid
  # build.gradle file with its own AndroidManifest.
  for entry in entries:
    data = _GenerateGradleFile(entry, generator, build_vars, jinja_processor)
    if data and not args.all:
      project_entries.append((entry.ProjectName(), entry.GradleSubdir()))
      _WriteFile(
          os.path.join(generator.EntryOutputDir(entry), _GRADLE_BUILD_FILE),
          data)
  if args.all:
    project_entries.append((_MODULE_ALL, _MODULE_ALL))
    _GenerateModuleAll(_gradle_output_dir, generator, build_vars,
                       jinja_processor, args.native_targets)

  _WriteFile(os.path.join(generator.project_dir, _GRADLE_BUILD_FILE),
             _GenerateRootGradle(jinja_processor, channel))

  _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'),
             _GenerateSettingsGradle(project_entries))

  # Ensure the Android Studio sdk is correctly initialized.
  if not os.path.exists(args.sdk_path):
    # Help first-time users avoid Android Studio forcibly changing back to
    # the previous default due to not finding a valid sdk under this dir.
    shutil.copytree(_RebasePath(build_vars['android_sdk_root']), args.sdk_path)
  _WriteFile(
      os.path.join(generator.project_dir, 'local.properties'),
      _GenerateLocalProperties(args.sdk_path))

  zip_tuples = []
  generated_inputs = set()
  for entry in entries:
    entries_to_gen = [entry]
    entries_to_gen.extend(entry.android_test_entries)
    for entry_to_gen in entries_to_gen:
      # Build all paths references by .gradle that exist within output_dir.
      generated_inputs.update(
          generator.GeneratedInputs(entry_to_gen, args.fast))
      zip_tuples.extend(generator.GeneratedZips(entry_to_gen, args.fast))
  if generated_inputs:
    logging.warning('Building generated source files...')
    targets = _RebasePath(generated_inputs, output_dir)
    _RunNinja(output_dir, targets, args.j)
  if zip_tuples:
    _ExtractZips(generator.project_dir, zip_tuples)

  logging.warning('Generated projects for Android Studio %s', channel)
  logging.warning('For more tips: https://chromium.googlesource.com/chromium'
                  '/src.git/+/master/docs/android_studio.md')
+
+
# Entry point when run as a script.
if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/gradle/gn_to_cmake.py b/deps/v8/build/android/gradle/gn_to_cmake.py
new file mode 100755
index 0000000000..dd6c1323c0
--- /dev/null
+++ b/deps/v8/build/android/gradle/gn_to_cmake.py
@@ -0,0 +1,687 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Usage: gn_to_cmake.py <json_file_name>
+
+gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
+
+or
+
+gn gen out/config --ide=json
+python gn/gn_to_cmake.py out/config/project.json
+
+The first is recommended, as it will auto-update.
+"""
+
+import functools
+import json
+import posixpath
+import string
+import sys
+
+
def CMakeStringEscape(a):
  """Escapes 'a' so it can appear inside a double-quoted CMake string.

  Backslash is escaped first (it would otherwise modify the following
  character), then ';' (the CMake list separator) and '"' (the string
  terminator). '#' needs no escaping while the lexer is in string state.
  """
  escaped = a.replace('\\', '\\\\')
  escaped = escaped.replace(';', '\\;')
  return escaped.replace('"', '\\"')
+
+
def CMakeTargetEscape(a):
  """Escapes 'a' for use as a CMake target name.

  CMP0037 in CMake 3.0 restricts target names to "^[A-Za-z0-9_.:+-]+$"
  (':' only for imported targets), so every other character becomes '__'.
  """
  allowed = set(string.ascii_letters) | set(string.digits) | set('_.+-')
  return ''.join(c if c in allowed else '__' for c in a)
+
+
def SetVariable(out, variable_name, value):
  """Writes a CMake set() command binding variable_name to a single value."""
  out.write('set("%s" "%s")\n' % (CMakeStringEscape(variable_name),
                                  CMakeStringEscape(value)))
+
+
def SetVariableList(out, variable_name, values):
  """Writes a CMake command defining variable_name as a list of values.

  Empty and single-element lists degrade to a plain set() command.
  """
  if not values:
    return SetVariable(out, variable_name, "")
  if len(values) == 1:
    return SetVariable(out, variable_name, values[0])
  escaped = [CMakeStringEscape(value) for value in values]
  out.write('list(APPEND "%s"\n "%s")\n' %
            (CMakeStringEscape(variable_name), '"\n "'.join(escaped)))
+
+
def SetFilesProperty(output, variable, property_name, values, sep):
  """Sets property_name on the source files held in ${variable}.

  Values are concatenated with sep (including a trailing sep).
  """
  output.write('set_source_files_properties(')
  WriteVariable(output, variable)
  output.write(' PROPERTIES %s "' % property_name)
  for value in values:
    output.write(CMakeStringEscape(value) + sep)
  output.write('")\n')
+
+
def SetCurrentTargetProperty(out, property_name, values, sep=''):
  """Sets property_name on the ${target} currently being defined.

  Values are concatenated with sep (including a trailing sep).
  """
  joined = ''.join(CMakeStringEscape(value) + sep for value in values)
  out.write('set_target_properties("${target}" PROPERTIES %s "%s")\n'
            % (property_name, joined))
+
+
def WriteVariable(output, variable_name, prepend=None):
  """Writes a ${variable_name} reference, optionally preceded by prepend."""
  prefix = prepend or ''
  output.write('%s${%s}' % (prefix, variable_name))
+
+
# See GetSourceFileType in gn
# Maps a file extension to the language bucket used when grouping sources;
# the buckets drive per-language flag handling in WriteCompilerFlags.
source_file_types = {
  '.cc': 'cxx',
  '.cpp': 'cxx',
  '.cxx': 'cxx',
  '.c': 'c',
  '.s': 'asm',
  '.S': 'asm',
  '.asm': 'asm',
  '.o': 'obj',
  '.obj': 'obj',
}
+
+
class CMakeTargetType(object):
  """Describes how a GN target type is declared in CMake.

  command: the CMake command that declares the target.
  modifier: optional keyword following the target name (e.g. SHARED,
      OBJECT, or SOURCES for custom targets).
  property_modifier: output-property prefix (RUNTIME/LIBRARY/ARCHIVE),
      or None when not applicable.
  is_linkable: whether other targets may link against this one.
  """

  def __init__(self, command, modifier, property_modifier, is_linkable):
    self.command = command
    self.modifier = modifier
    self.property_modifier = property_modifier
    self.is_linkable = is_linkable


# Fallback mapping for GN types with no direct CMake equivalent.
CMakeTargetType.custom = CMakeTargetType('add_custom_target', 'SOURCES',
                                         None, False)
+
# See GetStringForOutputType in gn
# Maps each GN target type to the CMakeTargetType that declares it. Types
# with no native CMake counterpart (groups, actions, copies, bundles) fall
# back to a custom target that merely lists sources.
cmake_target_types = {
  'unknown': CMakeTargetType.custom,
  'group': CMakeTargetType.custom,
  'executable': CMakeTargetType('add_executable', None, 'RUNTIME', True),
  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY', True),
  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY', True),
  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE', False),
  'source_set': CMakeTargetType('add_library', 'OBJECT', None, False),
  'copy': CMakeTargetType.custom,
  'action': CMakeTargetType.custom,
  'action_foreach': CMakeTargetType.custom,
  'bundle_data': CMakeTargetType.custom,
  'create_bundle': CMakeTargetType.custom,
}
+
+
def FindFirstOf(s, a):
  """Returns the lowest index in s at which any character of a occurs.

  Raises ValueError when no character of a is present in s (min of an
  empty sequence), matching the original behavior.
  """
  positions = [s.find(ch) for ch in a if ch in s]
  return min(positions)
+
+
def GetCMakeTargetName(gn_target_name):
  """Converts a fully-qualified GN label into a CMake-safe target name."""
  # See <chromium>/src/tools/gn/label.cc#Resolve
  # //base/test:test_support(//build/toolchain/win:msvc)
  path_separator = FindFirstOf(gn_target_name, (':', '('))
  location = None
  name = None
  toolchain = None
  # NOTE(review): a separator at index 0 is falsy and is treated the same
  # as "no separator". Labels always start with '//' so this cannot
  # trigger here, but confirm before reusing this logic elsewhere.
  if not path_separator:
    location = gn_target_name[2:]
  else:
    # Strip the leading '//' from the directory part.
    location = gn_target_name[2:path_separator]
    toolchain_separator = gn_target_name.find('(', path_separator)
    if toolchain_separator == -1:
      name = gn_target_name[path_separator + 1:]
    else:
      if toolchain_separator > path_separator:
        name = gn_target_name[path_separator + 1:toolchain_separator]
      assert gn_target_name.endswith(')')
      toolchain = gn_target_name[toolchain_separator + 1:-1]
  assert location or name

  cmake_target_name = None
  if location.endswith('/' + name):
    # Target named after its directory: avoid 'foo/bar_bar' duplication.
    cmake_target_name = location
  elif location:
    cmake_target_name = location + '_' + name
  else:
    cmake_target_name = name
  if toolchain:
    cmake_target_name += '--' + toolchain
  # Replace characters CMP0037 forbids in target names.
  return CMakeTargetEscape(cmake_target_name)
+
+
class Project(object):
  """Wraps the parsed gn --ide=json project description.

  Provides path resolution helpers and memoized traversal of OBJECT
  (source_set) library dependencies.
  """

  def __init__(self, project_json):
    self.targets = project_json['targets']
    build_settings = project_json['build_settings']
    self.root_path = build_settings['root_path']
    # build_dir is source-absolute ('//out/x'); drop the leading slashes.
    self.build_path = posixpath.join(self.root_path,
                                     build_settings['build_dir'][2:])
    # Memo table for GetObjectSourceDependencies.
    self.object_source_deps = {}

  def GetAbsolutePath(self, path):
    """Resolves a //-prefixed source-absolute path; others pass through."""
    if not path.startswith("//"):
      return path
    return self.root_path + "/" + path[2:]

  def GetObjectSourceDependencies(self, gn_target_name, object_dependencies):
    """All OBJECT libraries whose sources have not been absorbed."""
    memoized = self.object_source_deps.get(gn_target_name)
    if memoized is not None:
      object_dependencies.update(memoized)
      return
    collected = set()
    for dep in self.targets[gn_target_name].get('deps', []):
      dep_type = self.targets[dep].get('type', None)
      if dep_type == 'source_set':
        collected.add(dep)
      # Linkable targets absorb their object deps; stop recursing there.
      if dep_type not in gn_target_types_that_absorb_objects:
        self.GetObjectSourceDependencies(dep, collected)
    self.object_source_deps[gn_target_name] = collected
    object_dependencies.update(collected)

  def GetObjectLibraryDependencies(self, gn_target_name, object_dependencies):
    """All OBJECT libraries whose libraries have not been absorbed."""
    for dep in self.targets[gn_target_name].get('deps', []):
      if self.targets[dep].get('type', None) == 'source_set':
        object_dependencies.add(dep)
        self.GetObjectLibraryDependencies(dep, object_dependencies)
+
+
class Target(object):
  """Snapshot of one GN target together with its CMake mapping.

  cmake_type is None when the GN type has no entry in cmake_target_types.
  """
  def __init__(self, gn_target_name, project):
    self.gn_name = gn_target_name
    self.properties = project.targets[self.gn_name]
    self.cmake_name = GetCMakeTargetName(self.gn_name)
    self.gn_type = self.properties.get('type', None)
    self.cmake_type = cmake_target_types.get(self.gn_type, None)
+
+
def WriteAction(out, target, project, sources, synthetic_dependencies):
  """Emits an add_custom_command for a GN 'action' target.

  The output-list variable is added to synthetic_dependencies so the
  enclosing custom target depends on the action having run.
  """
  outputs = []
  output_directories = set()
  for output in target.properties.get('outputs', []):
    output_abs_path = project.GetAbsolutePath(output)
    outputs.append(output_abs_path)
    output_directory = posixpath.dirname(output_abs_path)
    if output_directory:
      output_directories.add(output_directory)
  outputs_name = '${target}__output'
  SetVariableList(out, outputs_name, outputs)

  out.write('add_custom_command(OUTPUT ')
  WriteVariable(out, outputs_name)
  out.write('\n')

  if output_directories:
    # Create output dirs up front; the script may assume they exist.
    out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
    out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
    out.write('"\n')

  script = target.properties['script']
  arguments = target.properties['args']
  out.write(' COMMAND python "')
  out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
  out.write('"')
  if arguments:
    out.write('\n "')
    out.write('"\n "'.join([CMakeStringEscape(a) for a in arguments]))
    out.write('"')
  out.write('\n')

  # Depend on every source bucket so the action reruns when inputs change.
  out.write(' DEPENDS ')
  for sources_type_name in sources.values():
    WriteVariable(out, sources_type_name, ' ')
  out.write('\n')

  #TODO: CMake 3.7 is introducing DEPFILE

  out.write(' WORKING_DIRECTORY "')
  out.write(CMakeStringEscape(project.build_path))
  out.write('"\n')

  out.write(' COMMENT "Action: ${target}"\n')

  out.write(' VERBATIM)\n')

  synthetic_dependencies.add(outputs_name)
+
+
def ExpandPlaceholders(source, a):
  """Substitutes gn {{source*}} placeholders in a with parts of source."""
  #TODO: {{source_gen_dir}}, {{source_out_dir}}, {{response_file_name}}
  source_dir, file_part = posixpath.split(source)
  name_part = posixpath.splitext(file_part)[0]
  replacements = (
      ('{{source}}', source),
      ('{{source_file_part}}', file_part),
      ('{{source_name_part}}', name_part),
      ('{{source_dir}}', source_dir),
      ('{{source_root_relative_dir}}', source_dir),
  )
  expanded = a
  for placeholder, value in replacements:
    expanded = expanded.replace(placeholder, value)
  return expanded
+
+
def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
  """Emits one add_custom_command per input of an action_foreach target.

  Each input gets its own command whose outputs are the corresponding
  slice of the target's outputs; each per-command output variable is added
  to synthetic_dependencies so the enclosing custom target depends on all
  of the commands having run.
  """
  all_outputs = target.properties.get('outputs', [])
  inputs = target.properties.get('sources', [])
  # TODO: consider expanding 'output_patterns' instead.
  # Use floor division: under Python 3 '/' yields a float, which would
  # break the slice arithmetic below. gn emits the same number of outputs
  # for every input, so the division is exact either way.
  outputs_per_input = len(all_outputs) // len(inputs)
  for count, source in enumerate(inputs):
    source_abs_path = project.GetAbsolutePath(source)

    outputs = []
    output_directories = set()
    for output in all_outputs[outputs_per_input * count:
                              outputs_per_input * (count+1)]:
      output_abs_path = project.GetAbsolutePath(output)
      outputs.append(output_abs_path)
      output_directory = posixpath.dirname(output_abs_path)
      if output_directory:
        output_directories.add(output_directory)
    outputs_name = '${target}__output_' + str(count)
    SetVariableList(out, outputs_name, outputs)

    out.write('add_custom_command(OUTPUT ')
    WriteVariable(out, outputs_name)
    out.write('\n')

    if output_directories:
      # Create output dirs up front; the script may assume they exist.
      out.write(' COMMAND ${CMAKE_COMMAND} -E make_directory "')
      out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
      out.write('"\n')

    script = target.properties['script']
    # TODO: need to expand {{xxx}} in arguments
    arguments = target.properties['args']
    out.write(' COMMAND python "')
    out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
    out.write('"')
    if arguments:
      out.write('\n "')
      # Expand per-source placeholders ({{source}}, etc.) in each argument.
      expand = functools.partial(ExpandPlaceholders, source_abs_path)
      out.write('"\n "'.join(
          [CMakeStringEscape(expand(a)) for a in arguments]))
      out.write('"')
    out.write('\n')

    out.write(' DEPENDS')
    if 'input' in sources:
      WriteVariable(out, sources['input'], ' ')
    out.write(' "')
    out.write(CMakeStringEscape(source_abs_path))
    out.write('"\n')

    #TODO: CMake 3.7 is introducing DEPFILE

    out.write(' WORKING_DIRECTORY "')
    out.write(CMakeStringEscape(project.build_path))
    out.write('"\n')

    out.write(' COMMENT "Action ${target} on ')
    out.write(CMakeStringEscape(source_abs_path))
    out.write('"\n')

    out.write(' VERBATIM)\n')

    synthetic_dependencies.add(outputs_name)
+
+
def WriteCopy(out, target, project, sources, synthetic_dependencies):
  """Emits an add_custom_command that copies each input to its output.

  The output-list variable is added to synthetic_dependencies so the
  enclosing custom target depends on the copies being performed.
  """
  # TODO: consider expanding 'output_patterns' instead.
  src_paths = target.properties.get('sources', [])
  dest_paths = [project.GetAbsolutePath(o)
                for o in target.properties.get('outputs', [])]
  outputs_name = '${target}__output'
  SetVariableList(out, outputs_name, dest_paths)

  out.write('add_custom_command(OUTPUT ')
  WriteVariable(out, outputs_name)
  out.write('\n')

  # One copy command per (input, output) pair.
  for src, dst in zip(src_paths, dest_paths):
    out.write(' COMMAND ${CMAKE_COMMAND} -E copy "%s" "%s"\n' %
              (CMakeStringEscape(project.GetAbsolutePath(src)),
               CMakeStringEscape(dst)))

  out.write(' DEPENDS ')
  for sources_type_name in sources.values():
    WriteVariable(out, sources_type_name, ' ')
  out.write('\n')

  out.write(' WORKING_DIRECTORY "%s"\n' %
            CMakeStringEscape(project.build_path))
  out.write(' COMMENT "Copy ${target}"\n')
  out.write(' VERBATIM)\n')

  synthetic_dependencies.add(outputs_name)
+
+
def WriteCompilerFlags(out, target, project, sources):
  """Emits include dirs, defines, compile flags and link flags for ${target}."""
  # Hack, set linker language to c if no c or cxx files present.
  if not 'c' in sources and not 'cxx' in sources:
    SetCurrentTargetProperty(out, 'LINKER_LANGUAGE', ['C'])

  # Mark uncompiled sources as uncompiled.
  if 'input' in sources:
    SetFilesProperty(out, sources['input'], 'HEADER_FILE_ONLY', ('True',), '')
  if 'other' in sources:
    SetFilesProperty(out, sources['other'], 'HEADER_FILE_ONLY', ('True',), '')

  # Mark object sources as linkable.
  if 'obj' in sources:
    SetFilesProperty(out, sources['obj'], 'EXTERNAL_OBJECT', ('True',), '')

  # TODO: 'output_name', 'output_dir', 'output_extension'
  # This includes using 'source_outputs' to direct compiler output.

  # Includes
  includes = target.properties.get('include_dirs', [])
  if includes:
    out.write('set_property(TARGET "${target}" ')
    out.write('APPEND PROPERTY INCLUDE_DIRECTORIES')
    for include_dir in includes:
      out.write('\n "')
      out.write(project.GetAbsolutePath(include_dir))
      out.write('"')
    out.write(')\n')

  # Defines
  defines = target.properties.get('defines', [])
  if defines:
    SetCurrentTargetProperty(out, 'COMPILE_DEFINITIONS', defines, ';')

  # Compile flags
  # "arflags", "asmflags", "cflags",
  # "cflags_c", "cflags_cc", "cflags_objc", "cflags_objcc"
  # CMake does not have per target lang compile flags.
  # TODO: $<$<COMPILE_LANGUAGE:CXX>:cflags_cc style generator expression.
  # http://public.kitware.com/Bug/view.php?id=14857
  flags = []
  flags.extend(target.properties.get('cflags', []))
  cflags_asm = target.properties.get('asmflags', [])
  cflags_c = target.properties.get('cflags_c', [])
  cflags_cxx = target.properties.get('cflags_cc', [])
  # Language-specific flags can be applied target-wide only when the target
  # compiles a single language; otherwise fall back to per-file properties.
  if 'c' in sources and not any(k in sources for k in ('asm', 'cxx')):
    flags.extend(cflags_c)
  elif 'cxx' in sources and not any(k in sources for k in ('asm', 'c')):
    flags.extend(cflags_cxx)
  else:
    # TODO: This is broken, one cannot generally set properties on files,
    # as other targets may require different properties on the same files.
    if 'asm' in sources and cflags_asm:
      SetFilesProperty(out, sources['asm'], 'COMPILE_FLAGS', cflags_asm, ' ')
    if 'c' in sources and cflags_c:
      SetFilesProperty(out, sources['c'], 'COMPILE_FLAGS', cflags_c, ' ')
    if 'cxx' in sources and cflags_cxx:
      SetFilesProperty(out, sources['cxx'], 'COMPILE_FLAGS', cflags_cxx, ' ')
  if flags:
    SetCurrentTargetProperty(out, 'COMPILE_FLAGS', flags, ' ')

  # Linker flags
  ldflags = target.properties.get('ldflags', [])
  if ldflags:
    SetCurrentTargetProperty(out, 'LINK_FLAGS', ldflags, ' ')
+
+
# GN target types whose CMake equivalents must list the object files of their
# OBJECT-library dependencies directly as sources (CMake does not propagate
# $<TARGET_OBJECTS> transitively); consumed by WriteSourceVariables.
gn_target_types_that_absorb_objects = (
  'executable',
  'loadable_module',
  'shared_library',
  'static_library'
)
+
+
def WriteSourceVariables(out, target, project):
  """Buckets the target's sources by file type and emits one CMake list
  variable per non-empty bucket.

  Returns:
    dict mapping source-type name ('c', 'cxx', 'asm', ...) to the CMake
    variable name holding that bucket's file list.
  """
  # gn separates the sheep from the goats based on file extensions.
  # A full separation is done here because of flag handing (see Compile flags).
  buckets = {'cxx':[], 'c':[], 'asm':[],
             'obj':[], 'obj_target':[], 'input':[], 'other':[]}

  # TODO .def files on Windows
  for src in target.properties.get('sources', []):
    _, extension = posixpath.splitext(src)
    bucket = source_file_types.get(extension, 'other')
    buckets[bucket].append(project.GetAbsolutePath(src))

  for extra_input in target.properties.get('inputs', []):
    buckets['input'].append(project.GetAbsolutePath(extra_input))

  # OBJECT library dependencies need to be listed as sources.
  # Only executables and non-OBJECT libraries may reference an OBJECT library.
  # https://gitlab.kitware.com/cmake/cmake/issues/14778
  if target.gn_type in gn_target_types_that_absorb_objects:
    absorbed = set()
    project.GetObjectSourceDependencies(target.gn_name, absorbed)
    for dep in absorbed:
      dep_cmake_name = GetCMakeTargetName(dep)
      buckets['obj_target'].append('$<TARGET_OBJECTS:' + dep_cmake_name + '>')

  variables = {}
  for kind, file_list in buckets.items():
    if not file_list:
      continue
    variables[kind] = '${target}__' + kind + '_srcs'
    SetVariableList(out, variables[kind], file_list)
  return variables
+
+
def WriteTarget(out, target, project):
  """Writes the full CMake definition for one GN target: the add_* command,
  compiler flags, and dependency/link commands.

  Args:
    out: writable file-like object receiving CMake commands.
    target: Target to emit.
    project: Project supplying dependency and path lookups.
  """
  out.write('\n#')
  out.write(target.gn_name)
  out.write('\n')

  if target.cmake_type is None:
    # print() form works under both Python 2 and Python 3 for a single
    # argument (the original used a Python-2-only print statement).
    print('Target {} has unknown target type {}, skipping.'.format(
        target.gn_name, target.gn_type))
    return

  SetVariable(out, 'target', target.cmake_name)

  sources = WriteSourceVariables(out, target, project)

  # Actions/copies register their output files here so the add_* command
  # below can DEPEND on them.
  synthetic_dependencies = set()
  if target.gn_type == 'action':
    WriteAction(out, target, project, sources, synthetic_dependencies)
  if target.gn_type == 'action_foreach':
    WriteActionForEach(out, target, project, sources, synthetic_dependencies)
  if target.gn_type == 'copy':
    WriteCopy(out, target, project, sources, synthetic_dependencies)

  out.write(target.cmake_type.command)
  out.write('("${target}"')
  if target.cmake_type.modifier is not None:
    out.write(' ')
    out.write(target.cmake_type.modifier)
  for sources_type_name in sources.values():
    WriteVariable(out, sources_type_name, ' ')
  if synthetic_dependencies:
    out.write(' DEPENDS')
    for synthetic_dependency in synthetic_dependencies:
      WriteVariable(out, synthetic_dependency, ' ')
  out.write(')\n')

  if target.cmake_type.command != 'add_custom_target':
    WriteCompilerFlags(out, target, project, sources)

  libraries = set()
  nonlibraries = set()

  dependencies = set(target.properties.get('deps', []))
  # Transitive OBJECT libraries are in sources.
  # Those sources are dependent on the OBJECT library dependencies.
  # Those sources cannot bring in library dependencies.
  object_dependencies = set()
  if target.gn_type != 'source_set':
    project.GetObjectLibraryDependencies(target.gn_name, object_dependencies)
  for object_dependency in object_dependencies:
    dependencies.update(project.targets.get(object_dependency).get('deps', []))

  for dependency in dependencies:
    gn_dependency_type = project.targets.get(dependency, {}).get('type', None)
    cmake_dependency_type = cmake_target_types.get(gn_dependency_type, None)
    cmake_dependency_name = GetCMakeTargetName(dependency)
    # NOTE(review): if gn_dependency_type is unknown, cmake_dependency_type is
    # None and the attribute access below raises AttributeError — confirm all
    # dependency types appear in cmake_target_types.
    if cmake_dependency_type.command != 'add_library':
      nonlibraries.add(cmake_dependency_name)
    elif cmake_dependency_type.modifier != 'OBJECT':
      if target.cmake_type.is_linkable:
        libraries.add(cmake_dependency_name)
      else:
        nonlibraries.add(cmake_dependency_name)

  # Non-library dependencies.
  if nonlibraries:
    out.write('add_dependencies("${target}"')
    for nonlibrary in nonlibraries:
      out.write('\n "')
      out.write(nonlibrary)
      out.write('"')
    out.write(')\n')

  # Non-OBJECT library dependencies.
  external_libraries = target.properties.get('libs', [])
  if target.cmake_type.is_linkable and (external_libraries or libraries):
    library_dirs = target.properties.get('lib_dirs', [])
    if library_dirs:
      SetVariableList(out, '${target}__library_directories', library_dirs)

    system_libraries = []
    for external_library in external_libraries:
      if '/' in external_library:
        # Path-like entries are used verbatim (absolutized).
        libraries.add(project.GetAbsolutePath(external_library))
      else:
        if external_library.endswith('.framework'):
          external_library = external_library[:-len('.framework')]
        system_library = 'library__' + external_library
        if library_dirs:
          # Per-target cache variable, since lib_dirs differ per target.
          system_library = system_library + '__for_${target}'
        out.write('find_library("')
        out.write(CMakeStringEscape(system_library))
        out.write('" "')
        out.write(CMakeStringEscape(external_library))
        out.write('"')
        if library_dirs:
          out.write(' PATHS "')
          WriteVariable(out, '${target}__library_directories')
          out.write('"')
        out.write(')\n')
        system_libraries.append(system_library)
    out.write('target_link_libraries("${target}"')
    for library in libraries:
      out.write('\n "')
      out.write(CMakeStringEscape(library))
      out.write('"')
    for system_library in system_libraries:
      WriteVariable(out, system_library, '\n "')
      out.write('"')
    out.write(')\n')
+
+
def WriteProject(project):
  """Writes CMakeLists.txt and CMakeLists.ext into the project build dir.

  CMakeLists.txt is a small trampoline that re-runs ninja (to refresh the
  gn-generated files) and includes CMakeLists.ext, which holds the actual
  target definitions.

  Fix vs. original: both output files are now managed with `with` blocks —
  the original never closed the CMakeLists.ext handle, leaking it and
  deferring the flush to interpreter shutdown.
  """
  with open(posixpath.join(project.build_path, 'CMakeLists.txt'), 'w+') as out:
    out.write('# Generated by gn_to_cmake.py.\n')
    out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
    out.write('cmake_policy(VERSION 2.8.8)\n\n')

    # Update the gn generated ninja build.
    # If a build file has changed, this will update CMakeLists.ext if
    # gn gen out/config --ide=json --json-ide-script=../../gn/gn_to_cmake.py
    # style was used to create this config.
    out.write('execute_process(COMMAND ninja -C "')
    out.write(CMakeStringEscape(project.build_path))
    out.write('" build.ninja)\n')

    out.write('include(CMakeLists.ext)\n')

  with open(posixpath.join(project.build_path, 'CMakeLists.ext'), 'w+') as out:
    out.write('# Generated by gn_to_cmake.py.\n')
    out.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
    out.write('cmake_policy(VERSION 2.8.8)\n')

    # The following appears to be as-yet undocumented.
    # http://public.kitware.com/Bug/view.php?id=8392
    out.write('enable_language(ASM)\n\n')
    # ASM-ATT does not support .S files.
    # output.write('enable_language(ASM-ATT)\n')

    # Current issues with automatic re-generation:
    # The gn generated build.ninja target uses build.ninja.d
    # but build.ninja.d does not contain the ide or gn.
    # Currently the ide is not run if the project.json file is not changed
    # but the ide needs to be run anyway if it has itself changed.
    # This can be worked around by deleting the project.json file.
    out.write('file(READ "')
    gn_deps_file = posixpath.join(project.build_path, 'build.ninja.d')
    out.write(CMakeStringEscape(gn_deps_file))
    out.write('" "gn_deps_string" OFFSET ')
    out.write(str(len('build.ninja: ')))
    out.write(')\n')
    # One would think this would need to worry about escaped spaces
    # but gn doesn't escape spaces here (it generates invalid .d files).
    out.write('string(REPLACE " " ";" "gn_deps" ${gn_deps_string})\n')
    out.write('foreach("gn_dep" ${gn_deps})\n')
    out.write('  configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n')
    out.write('endforeach("gn_dep")\n')

    for target_name in project.targets.keys():
      out.write('\n')
      WriteTarget(out, Target(target_name, project), project)
+
+
def main():
  """Entry point: reads the gn --ide=json project file named on the command
  line and emits the corresponding CMake files next to it."""
  if len(sys.argv) != 2:
    # print() form is valid under both Python 2 and 3 (original used the
    # Python-2-only print statement).
    print('Usage: ' + sys.argv[0] + ' <json_file_name>')
    # sys.exit instead of the site-provided exit() builtin, which is not
    # guaranteed to exist when run with -S.
    sys.exit(1)

  json_path = sys.argv[1]
  with open(json_path, 'r') as json_file:
    project = json.load(json_file)

  WriteProject(Project(project))


if __name__ == "__main__":
  main()
diff --git a/deps/v8/build/android/gradle/java.jinja b/deps/v8/build/android/gradle/java.jinja
new file mode 100644
index 0000000000..92fe575af8
--- /dev/null
+++ b/deps/v8/build/android/gradle/java.jinja
@@ -0,0 +1,41 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+apply plugin: "java"
+{% if template_type == 'java_binary' %}
+apply plugin: "application"
+{% endif %}
+
+sourceSets {
+ main {
+ java.srcDirs = [
+{% for path in main.java_dirs %}
+ "{{ path }}",
+{% endfor %}
+ ]
+{% if main.java_excludes is defined %}
+ java.filter.exclude(
+{% for path in main.java_excludes %}
+ "{{ path }}",
+{% endfor %}
+ )
+{% endif %}
+ }
+}
+
+sourceCompatibility = JavaVersion.VERSION_1_8
+targetCompatibility = JavaVersion.VERSION_1_8
+
+{% if template_type == 'java_binary' %}
+applicationName = "{{ target_name }}"
+{% if main_class %}
+mainClassName = "{{ main_class }}"
+{% endif %}
+{% endif %}
+{% if template_type in ('java_binary', 'java_library') %}
+archivesBaseName = "{{ target_name }}"
+{% endif %}
+
+{% include 'dependencies.jinja' %}
diff --git a/deps/v8/build/android/gradle/manifest.jinja b/deps/v8/build/android/gradle/manifest.jinja
new file mode 100644
index 0000000000..dea7071eb6
--- /dev/null
+++ b/deps/v8/build/android/gradle/manifest.jinja
@@ -0,0 +1,7 @@
+{# Copyright 2017 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="{{ package }}">
+</manifest>
diff --git a/deps/v8/build/android/gradle/root.jinja b/deps/v8/build/android/gradle/root.jinja
new file mode 100644
index 0000000000..a53591e965
--- /dev/null
+++ b/deps/v8/build/android/gradle/root.jinja
@@ -0,0 +1,20 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+// Generated by //build/android/generate_gradle.py
+
+buildscript {
+ repositories {
+ google()
+ jcenter()
+ }
+ dependencies {
+{% if channel == 'canary' %}
+ classpath "com.android.tools.build:gradle:3.5.0-alpha07"
+{% elif channel == 'beta' %}
+ classpath "com.android.tools.build:gradle:3.1.0-beta4"
+{% else %}
+ classpath "com.android.tools.build:gradle:3.0.1"
+{% endif %}
+ }
+}
diff --git a/deps/v8/build/android/gyp/OWNERS b/deps/v8/build/android/gyp/OWNERS
new file mode 100644
index 0000000000..74dca6f718
--- /dev/null
+++ b/deps/v8/build/android/gyp/OWNERS
@@ -0,0 +1,6 @@
+agrieve@chromium.org
+estevenson@chromium.org
+digit@chromium.org
+wnwen@chromium.org
+
+# COMPONENT: Build
diff --git a/deps/v8/build/android/gyp/aar.py b/deps/v8/build/android/gyp/aar.py
new file mode 100755
index 0000000000..d0f357db33
--- /dev/null
+++ b/deps/v8/build/android/gyp/aar.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes an Android AAR file."""
+
+import argparse
+import os
+import posixpath
+import re
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+from util import build_utils
+from util import md5_check
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir)))
+import gn_helpers
+
+
+def _IsManifestEmpty(manifest_str):
+ """Returns whether the given manifest has merge-worthy elements.
+
+ E.g.: <activity>, <service>, etc.
+ """
+ doc = ElementTree.fromstring(manifest_str)
+ for node in doc:
+ if node.tag == 'application':
+ if len(node):
+ return False
+ elif node.tag != 'uses-sdk':
+ return False
+
+ return True
+
+
def _CreateInfo(aar_file):
  """Returns a GN-scope string describing the contents of |aar_file|.

  The scope records which interesting entries (aidl, resources, sub-jars,
  assets, native libs, classes.jar, proguard.txt, R.txt) the archive holds.
  """
  data = {
      'aidl': [],
      'assets': [],
      'resources': [],
      'subjars': [],
      'subjar_tuples': [],
      'has_classes_jar': False,
      'has_proguard_flags': False,
      'has_native_libraries': False,
      'has_r_text_file': False,
  }
  with zipfile.ZipFile(aar_file) as z:
    data['is_manifest_empty'] = (
        _IsManifestEmpty(z.read('AndroidManifest.xml')))

    for name in z.namelist():
      # Skip directory entries.
      if name.endswith('/'):
        continue
      if name.startswith('aidl/'):
        data['aidl'].append(name)
      elif name.startswith('res/'):
        data['resources'].append(name)
      elif name.startswith('libs/') and name.endswith('.jar'):
        # Derive a GN-safe label from the jar's basename (extension dropped).
        label = re.sub(r'[^a-zA-Z0-9._]', '_', posixpath.basename(name)[:-4])
        data['subjars'].append(name)
        data['subjar_tuples'].append([label, name])
      elif name.startswith('assets/'):
        data['assets'].append(name)
      elif name.startswith('jni/'):
        data['has_native_libraries'] = True
        data.setdefault('native_libraries', []).append(name)
      elif name == 'classes.jar':
        data['has_classes_jar'] = True
      elif name == 'proguard.txt':
        data['has_proguard_flags'] = True
      elif name == 'R.txt':
        # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
        # have no resources as well. We treat empty R.txt as having no R.txt.
        data['has_r_text_file'] = (z.read('R.txt').strip() != '')

  return """\
# Generated by //build/android/gyp/aar.py
# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".

""" + gn_helpers.ToGNString(data)
+
+
+def _AddCommonArgs(parser):
+ parser.add_argument('aar_file',
+ help='Path to the AAR file.',
+ type=os.path.normpath)
+
+
def main():
  """Entry point for aar.py.

  Subcommands:
    list    — print (or write to --output) a GN scope describing the .aar.
    extract — unzip the .aar into --output-dir, clobbering stale extractions;
              optionally asserts --assert-info-file matches the archive.
  """
  parser = argparse.ArgumentParser(description=__doc__)
  command_parsers = parser.add_subparsers(dest='command')
  subp = command_parsers.add_parser(
      'list', help='Output a GN scope describing the contents of the .aar.')
  _AddCommonArgs(subp)
  subp.add_argument('--output',
                    help='Output file.',
                    default='-')

  subp = command_parsers.add_parser('extract', help='Extracts the .aar')
  _AddCommonArgs(subp)
  subp.add_argument('--output-dir',
                    help='Output directory for the extracted files.',
                    required=True,
                    type=os.path.normpath)
  subp.add_argument('--assert-info-file',
                    help='Path to .info file. Asserts that it matches what '
                         '"list" would output.',
                    type=argparse.FileType('r'))

  args = parser.parse_args()

  if args.command == 'extract':
    if args.assert_info_file:
      expected = _CreateInfo(args.aar_file)
      actual = args.assert_info_file.read()
      if actual != expected:
        raise Exception('android_aar_prebuilt() cached .info file is '
                        'out-of-date. Run gn gen with '
                        'update_android_aar_prebuilts=true to update it.')

    def clobber():
      # Clear previously extracted versions of the AAR if it is obsolete.
      shutil.rmtree(args.output_dir, ignore_errors=True)
      build_utils.ExtractAll(args.aar_file, path=args.output_dir)

    with zipfile.ZipFile(args.aar_file) as zf:
      md5_check.CallAndRecordIfStale(
          clobber, input_paths=[args.aar_file],
          output_paths=[
              os.path.join(args.output_dir, n) for n in zf.namelist()])

  elif args.command == 'list':
    aar_info = _CreateInfo(args.aar_file)
    aar_output_present = args.output != '-' and os.path.isfile(args.output)
    if aar_output_present:
      # Some .info files are read-only, for example the cipd-controlled ones
      # under third_party/android_deps/repository. To deal with these, first
      # check that the existing content is correct, and if it is, exit
      # without touching the file system.
      # (Fix vs. original: the file handle was previously never closed.)
      with open(args.output, 'r') as f:
        file_info = f.read()
      if file_info == aar_info:
        return

    # Try to write the file. This may fail for read-only ones that were
    # not updated.
    try:
      with open(args.output, 'w') as f:
        f.write(aar_info)
    except IOError as e:
      if not aar_output_present:
        raise e
      raise Exception('Could not update output file: %s\n%s\n' %
                      (args.output, e))

if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/aar.pydeps b/deps/v8/build/android/gyp/aar.pydeps
new file mode 100644
index 0000000000..e08c5475e3
--- /dev/null
+++ b/deps/v8/build/android/gyp/aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../gn_helpers.py
+aar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/aidl.py b/deps/v8/build/android/gyp/aidl.py
new file mode 100755
index 0000000000..64ad29041a
--- /dev/null
+++ b/deps/v8/build/android/gyp/aidl.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
def main(argv):
  """Runs Android's aidl over each positional .java/.aidl input and packs the
  generated Java sources into the --srcjar zip.

  Args:
    argv: full argv; argv[1:] is parsed for options and input files.
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
  option_parser.add_option('--imports', help='Files to import.')
  option_parser.add_option('--includes',
                           help='Directories to add as import search paths.')
  option_parser.add_option('--srcjar', help='Path for srcjar output.')
  options, args = option_parser.parse_args(argv[1:])

  with build_utils.TempDir() as temp_dir:
    for f in args:
      # aidl writes one .java file per input, named after the class.
      classname = os.path.splitext(os.path.basename(f))[0]
      output = os.path.join(temp_dir, classname + '.java')
      aidl_cmd = [options.aidl_path]
      # NOTE(review): --imports is not guarded against being omitted;
      # presumably build_utils.ParseGnList tolerates None — confirm.
      aidl_cmd += [
          '-p' + s for s in build_utils.ParseGnList(options.imports)
      ]
      if options.includes is not None:
        aidl_cmd += [
            '-I' + s for s in build_utils.ParseGnList(options.includes)
        ]
      aidl_cmd += [
          f,
          output
      ]
      build_utils.CheckOutput(aidl_cmd)

    # Pack all generated .java files into the srcjar, laid out by package.
    with build_utils.AtomicOutput(options.srcjar) as f:
      with zipfile.ZipFile(f, 'w') as srcjar:
        for path in build_utils.FindInDirectory(temp_dir, '*.java'):
          with open(path) as fileobj:
            data = fileobj.read()
          # NOTE(review): assumes every generated file has a package
          # declaration; .group(1) raises AttributeError otherwise.
          pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
          arcname = '%s/%s' % (
              pkg_name.replace('.', '/'), os.path.basename(path))
          build_utils.AddToZipHermetic(srcjar, arcname, data=data)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
diff --git a/deps/v8/build/android/gyp/aidl.pydeps b/deps/v8/build/android/gyp/aidl.pydeps
new file mode 100644
index 0000000000..2dbce376f1
--- /dev/null
+++ b/deps/v8/build/android/gyp/aidl.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../gn_helpers.py
+aidl.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/apkbuilder.py b/deps/v8/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000000..310a192828
--- /dev/null
+++ b/deps/v8/build/android/gyp/apkbuilder.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import itertools
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import finalize_apk
+
+from util import build_utils
+
+
# Taken from aapt's Package.cpp:
# File extensions that are stored uncompressed in the APK — these formats are
# already compressed, so deflating them would cost CPU for no size win.
_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
                           '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
                           '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
                           '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
                           '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
def _ParseArgs(args):
  """Parses and post-processes apkbuilder command-line arguments.

  GYP-list options are flattened to plain Python lists, the
  --uncompress-shared-libraries string is converted to a bool, and
  cross-option requirements (signing args for APKs, --android-abi when
  native libs are present) are validated.

  Args:
    args: argv fragment (already file-arg expanded).

  Returns:
    The parsed, normalized argparse namespace.

  Raises:
    Exception: if a required signing/ABI option is missing.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--assets',
      help='GYP-list of files to add as assets in the form '
           '"srcPath:zipPath", where ":zipPath" is optional.')
  parser.add_argument(
      '--java-resources', help='GYP-list of java_resources JARs to include.')
  parser.add_argument('--write-asset-list',
                      action='store_true',
                      help='Whether to create an assets/assets_list file.')
  parser.add_argument(
      '--uncompressed-assets',
      help='Same as --assets, except disables compression.')
  parser.add_argument('--resource-apk',
                      help='An .ap_ file built using aapt',
                      required=True)
  parser.add_argument('--output-apk',
                      help='Path to the output file',
                      required=True)
  parser.add_argument('--format', choices=['apk', 'bundle-module'],
                      default='apk', help='Specify output format.')
  parser.add_argument('--dex-file',
                      help='Path to the classes.dex to use')
  parser.add_argument('--uncompress-dex', action='store_true',
                      help='Store .dex files uncompressed in the APK')
  parser.add_argument('--native-libs',
                      action='append',
                      help='GYP-list of native libraries to include. '
                           'Can be specified multiple times.',
                      default=[])
  parser.add_argument('--secondary-native-libs',
                      action='append',
                      help='GYP-list of native libraries for secondary '
                           'android-abi. Can be specified multiple times.',
                      default=[])
  parser.add_argument('--android-abi',
                      help='Android architecture to use for native libraries')
  parser.add_argument('--secondary-android-abi',
                      help='The secondary Android architecture to use for'
                           'secondary native libraries')
  parser.add_argument(
      '--native-lib-placeholders',
      help='GYP-list of native library placeholders to add.')
  parser.add_argument(
      '--secondary-native-lib-placeholders',
      help='GYP-list of native library placeholders to add '
           'for the secondary ABI')
  parser.add_argument('--uncompress-shared-libraries', default='False',
                      choices=['true', 'True', 'false', 'False'],
                      help='Whether to uncompress native shared libraries. Argument must be '
                           'a boolean value.')
  parser.add_argument('--apksigner-path',
                      help='Path to the apksigner executable.')
  parser.add_argument('--zipalign-path',
                      help='Path to the zipalign executable.')
  parser.add_argument('--key-path',
                      help='Path to keystore for signing.')
  parser.add_argument('--key-passwd',
                      help='Keystore password')
  parser.add_argument('--key-name',
                      help='Keystore name')
  options = parser.parse_args(args)
  # Flatten GYP-list strings into real lists.
  options.assets = build_utils.ParseGnList(options.assets)
  options.uncompressed_assets = build_utils.ParseGnList(
      options.uncompressed_assets)
  options.native_lib_placeholders = build_utils.ParseGnList(
      options.native_lib_placeholders)
  options.secondary_native_lib_placeholders = build_utils.ParseGnList(
      options.secondary_native_lib_placeholders)
  options.java_resources = build_utils.ParseGnList(options.java_resources)
  # --native-libs may repeat; each occurrence is itself a GYP-list.
  all_libs = []
  for gyp_list in options.native_libs:
    all_libs.extend(build_utils.ParseGnList(gyp_list))
  options.native_libs = all_libs
  secondary_libs = []
  for gyp_list in options.secondary_native_libs:
    secondary_libs.extend(build_utils.ParseGnList(gyp_list))
  options.secondary_native_libs = secondary_libs

  # --apksigner-path, --zipalign-path, --key-xxx arguments are
  # required when building an APK, but not a bundle module.
  if options.format == 'apk':
    required_args = ['apksigner_path', 'zipalign_path', 'key_path',
                     'key_passwd', 'key_name']
    for required in required_args:
      if not vars(options)[required]:
        raise Exception('Argument --%s is required for APKs.' % (
            required.replace('_', '-')))

  # Convert the string choice into a real bool.
  options.uncompress_shared_libraries = \
      options.uncompress_shared_libraries in [ 'true', 'True' ]

  if not options.android_abi and (options.native_libs or
                                  options.native_lib_placeholders):
    raise Exception('Must specify --android-abi with --native-libs')
  if not options.secondary_android_abi and (options.secondary_native_libs or
      options.secondary_native_lib_placeholders):
    raise Exception('Must specify --secondary-android-abi with'
                    ' --secondary-native-libs')
  return options
+
+
+def _SplitAssetPath(path):
+ """Returns (src, dest) given an asset path in the form src[:dest]."""
+ path_parts = path.split(':')
+ src_path = path_parts[0]
+ if len(path_parts) > 1:
+ dest_path = path_parts[1]
+ else:
+ dest_path = os.path.basename(src_path)
+ return src_path, dest_path
+
+
+def _ExpandPaths(paths):
+ """Converts src:dst into tuples and enumerates files within directories.
+
+ Args:
+ paths: Paths in the form "src_path:dest_path"
+
+ Returns:
+ A list of (src_path, dest_path) tuples sorted by dest_path (for stable
+ ordering within output .apk).
+ """
+ ret = []
+ for path in paths:
+ src_path, dest_path = _SplitAssetPath(path)
+ if os.path.isdir(src_path):
+ for f in build_utils.FindInDirectory(src_path, '*'):
+ ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
+ else:
+ ret.append((src_path, dest_path))
+ ret.sort(key=lambda t:t[1])
+ return ret
+
+
def _AddAssets(apk, path_tuples, disable_compression=False):
  """Adds the given paths to the apk.

  Args:
    apk: ZipFile to write to.
    path_tuples: List of (src_path, dest_path) tuples to add.
    disable_compression: Whether to disable compression.

  Raises:
    Exception: if two tuples map different sources to the same asset path.
  """
  # Group all uncompressed assets together in the hope that it will increase
  # locality of mmap'ed files.
  for target_compress in (False, True):
    for src_path, dest_path in path_tuples:

      # Extensions in _NO_COMPRESS_EXTENSIONS stay uncompressed regardless.
      compress = not disable_compression and (
          os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
      if target_compress == compress:
        apk_path = 'assets/' + dest_path
        try:
          # getinfo() raising KeyError means the entry does not exist yet,
          # which is the expected (success) case.
          apk.getinfo(apk_path)
          # Should never happen since write_build_config.py handles merging.
          raise Exception('Multiple targets specified the asset path: %s' %
                          apk_path)
        except KeyError:
          build_utils.AddToZipHermetic(apk, apk_path, src_path=src_path,
                                       compress=compress)
+
+
+def _CreateAssetsList(path_tuples):
+ """Returns a newline-separated list of asset paths for the given paths."""
+ dests = sorted(t[1] for t in path_tuples)
+ return '\n'.join(dests) + '\n'
+
+
def _AddNativeLibraries(out_apk, native_libs, android_abi, uncompress):
  """Add native libraries to APK."""
  has_crazy_linker = any(
      'android_linker' in os.path.basename(p) for p in native_libs)
  for lib_path in native_libs:
    lib_name = os.path.basename(lib_path)

    compress = None
    is_plain_so = (os.path.splitext(lib_name)[1] == '.so'
                   and 'android_linker' not in lib_name)
    # With the crazy linker, sanitizer runtimes and crashpad_handler must
    # stay compressed (they are loaded normally).
    crazy_exempt = has_crazy_linker and ('clang_rt' in lib_name
                                         or 'crashpad_handler' in lib_name)
    if uncompress and is_plain_so and not crazy_exempt:
      compress = False
      # Add prefix to prevent android install from extracting upon install.
      if has_crazy_linker:
        lib_name = 'crazy.' + lib_name

    apk_path = 'lib/%s/%s' % (android_abi, lib_name)
    build_utils.AddToZipHermetic(out_apk,
                                 apk_path,
                                 src_path=lib_path,
                                 compress=compress)
+
+
def main(args):
  """Assembles the final APK (or bundle module) from the resource .ap_,
  dex files, native libraries, assets and java resources, then signs and
  zipaligns it (APK format only) and writes the depfile.

  Args:
    args: argv fragment; may contain @file response-file references.
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  native_libs = sorted(options.native_libs)

  # Include native libs in the depfile_deps since GN doesn't know about the
  # dependencies when is_component_build=true.
  depfile_deps = list(native_libs)

  secondary_native_libs = []
  if options.secondary_native_libs:
    secondary_native_libs = sorted(options.secondary_native_libs)
    depfile_deps += secondary_native_libs

  if options.java_resources:
    # Included via .build_config, so need to write it to depfile.
    depfile_deps.extend(options.java_resources)

  assets = _ExpandPaths(options.assets)
  uncompressed_assets = _ExpandPaths(options.uncompressed_assets)

  # Included via .build_config, so need to write it to depfile.
  depfile_deps.extend(x[0] for x in assets)
  depfile_deps.extend(x[0] for x in uncompressed_assets)

  # Bundle modules have a structure similar to APKs, except that resources
  # are compiled in protobuf format (instead of binary xml), and that some
  # files are located into different top-level directories, e.g.:
  #  AndroidManifest.xml -> manifest/AndroidManifest.xml
  #  classes.dex -> dex/classes.dex
  #  res/ -> res/  (unchanged)
  #  assets/ -> assets/  (unchanged)
  #  <other-file> -> root/<other-file>
  #
  # Hence, the following variables are used to control the location of files in
  # the final archive.
  if options.format == 'bundle-module':
    apk_manifest_dir = 'manifest/'
    apk_root_dir = 'root/'
    apk_dex_dir = 'dex/'
  else:
    apk_manifest_dir = ''
    apk_root_dir = ''
    apk_dex_dir = ''

  # Targets generally do not depend on apks, so no need for only_if_changed.
  with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
    with zipfile.ZipFile(options.resource_apk) as resource_apk, \
         zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED) as out_apk:

      def copy_resource(zipinfo, out_dir=''):
        # Preserve the source entry's stored/deflated choice.
        compress = zipinfo.compress_type != zipfile.ZIP_STORED
        build_utils.AddToZipHermetic(
            out_apk,
            out_dir + zipinfo.filename,
            data=resource_apk.read(zipinfo.filename),
            compress=compress)

      # Make assets come before resources in order to maintain the same file
      # ordering as GYP / aapt. http://crbug.com/561862
      resource_infos = resource_apk.infolist()

      # 1. AndroidManifest.xml
      copy_resource(
          resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)

      # 2. Assets
      if options.write_asset_list:
        data = _CreateAssetsList(itertools.chain(assets, uncompressed_assets))
        build_utils.AddToZipHermetic(out_apk, 'assets/assets_list', data=data)

      _AddAssets(out_apk, assets, disable_compression=False)
      _AddAssets(out_apk, uncompressed_assets, disable_compression=True)

      # 3. Dex files
      if options.dex_file and options.dex_file.endswith('.zip'):
        # A .zip of dex files: copy every .dex entry across.
        with zipfile.ZipFile(options.dex_file, 'r') as dex_zip:
          for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
            build_utils.AddToZipHermetic(
                out_apk,
                apk_dex_dir + dex,
                data=dex_zip.read(dex),
                compress=not options.uncompress_dex)
      elif options.dex_file:
        # A single classes.dex file.
        build_utils.AddToZipHermetic(
            out_apk,
            apk_dex_dir + 'classes.dex',
            src_path=options.dex_file,
            compress=not options.uncompress_dex)

      # 4. Native libraries.
      _AddNativeLibraries(out_apk, native_libs, options.android_abi,
                          options.uncompress_shared_libraries)

      if options.secondary_android_abi:
        _AddNativeLibraries(out_apk, secondary_native_libs,
                            options.secondary_android_abi,
                            options.uncompress_shared_libraries)

      for name in sorted(options.native_lib_placeholders):
        # Note: Empty libs files are ignored by md5check (can cause issues
        # with stale builds when the only change is adding/removing
        # placeholders).
        apk_path = 'lib/%s/%s' % (options.android_abi, name)
        build_utils.AddToZipHermetic(out_apk, apk_path, data='')

      for name in sorted(options.secondary_native_lib_placeholders):
        # Note: Empty libs files are ignored by md5check (can cause issues
        # with stale builds when the only change is adding/removing
        # placeholders).
        apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
        build_utils.AddToZipHermetic(out_apk, apk_path, data='')

      # 5. Resources
      for info in resource_infos:
        if info.filename != 'AndroidManifest.xml':
          copy_resource(info)

      # 6. Java resources that should be accessible via
      # Class.getResourceAsStream(), in particular parts of Emma jar.
      # Prebuilt jars may contain class files which we shouldn't include.
      for java_resource in options.java_resources:
        with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
          for apk_path in java_resource_jar.namelist():
            apk_path_lower = apk_path.lower()

            if apk_path_lower.startswith('meta-inf/'):
              continue
            if apk_path_lower.endswith('/'):
              continue
            if apk_path_lower.endswith('.class'):
              continue

            build_utils.AddToZipHermetic(
                out_apk,
                apk_root_dir + apk_path,
                data=java_resource_jar.read(apk_path))

    # Sign + zipalign in place once the zip is fully written (APK only).
    if options.format == 'apk':
      finalize_apk.FinalizeApk(options.apksigner_path, options.zipalign_path,
                               f.name, f.name, options.key_path,
                               options.key_passwd, options.key_name)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        options.output_apk,
        inputs=depfile_deps,
        add_pydeps=False)


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/apkbuilder.pydeps b/deps/v8/build/android/gyp/apkbuilder.pydeps
new file mode 100644
index 0000000000..3ae03319c9
--- /dev/null
+++ b/deps/v8/build/android/gyp/apkbuilder.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py
+../../gn_helpers.py
+apkbuilder.py
+finalize_apk.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/assert_static_initializers.py b/deps/v8/build/android/gyp/assert_static_initializers.py
new file mode 100755
index 0000000000..019baface1
--- /dev/null
+++ b/deps/v8/build/android/gyp/assert_static_initializers.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks the number of static initializers in an APK's library."""
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
# Path (within the Chromium source tree) of the helper script that lists the
# individual static initializers contained in a native library.
_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT,
                                              'tools', 'linux',
                                              'dump-static-initializers.py')
+
+
def _RunReadelf(so_path, options, tool_prefix=''):
  """Runs (tool_prefix + )readelf with the given flags on |so_path|.

  Args:
    so_path: Path of the shared library to inspect.
    options: List of readelf flags (e.g. ['-n']).
    tool_prefix: Optional toolchain prefix for the readelf binary.

  Returns:
    readelf's stdout as a string.
  """
  cmd = [tool_prefix + 'readelf']
  cmd.extend(options)
  cmd.append(so_path)
  return subprocess.check_output(cmd)
+
+
def _ParseLibBuildId(so_path, tool_prefix):
  """Returns the Build ID of the given native library (or None if absent)."""
  notes = _RunReadelf(so_path, ['-n'], tool_prefix)
  found = re.search(r'Build ID: (\w+)', notes)
  if found is None:
    return None
  return found.group(1)
+
+
def _VerifyLibBuildIdsMatch(tool_prefix, *so_files):
  """Raises unless every given .so shares a single Build ID.

  A mismatch between the library inside the APK and the one in the output
  directory indicates a stale build.
  """
  build_ids = set(_ParseLibBuildId(path, tool_prefix) for path in so_files)
  if len(build_ids) > 1:
    raise Exception('Found differing build ids in output directory and apk. '
                    'Your output directory is likely stale.')
+
+
def _GetStaticInitializers(so_path, tool_prefix):
  """Lists static initializers via the dump-static-initializers.py tool.

  Args:
    so_path: Path to an (unstripped) native library.
    tool_prefix: Toolchain prefix, forwarded to the tool via -t.

  Returns:
    Tuple of (listing, file_count) where |listing| is every output line except
    the trailing summary, and |file_count| is parsed from the summary line
    'Found N static initializers in (M) files.'.
  """
  output = subprocess.check_output(
      [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t', tool_prefix])
  # The final line is a summary; strip it from the returned listing.
  summary = re.search(r'Found \d+ static initializers in (\d+) files.', output)
  return output.splitlines()[:-1], int(summary.group(1))
+
+
+def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix):
+ lib_name = os.path.basename(apk_so_name).replace('crazy.', '')
+ so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
+ if not os.path.exists(so_with_symbols_path):
+ raise Exception('Unstripped .so not found. Looked here: %s',
+ so_with_symbols_path)
+ _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path)
+ sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix)
+ for si in sis:
+ print si
+
+
+# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py.
+def _ReadInitArray(so_path, tool_prefix):
+ stdout = _RunReadelf(so_path, ['-SW'], tool_prefix)
+ # Matches: .ctors PROGBITS 000000000516add0 5169dd0 000010 00 WA 0 0 8
+ match = re.search(r'\.init_array.*$', stdout, re.MULTILINE)
+ if not match:
+ raise Exception('Did not find section: .init_array in:\n' + stdout)
+ size_str = re.split(r'\W+', match.group(0))[5]
+ return int(size_str, 16)
+
+
def _CountStaticInitializers(so_path, tool_prefix):
  """Returns the number of entries in the library's .init_array section.

  Computes (.init_array size) / (ELF word size), where the word size is
  derived from the ELF header class (ELF32 -> 4 bytes, otherwise 8).

  NOTE(review): uses py2 '/' integer division; under python3 this would
  return a float — confirm before porting.
  """
  # Find the number of files with at least one static initializer.
  # First determine if we're 32 or 64 bit
  stdout = _RunReadelf(so_path, ['-h'], tool_prefix)
  elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0)
  elf_class = re.split(r'\W+', elf_class_line)[1]
  if elf_class == 'ELF32':
    word_size = 4
  else:
    word_size = 8

  # Then find the number of files with global static initializers.
  # NOTE: this is very implementation-specific and makes assumptions
  # about how compiler and linker implement global static initializers.
  init_array_size = _ReadInitArray(so_path, tool_prefix)
  return init_array_size / word_size
+
+
def _AnalyzeStaticInitializers(apk_filename, tool_prefix, dump_sis, out_dir,
                               ignored_libs):
  """Returns the total static initializer count across the APK's libraries.

  Args:
    apk_filename: Path to the APK to inspect.
    tool_prefix: Toolchain prefix for readelf and friends.
    dump_sis: If True, also print a listing of each static initializer
      (requires unstripped libraries under |out_dir|).
    out_dir: Build output directory; used only when |dump_sis| is True.
    ignored_libs: Library basenames excluded from the count.
  """
  # Static initializer counting mostly copies logic in
  # infra/scripts/legacy/scripts/slave/chromium/sizes.py.
  with zipfile.ZipFile(apk_filename) as z:
    so_files = [
        f for f in z.infolist() if f.filename.endswith('.so')
        and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs
    ]
    # Skip checking static initializers for secondary abi libs. They will be
    # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so
    # files in the output directory in 64 bit builds.
    has_64 = any('64' in f.filename for f in so_files)
    files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]

    si_count = 0
    for f in files_to_check:
      # Extract each library to a temp file so readelf can open a real path.
      with tempfile.NamedTemporaryFile() as temp:
        temp.write(z.read(f))
        temp.flush()
        si_count += _CountStaticInitializers(temp.name, tool_prefix)
        if dump_sis:
          # Print count and list of SIs reported by dump-static-initializers.py.
          # Doesn't work well on all archs (particularly arm), which is why
          # the readelf method is used for tracking SI counts.
          _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix)
  return si_count
+
+
def main():
  """Fails (exit 1) unless the APK's static initializer count matches
  --expected-count; optionally touches a stamp file on success."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--touch', help='File to touch upon success')
  parser.add_argument('--tool-prefix', required=True,
                      help='Prefix for nm and friends')
  parser.add_argument('--expected-count', required=True, type=int,
                      help='Fail if number of static initializers is not '
                           'equal to this value.')
  parser.add_argument('apk', help='APK file path.')
  args = parser.parse_args()

  #TODO(crbug.com/838414): add support for files included via loadable_modules.
  ignored_libs = ['libarcore_sdk_c.so']

  si_count = _AnalyzeStaticInitializers(args.apk, args.tool_prefix, False, '.',
                                        ignored_libs)
  if si_count != args.expected_count:
    print 'Expected {} static initializers, but found {}.'.format(
        args.expected_count, si_count)
    if args.expected_count > si_count:
      print 'You have removed one or more static initializers. Thanks!'
      print 'To fix the build, update the expectation in:'
      print '    //chrome/android/static_initializers.gni'
    else:
      # Re-run the analysis with dump_sis=True to list the offending SIs.
      print 'Dumping static initializers via dump-static-initializers.py:'
      sys.stdout.flush()
      _AnalyzeStaticInitializers(args.apk, args.tool_prefix, True, '.',
                                 ignored_libs)
      print
      print 'If the above list is not useful, consider listing them with:'
      print '    //tools/binary_size/diagnose_bloat.py'
      print
      print 'For more information:'
      print ('    https://chromium.googlesource.com/chromium/src/+/master/docs/'
             'static_initializers.md')
    sys.exit(1)

  if args.touch:
    # Stamp file for the build system's up-to-date check.
    open(args.touch, 'w')
+
+
# Script entry point.
if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/gyp/assert_static_initializers.pydeps b/deps/v8/build/android/gyp/assert_static_initializers.pydeps
new file mode 100644
index 0000000000..e031668f46
--- /dev/null
+++ b/deps/v8/build/android/gyp/assert_static_initializers.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py
+../../gn_helpers.py
+assert_static_initializers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/bundletool.py b/deps/v8/build/android/gyp/bundletool.py
new file mode 100755
index 0000000000..ac9561e768
--- /dev/null
+++ b/deps/v8/build/android/gyp/bundletool.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple wrapper around the bundletool tool.
+
+Bundletool is distributed as a versioned jar file. This script abstracts the
+location and version of this jar file, as well as the JVM invokation."""
+
+import logging
+import os
+import subprocess
+import sys
+
# Assume this is stored under build/android/gyp/
BUNDLETOOL_DIR = os.path.abspath(os.path.join(
    __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
    'bundletool'))

# The checked-in jar is versioned; bump this together with the jar file.
BUNDLETOOL_VERSION = '0.9.0'

# Full path of the versioned bundletool jar that RunBundleTool() invokes.
BUNDLETOOL_JAR_PATH = os.path.join(
    BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
+
def RunBundleTool(args):
  """Invokes the bundletool jar through the JVM, forwarding |args|.

  Raises:
    subprocess.CalledProcessError: if bundletool exits non-zero.
  """
  command = ['java', '-jar', BUNDLETOOL_JAR_PATH]
  command.extend(args)
  logging.debug(' '.join(command))
  subprocess.check_call(command)
+
# Script entry point: pass all command-line args straight to bundletool.
if __name__ == '__main__':
  RunBundleTool(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/bytecode_processor.py b/deps/v8/build/android/gyp/bytecode_processor.py
new file mode 100755
index 0000000000..020b52f5f0
--- /dev/null
+++ b/deps/v8/build/android/gyp/bytecode_processor.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps bin/helper/java_bytecode_rewriter and expands @FileArgs."""
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def _AddSwitch(parser, val):
+ parser.add_argument(
+ val, action='store_const', default='--disabled', const=val)
+
+
def main(argv):
  """Expands @FileArgs, parses options, and execs the java wrapper script.

  The wrapper's CLI is positional: each GN list is preceded by its length so
  the java side knows where each group of jars ends.
  """
  argv = build_utils.ExpandFileArgs(argv[1:])
  parser = argparse.ArgumentParser()
  parser.add_argument('--script', required=True,
                      help='Path to the java binary wrapper script.')
  parser.add_argument('--input-jar', required=True)
  parser.add_argument('--output-jar', required=True)
  parser.add_argument('--direct-classpath-jars', required=True)
  parser.add_argument('--sdk-classpath-jars', required=True)
  parser.add_argument('--extra-classpath-jars', dest='extra_jars',
                      action='append', default=[],
                      help='Extra inputs, passed last to the binary script.')
  parser.add_argument('-v', '--verbose', action='store_true')
  _AddSwitch(parser, '--is-prebuilt')
  _AddSwitch(parser, '--enable-custom-resources')
  _AddSwitch(parser, '--enable-assert')
  _AddSwitch(parser, '--enable-thread-annotations')
  _AddSwitch(parser, '--enable-check-class-path')
  parser.add_argument(
      '--split-compat-class-names',
      action='append',
      default=[],
      help='Names of classes that need to be made SplitCompat-enabled.')
  args = parser.parse_args(argv)

  sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
  assert len(sdk_jars) > 0

  direct_jars = build_utils.ParseGnList(args.direct_classpath_jars)
  assert len(direct_jars) > 0

  # --extra-classpath-jars may repeat; each occurrence is itself a GN list.
  extra_classpath_jars = []
  for a in args.extra_jars:
    extra_classpath_jars.extend(build_utils.ParseGnList(a))

  split_compat_class_names = build_utils.ParseGnList(
      args.split_compat_class_names)

  if args.verbose:
    verbose = '--verbose'
  else:
    verbose = '--not-verbose'

  # Length-prefixed list protocol (see docstring).
  cmd = ([
      args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
      args.enable_assert, args.enable_custom_resources,
      args.enable_thread_annotations, args.enable_check_class_path,
      str(len(sdk_jars))
  ] + sdk_jars + [str(len(direct_jars))] + direct_jars + [
      str(len(split_compat_class_names))
  ] + split_compat_class_names + extra_classpath_jars)
  subprocess.check_call(cmd)
+
+
# Script entry point; propagate main()'s return value as the exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
diff --git a/deps/v8/build/android/gyp/bytecode_processor.pydeps b/deps/v8/build/android/gyp/bytecode_processor.pydeps
new file mode 100644
index 0000000000..d8ff396495
--- /dev/null
+++ b/deps/v8/build/android/gyp/bytecode_processor.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py
+../../gn_helpers.py
+bytecode_processor.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/compile_resources.py b/deps/v8/build/android/gyp/compile_resources.py
new file mode 100755
index 0000000000..3f2f5dfe6e
--- /dev/null
+++ b/deps/v8/build/android/gyp/compile_resources.py
@@ -0,0 +1,916 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compile Android resources into an intermediate APK.
+
+This can also generate an R.txt, and an .srcjar file containing the proper
+final R.java class for all resource packages the APK depends on.
+
+This will crunch images with aapt2.
+"""
+
+import argparse
+import collections
+import contextlib
+import multiprocessing.pool
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+
# Name of environment variable that can be used to force this script to
# put temporary resource files into specific sub-directories, instead of
# temporary ones.
_ENV_DEBUG_VARIABLE = 'ANDROID_DEBUG_TEMP_RESOURCES_DIR'

# Import jinja2 from third_party/jinja2
sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party'))
from jinja2 import Template # pylint: disable=F0401

# Pngs that we shouldn't convert to webp. Please add rationale when updating.
# Used by _ConvertToWebP() to skip matching files.
_PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([
    # Crashes on Galaxy S5 running L (https://crbug.com/807059).
    r'.*star_gray\.png',
    # Android requires pngs for 9-patch images.
    r'.*\.9\.png',
    # Daydream requires pngs for icon files.
    r'.*daydream_icon_.*\.png']))
+
+
+def _ListToDictionary(lst, separator):
+ """Splits each element of the passed-in |lst| using |separator| and creates
+ dictionary treating first element of the split as the key and second as the
+ value."""
+ return dict(item.split(separator, 1) for item in lst)
+
+
def _ParseArgs(args):
  """Parses command line options.

  Also post-processes several options in place: GN lists are expanded into
  Python lists, and --package-name-to-id-mapping is converted from a
  'name=id' GN list into a dict.

  Returns:
    An options object as from argparse.ArgumentParser.parse_args()

  Raises:
    Exception: if --shared-resources and --app-as-shared-lib are both given.
  """
  parser, input_opts, output_opts = resource_utils.ResourceArgsParser()

  input_opts.add_argument(
      '--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
  input_opts.add_argument('--android-manifest', required=True,
                          help='AndroidManifest.xml path')
  input_opts.add_argument(
      '--shared-resources',
      action='store_true',
      help='Make all resources in R.java non-final and allow the resource IDs '
           'to be reset to a different package index when the apk is loaded by '
           'another application at runtime.')

  input_opts.add_argument(
      '--app-as-shared-lib',
      action='store_true',
      help='Same as --shared-resources, but also ensures all resource IDs are '
           'directly usable from the APK loaded as an application.')

  input_opts.add_argument(
      '--package-id',
      help='Custom package ID for resources (instead of 0x7f). Cannot be used '
      'with --shared-resources.')

  input_opts.add_argument(
      '--package-name-to-id-mapping',
      help='List containing mapping from package name to package IDs that will '
      'be assigned.')

  input_opts.add_argument(
      '--package-name',
      help='Package name that will be used to determine package ID.')

  input_opts.add_argument(
      '--arsc-package-name', help='Package name to use for resources.arsc file')

  input_opts.add_argument(
      '--shared-resources-whitelist',
      help='An R.txt file acting as a whitelist for resources that should be '
           'non-final and have their package ID changed at runtime in R.java. '
           'Implies and overrides --shared-resources.')

  input_opts.add_argument(
      '--shared-resources-whitelist-locales',
      default='[]',
      help='Optional GN-list of locales. If provided, all strings corresponding'
      ' to this locale list will be kept in the final output for the '
      'resources identified through --shared-resources-whitelist, even '
      'if --locale-whitelist is being used.')

  input_opts.add_argument(
      '--use-resource-ids-path',
      help='Use resource IDs generated by aapt --emit-ids')

  input_opts.add_argument('--proto-format', action='store_true',
                          help='Compile resources to protocol buffer format.')

  input_opts.add_argument('--support-zh-hk', action='store_true',
                          help='Use zh-rTW resources for zh-rHK.')

  input_opts.add_argument('--debuggable',
                          action='store_true',
                          help='Whether to add android:debuggable="true"')

  input_opts.add_argument('--version-code', help='Version code for apk.')
  input_opts.add_argument('--version-name', help='Version name for apk.')

  input_opts.add_argument(
      '--no-compress',
      help='disables compression for the given comma-separated list of '
      'extensions')

  input_opts.add_argument(
      '--locale-whitelist',
      default='[]',
      help='GN list of languages to include. All other language configs will '
          'be stripped out. List may include a combination of Android locales '
          'or Chrome locales.')

  input_opts.add_argument('--resource-blacklist-regex', default='',
                          help='Do not include matching drawables.')

  input_opts.add_argument(
      '--resource-blacklist-exceptions',
      default='[]',
      help='GN list of globs that say which blacklisted images to include even '
           'when --resource-blacklist-regex is set.')

  input_opts.add_argument('--png-to-webp', action='store_true',
                          help='Convert png files to webp format.')

  input_opts.add_argument('--webp-binary', default='',
                          help='Path to the cwebp binary.')

  input_opts.add_argument('--no-xml-namespaces',
                          action='store_true',
                          help='Whether to strip xml namespaces from processed '
                          'xml resources')
  input_opts.add_argument(
      '--resources-config-path', help='Path to aapt2 resources config file.')
  input_opts.add_argument(
      '--optimized-resources-path',
      help='Output for `aapt2 optimize` (also enables the step).')

  output_opts.add_argument('--apk-path', required=True,
                           help='Path to output (partial) apk.')

  output_opts.add_argument('--apk-info-path', required=True,
                           help='Path to output info file for the partial apk.')

  output_opts.add_argument('--srcjar-out',
                           help='Path to srcjar to contain generated R.java.')

  output_opts.add_argument('--r-text-out',
                           help='Path to store the generated R.txt file.')

  output_opts.add_argument('--proguard-file',
                           help='Path to proguard.txt generated file')

  output_opts.add_argument(
      '--proguard-file-main-dex',
      help='Path to proguard.txt generated file for main dex')

  output_opts.add_argument(
      '--emit-ids-out',
      help=
      'Path to file produced by aapt2 --emit-ids (for use with --stable-ids)')

  options = parser.parse_args(args)

  resource_utils.HandleCommonOptions(options)

  # Expand GN lists into real Python lists (in place on the namespace).
  options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist)
  options.shared_resources_whitelist_locales = build_utils.ParseGnList(
      options.shared_resources_whitelist_locales)
  options.resource_blacklist_exceptions = build_utils.ParseGnList(
      options.resource_blacklist_exceptions)

  if options.shared_resources and options.app_as_shared_lib:
    raise Exception('Only one of --app-as-shared-lib or --shared-resources '
                    'can be used.')

  # Convert the 'name=id' GN list into a dict for _PackageIdFromOptions().
  if options.package_name_to_id_mapping:
    package_names_list = build_utils.ParseGnList(
        options.package_name_to_id_mapping)
    options.package_name_to_id_mapping = _ListToDictionary(
        package_names_list, '=')

  return options
+
+
+def _SortZip(original_path, sorted_path):
+ """Generate new zip archive by sorting all files in the original by name."""
+ with zipfile.ZipFile(sorted_path, 'w') as sorted_zip, \
+ zipfile.ZipFile(original_path, 'r') as original_zip:
+ for info in sorted(original_zip.infolist(), key=lambda i: i.filename):
+ sorted_zip.writestr(info, original_zip.read(info))
+
+
+def _IterFiles(root_dir):
+ for root, _, files in os.walk(root_dir):
+ for f in files:
+ yield os.path.join(root, f)
+
+
def _DuplicateZhResources(resource_dirs):
  """Duplicate Taiwanese resources into Hong-Kong specific directory.

  Returns:
    A dict mapping each created path (relative to its resource dir) to the
    relative path of the zh-rTW original it was copied from.
  """
  renamed_paths = {}
  for resource_dir in resource_dirs:
    # We use zh-TW resources for zh-HK (if we have zh-TW resources).
    tw_paths = [p for p in _IterFiles(resource_dir) if 'zh-rTW' in p]
    for tw_path in tw_paths:
      hk_path = tw_path.replace('zh-rTW', 'zh-rHK')
      build_utils.MakeDirectory(os.path.dirname(hk_path))
      shutil.copyfile(tw_path, hk_path)
      renamed_paths[os.path.relpath(hk_path, resource_dir)] = os.path.relpath(
          tw_path, resource_dir)
  return renamed_paths
+
+
def _RenameLocaleResourceDirs(resource_dirs):
  """Rename locale resource directories into standard names when necessary.

  This is necessary to deal with the fact that older Android releases only
  support ISO 639-1 two-letter codes, and sometimes even obsolete versions
  of them.

  In practice it means:
    * 3-letter ISO 639-2 qualifiers are renamed under a corresponding
      2-letter one. E.g. for Filipino, strings under values-fil/ will be moved
      to a new corresponding values-tl/ sub-directory.

    * Modern ISO 639-1 codes will be renamed to their obsolete variant
      for Indonesian, Hebrew and Yiddish (e.g. 'values-in/ -> values-id/).

    * Norwegian macrolanguage strings will be renamed to Bokmål (main
      Norway language). See http://crbug.com/920960. In practice this
      means that 'values-no/ -> values-nb/' unless 'values-nb/' already
      exists.

    * BCP 47 langauge tags will be renamed to an equivalent ISO 639-1
      locale qualifier if possible (e.g. 'values-b+en+US/ -> values-en-rUS').
      Though this is not necessary at the moment, because no third-party
      package that Chromium links against uses these for the current list of
      supported locales, this may change when the list is extended in the
      future).

  Args:
    resource_dirs: list of top-level resource directories.
  Returns:
    A dictionary mapping renamed paths to their original location
    (e.g. '.../values-tl/strings.xml' -> ' .../values-fil/strings.xml').
  """
  renamed_paths = dict()
  for resource_dir in resource_dirs:
    for path in _IterFiles(resource_dir):
      locale = resource_utils.FindLocaleInStringResourceFilePath(path)
      if not locale:
        continue
      # Round-trip Android -> Chromium -> Android; a change means the Android
      # qualifier was non-canonical and the file must be moved.
      cr_locale = resource_utils.ToChromiumLocaleName(locale)
      if not cr_locale:
        continue  # Unsupported Android locale qualifier!?
      locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
      if locale != locale2:
        path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
        if path == path2:
          raise Exception('Could not substitute locale %s for %s in %s' %
                          (locale, locale2, path))
        if os.path.exists(path2):
          # This happens sometimes, e.g. some libraries provide both
          # values-nb/ and values-no/ with the same content.
          continue
        build_utils.MakeDirectory(os.path.dirname(path2))
        shutil.move(path, path2)
        renamed_paths[os.path.relpath(path2, resource_dir)] = os.path.relpath(
            path, resource_dir)
  return renamed_paths
+
+
def _ToAndroidLocales(locale_whitelist, support_zh_hk):
  """Converts the list of Chrome locales to Android config locale qualifiers.

  Args:
    locale_whitelist: A list of Chromium locale names.
    support_zh_hk: True if we need to support zh-HK by duplicating
      the zh-TW strings.
  Returns:
    A set of matching Android config locale qualifier names.

  Raises:
    Exception: when a locale name is not supported.
  """
  ret = set()
  for locale in locale_whitelist:
    locale = resource_utils.ToAndroidLocaleName(locale)
    if locale is None or ('-' in locale and '-r' not in locale):
      raise Exception('Unsupported Chromium locale name: %s' % locale)
    ret.add(locale)
    # Always keep non-regional fall-backs.
    language = locale.split('-')[0]
    ret.add(language)

  # We don't actually support zh-HK in Chrome on Android, but we mimic the
  # native side behavior where we use zh-TW resources when the locale is set to
  # zh-HK. See https://crbug.com/780847.
  if support_zh_hk:
    assert not any('HK' in l for l in locale_whitelist), (
        'Remove special logic if zh-HK is now supported (crbug.com/780847).')
    ret.add('zh-rHK')
  # |ret| is already a set; the extra set() wrapper is a defensive no-op.
  return set(ret)
+
+
def _MoveImagesToNonMdpiFolders(res_root):
  """Move images from drawable-*-mdpi-* folders to drawable-* folders.

  Why? http://crbug.com/289843

  Args:
    res_root: Top-level resource directory to rewrite in place.
  Returns:
    A dict mapping each moved file's new path to its original path, both
    relative to |res_root|.
  """
  renamed_paths = dict()
  for src_dir_name in os.listdir(res_root):
    src_components = src_dir_name.split('-')
    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
      continue
    src_dir = os.path.join(res_root, src_dir_name)
    if not os.path.isdir(src_dir):
      continue
    # Destination folder is the same qualifier list minus 'mdpi'.
    dst_components = [c for c in src_components if c != 'mdpi']
    assert dst_components != src_components
    dst_dir_name = '-'.join(dst_components)
    dst_dir = os.path.join(res_root, dst_dir_name)
    build_utils.MakeDirectory(dst_dir)
    for src_file_name in os.listdir(src_dir):
      # Only image files are moved; other resource types stay put.
      if not os.path.splitext(src_file_name)[1] in ('.png', '.webp'):
        continue
      src_file = os.path.join(src_dir, src_file_name)
      dst_file = os.path.join(dst_dir, src_file_name)
      assert not os.path.lexists(dst_file)
      shutil.move(src_file, dst_file)
      renamed_paths[os.path.relpath(dst_file, res_root)] = os.path.relpath(
          src_file, res_root)
  return renamed_paths
+
+
+def _PackageIdFromOptions(options):
+ package_id = None
+ if options.package_id:
+ package_id = options.package_id
+ if options.package_name:
+ package_id = options.package_name_to_id_mapping.get(options.package_name)
+ if package_id is None:
+ raise Exception(
+ 'Package name %s is not present in package_name_to_id_mapping.' %
+ options.package_name)
+ return package_id
+
+
def _CreateLinkApkArgs(options):
  """Create command-line arguments list to invoke 'aapt2 link'.

  Args:
    options: The command-line options tuple.
  Returns:
    A list of strings corresponding to the command-line invokation for
    the command, matching the arguments from |options|.
  """
  cmd = [
      options.aapt2_path,
      'link',
      '--version-code', options.version_code,
      '--version-name', options.version_name,
      '--auto-add-overlay',
      '--no-version-vectors',
  ]

  for include_jar in options.include_resources:
    cmd.extend(['-I', include_jar])
  if options.proguard_file:
    cmd.extend(['--proguard', options.proguard_file])
  if options.proguard_file_main_dex:
    cmd.extend(['--proguard-main-dex', options.proguard_file_main_dex])
  if options.emit_ids_out:
    cmd.extend(['--emit-ids', options.emit_ids_out])

  if options.no_compress:
    for extension in options.no_compress.split(','):
      cmd.extend(['-0', extension])

  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
  # can be used with recent versions of aapt2.
  if options.proto_format:
    cmd.append('--proto-format')
  elif options.shared_resources:
    cmd.append('--shared-lib')

  if options.no_xml_namespaces:
    cmd.append('--no-xml-namespaces')

  package_id = _PackageIdFromOptions(options)
  if package_id is not None:
    cmd.extend(['--package-id', package_id, '--allow-reserved-package-id'])

  return cmd
+
+
def _FixManifest(options, temp_dir):
  """Fix the APK's AndroidManifest.xml.

  This adds any missing namespaces for 'android' and 'tools', and
  sets certains elements like 'platformBuildVersionCode' or
  'android:debuggable' depending on the content of |options|.

  Args:
    options: The command-line arguments tuple.
    temp_dir: A temporary directory where the fixed manifest will be written to.
  Returns:
    Tuple of:
     * Manifest path within |temp_dir|.
     * Original package_name (if different from arsc_package_name).

  Raises:
    Exception: if zero or multiple android SDK jars yield manifest values.
  """
  def maybe_extract_version(j):
    # Returns None for jars aapt2 cannot parse, so they are filtered below.
    try:
      return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
    except build_utils.CalledProcessError:
      return None

  # platformBuildVersion* values come from the SDK jar's own manifest.
  android_sdk_jars = [j for j in options.include_resources
                      if os.path.basename(j) in ('android.jar',
                                                 'android_system.jar')]
  extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
  successful_extractions = [x for x in extract_all if x]
  if len(successful_extractions) == 0:
    raise Exception(
        'Unable to find android SDK jar among candidates: %s'
            % ', '.join(android_sdk_jars))
  elif len(successful_extractions) > 1:
    raise Exception(
        'Found multiple android SDK jars among candidates: %s'
            % ', '.join(android_sdk_jars))
  version_code, version_name = successful_extractions.pop()[:2]

  debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
  doc, manifest_node, app_node = resource_utils.ParseAndroidManifest(
      options.android_manifest)

  manifest_node.set('platformBuildVersionCode', version_code)
  manifest_node.set('platformBuildVersionName', version_name)

  orig_package = manifest_node.get('package')
  if options.arsc_package_name:
    manifest_node.set('package', options.arsc_package_name)

  if options.debuggable:
    app_node.set('{%s}%s' % (resource_utils.ANDROID_NAMESPACE, 'debuggable'),
                 'true')

  with open(debug_manifest_path, 'w') as debug_manifest:
    debug_manifest.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))

  return debug_manifest_path, orig_package
+
+
+def _ResourceNameFromPath(path):
+ return os.path.splitext(os.path.basename(path))[0]
+
+
def _CreateKeepPredicate(resource_dirs, resource_blacklist_regex,
                         resource_blacklist_exceptions):
  """Return a predicate lambda to determine which resource files to keep.

  Args:
    resource_dirs: list of top-level resource directories.
    resource_blacklist_regex: A regular expression describing all resources
      to exclude, except if they are mip-maps, or if they are listed
      in |resource_blacklist_exceptions|.
    resource_blacklist_exceptions: A list of glob patterns corresponding
      to exceptions to the |resource_blacklist_regex|.
  Returns:
    A lambda that takes a path, and returns true if the corresponding file
    must be kept.
  """
  naive_predicate = lambda path: os.path.basename(path)[0] != '.'
  if resource_blacklist_regex == '':
    # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways.
    return naive_predicate

  # Fix: the original re-checked `resource_blacklist_regex != ''` here, which
  # is always true after the early return above; the redundant guard is gone.
  # A simple predicate that only removes (returns False for) paths covered by
  # the blacklist regex, except if they are mipmaps, or listed as exceptions.
  naive_predicate = lambda path: (
      not re.search(resource_blacklist_regex, path) or
      re.search(r'[/-]mipmap[/-]', path) or
      build_utils.MatchesGlob(path, resource_blacklist_exceptions))

  # Build a set of all names from drawables kept by naive_predicate().
  # Used later to ensure that we never exclude drawables from densities
  # that are filtered-out by naive_predicate().
  non_filtered_drawables = set()
  for resource_dir in resource_dirs:
    for path in _IterFiles(resource_dir):
      if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path):
        non_filtered_drawables.add(_ResourceNameFromPath(path))

  # NOTE: Defined as a function, instead of a lambda to avoid the
  # auto-formatter to put this on a very long line that overflows.
  def drawable_predicate(path):
    return (naive_predicate(path)
            or _ResourceNameFromPath(path) not in non_filtered_drawables)

  return drawable_predicate
+
+
def _ConvertToWebP(webp_binary, png_files):
  """Losslessly converts .png files to .webp in place, deleting the .png.

  Files matching _PNG_WEBP_BLACKLIST_PATTERN are skipped. Conversions run on
  a 10-thread pool.

  Args:
    webp_binary: Path to the cwebp executable.
    png_files: Iterable of (png_path, original_dir) tuples.
  Returns:
    A dict mapping each new .webp path to the original .png path, both
    relative to original_dir.
  """
  renamed_paths = dict()
  pool = multiprocessing.pool.ThreadPool(10)
  def convert_image(png_path_tuple):
    png_path, original_dir = png_path_tuple
    root = os.path.splitext(png_path)[0]
    webp_path = root + '.webp'
    args = [webp_binary, png_path, '-mt', '-quiet', '-m', '6', '-q', '100',
            '-lossless', '-o', webp_path]
    subprocess.check_call(args)
    os.remove(png_path)
    # NOTE(review): workers mutate this shared dict concurrently; each key is
    # unique so this relies on dict item-assignment being atomic under the GIL.
    renamed_paths[os.path.relpath(webp_path, original_dir)] = os.path.relpath(
        png_path, original_dir)

  pool.map(convert_image, [f for f in png_files
                           if not _PNG_WEBP_BLACKLIST_PATTERN.match(f[0])])
  pool.close()
  pool.join()
  return renamed_paths
+
+
def _CompileDeps(aapt2_path, dep_subdirs, temp_dir):
  """Runs 'aapt2 compile' on each resource dependency directory.

  Compilation runs on a 10-thread pool; each directory produces one partial
  zip whose entries are then sorted for determinism.

  Args:
    aapt2_path: Path to the aapt2 tool.
    dep_subdirs: Resource directories to compile, one partial per directory.
    temp_dir: Scratch directory; partials are written under 'partials/'.
  Returns:
    List of paths to the sorted partial zips (input order preserved).
  """
  partials_dir = os.path.join(temp_dir, 'partials')
  build_utils.MakeDirectory(partials_dir)
  partial_compile_command = [
      aapt2_path,
      'compile',
      # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
      # '--no-crunch',
  ]
  pool = multiprocessing.pool.ThreadPool(10)
  def compile_partial(directory):
    dirname = os.path.basename(directory)
    partial_path = os.path.join(partials_dir, dirname + '.zip')
    compile_command = (partial_compile_command +
                       ['--dir', directory, '-o', partial_path])
    # Benign styleable-configuration warnings are filtered out of stderr.
    build_utils.CheckOutput(
        compile_command,
        stderr_filter=lambda output:
            build_utils.FilterLines(
                output, r'ignoring configuration .* for styleable'))

    # Sorting the files in the partial ensures deterministic output from the
    # aapt2 link step which uses order of files in the partial.
    sorted_partial_path = os.path.join(partials_dir, dirname + '.sorted.zip')
    _SortZip(partial_path, sorted_partial_path)

    return sorted_partial_path

  partials = pool.map(compile_partial, dep_subdirs)
  pool.close()
  pool.join()
  return partials
+
+
def _CreateResourceInfoFile(
    renamed_paths, apk_info_path, dependencies_res_zips):
  """Write an .info file describing where each resource file came from.

  Merges the '<zip>.info' files of all dependency resource zips (when they
  exist) and appends a 'Rename:' entry for every file moved by earlier
  processing steps.

  Args:
    renamed_paths: Dict mapping new (renamed) relative paths to originals.
    apk_info_path: Output path for the combined .info file.
    dependencies_res_zips: List of dependency resource zip paths.
  """
  lines = set()
  for zip_file in dependencies_res_zips:
    zip_info_file_path = zip_file + '.info'
    if os.path.exists(zip_info_file_path):
      with open(zip_info_file_path, 'r') as zip_info_file:
        lines.update(zip_info_file.readlines())
  # items() instead of the Python-2-only iteritems(): identical behavior
  # here, but keeps the script runnable under Python 3 as well.
  for dest, source in renamed_paths.items():
    lines.add('Rename:{},{}\n'.format(dest, source))
  # Sorted for deterministic build output.
  with build_utils.AtomicOutput(apk_info_path) as info_file:
    info_file.writelines(sorted(lines))
+
+
def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
  """Remove localized strings that should not go into the final output.

  NOTE: This mutates the extracted dependency directories in place: files
  for unwanted locales are deleted, and files for partially-wanted locales
  are rewritten with a filtered set of strings.

  Args:
    dep_subdirs: List of resource dependency directories.
    options: Command-line options namespace.
  """
  if (not options.locale_whitelist
      and not options.shared_resources_whitelist_locales):
    # Keep everything, there is nothing to do.
    return

  # Collect locale and file paths from the existing subdirs.
  # The following variable maps Android locale names to
  # sets of corresponding xml file paths.
  locale_to_files_map = collections.defaultdict(set)
  for directory in dep_subdirs:
    for f in _IterFiles(directory):
      locale = resource_utils.FindLocaleInStringResourceFilePath(f)
      if locale:
        locale_to_files_map[locale].add(f)

  all_locales = set(locale_to_files_map)

  # Set A: wanted locales, either all of them or the
  # list provided by --locale-whitelist.
  wanted_locales = all_locales
  if options.locale_whitelist:
    wanted_locales = _ToAndroidLocales(options.locale_whitelist,
                                       options.support_zh_hk)

  # Set B: shared resources locales, which is either set A
  # or the list provided by --shared-resources-whitelist-locales
  shared_resources_locales = wanted_locales
  shared_names_whitelist = set()
  if options.shared_resources_whitelist_locales:
    shared_names_whitelist = set(
        resource_utils.GetRTxtStringResourceNames(
            options.shared_resources_whitelist))

    shared_resources_locales = _ToAndroidLocales(
        options.shared_resources_whitelist_locales, options.support_zh_hk)

  # Remove any file that belongs to a locale not covered by
  # either A or B.
  removable_locales = (all_locales - wanted_locales - shared_resources_locales)
  for locale in removable_locales:
    for path in locale_to_files_map[locale]:
      os.remove(path)

  # For any locale in B but not in A, only keep the shared
  # resource strings in each file.
  # (Capturing shared_names_whitelist in the lambdas below is safe: the set
  # is fully built before these loops and never modified afterwards.)
  for locale in shared_resources_locales - wanted_locales:
    for path in locale_to_files_map[locale]:
      resource_utils.FilterAndroidResourceStringsXml(
          path, lambda x: x in shared_names_whitelist)

  # For any locale in A but not in B, only keep the strings
  # that are _not_ from shared resources in the file.
  for locale in wanted_locales - shared_resources_locales:
    for path in locale_to_files_map[locale]:
      resource_utils.FilterAndroidResourceStringsXml(
          path, lambda x: x not in shared_names_whitelist)
+
+
def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
  """Compile resources with aapt2 and generate intermediate .ap_ file.

  Args:
    options: The command-line options tuple. E.g. the generated apk
      will be written to |options.apk_path|.
    dep_subdirs: The list of directories where dependency resource zips
      were extracted (its content will be altered by this function).
    temp_dir: A temporary directory.
    gen_dir: Another temp directory where some intermediate files are
      generated.
    r_txt_path: The path where the R.txt file will written to.
  """
  # renamed_paths accumulates 'new path -> old path' entries from every
  # in-place transformation below, for the final .info file.
  renamed_paths = dict()
  renamed_paths.update(_DuplicateZhResources(dep_subdirs))
  renamed_paths.update(_RenameLocaleResourceDirs(dep_subdirs))

  # Deletes/rewrites locale files inside dep_subdirs.
  _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

  # Create a function that selects which resource files should be packaged
  # into the final output. Any file that does not pass the predicate will
  # be removed below.
  keep_predicate = _CreateKeepPredicate(dep_subdirs,
                                        options.resource_blacklist_regex,
                                        options.resource_blacklist_exceptions)
  png_paths = []
  for directory in dep_subdirs:
    for f in _IterFiles(directory):
      if not keep_predicate(f):
        os.remove(f)
      elif f.endswith('.png'):
        # Remember surviving .png files (with their root dir) for the
        # optional webp conversion below.
        png_paths.append((f, directory))
  if png_paths and options.png_to_webp:
    renamed_paths.update(_ConvertToWebP(options.webp_binary, png_paths))
  for directory in dep_subdirs:
    renamed_paths.update(_MoveImagesToNonMdpiFolders(directory))

  link_command = _CreateLinkApkArgs(options)
  # TODO(digit): Is this below actually required for R.txt generation?
  link_command += ['--java', gen_dir]

  fixed_manifest, orig_package = _FixManifest(options, temp_dir)
  link_command += [
      '--manifest', fixed_manifest, '--rename-manifest-package', orig_package
  ]

  partials = _CompileDeps(options.aapt2_path, dep_subdirs, temp_dir)
  for partial in partials:
    link_command += ['-R', partial]

  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
  # Also creates R.txt
  with build_utils.AtomicOutput(options.apk_path) as unoptimized, \
      build_utils.AtomicOutput(r_txt_path) as r_txt, \
      _MaybeCreateStableIdsFile(options) as stable_ids:
    # stable_ids is an open temporary file (or None); aapt2 reads it by name.
    if stable_ids:
      link_command += ['--stable-ids', stable_ids.name]
    link_command += ['-o', unoptimized.name]
    link_command += ['--output-text-symbols', r_txt.name]
    build_utils.CheckOutput(
        link_command, print_stdout=False, print_stderr=False)

    if options.optimized_resources_path:
      with build_utils.AtomicOutput(options.optimized_resources_path) as opt:
        _OptimizeApk(opt.name, options, temp_dir, unoptimized.name, r_txt.name)

  _CreateResourceInfoFile(
      renamed_paths, options.apk_info_path, options.dependencies_res_zips)
+
+
def _OptimizeApk(output, options, temp_dir, unoptimized_apk_path, r_txt_path):
  """Optimize intermediate .ap_ file with aapt2.

  Args:
    output: Path to write to.
    options: The command-line options.
    temp_dir: A temporary directory.
    unoptimized_apk_path: path of the apk to optimize.
    r_txt_path: path to the R.txt file of the unoptimized apk.
  """
  gen_config_path = os.path.join(temp_dir, 'aapt2.config')
  # Start from the user-provided resources config, when given.
  if options.resources_config_path:
    shutil.copyfile(options.resources_config_path, gen_config_path)

  # Resources of type ID are references to UI elements/views. They are used by
  # UI automation testing frameworks. They are kept in so that they don't
  # break tests, even though they may not actually be used during runtime.
  # See https://crbug.com/900993
  id_resources = _ExtractIdResources(r_txt_path)
  with open(gen_config_path, 'a+') as config:
    config.writelines(
        '{}#no_obfuscate\n'.format(res) for res in id_resources)

  # Optimize the resources.arsc file by obfuscating resource names and only
  # allow usage via R.java constant.
  optimize_command = [
      options.aapt2_path,
      'optimize',
      '--enable-resource-obfuscation',
      '-o', output,
      '--resources-config-path', gen_config_path,
      unoptimized_apk_path,
  ]
  build_utils.CheckOutput(
      optimize_command, print_stdout=False, print_stderr=False)
+
+
+def _ExtractIdResources(rtxt_path):
+ """Extract resources of type ID from the R.txt file
+
+ Args:
+ rtxt_path: Path to R.txt file with all the resources
+ Returns:
+ List of id resources in the form of id/<resource_name>
+ """
+ id_resources = []
+ with open(rtxt_path) as rtxt:
+ for line in rtxt:
+ if ' id ' in line:
+ resource_name = line.split()[2]
+ id_resources.append('id/{}'.format(resource_name))
+ return id_resources
+
+
@contextlib.contextmanager
def _MaybeCreateStableIdsFile(options):
  """Transforms a file generated by --emit-ids from another package.

  --stable-ids is generally meant to be used by different versions of the same
  package. To make it work for other packages, we need to transform the package
  name references to match the package that resources are being generated for.

  Note: This will fail if the package ID of the resources in
  |options.use_resource_ids_path| does not match the package ID of the
  resources being linked.

  Args:
    options: The command-line options
  Yields:
    An open NamedTemporaryFile whose .name points at the transformed resource
    IDs file (lines formatted like package:type/name = 0xPPTTEEEE), or None
    when --use-resource-ids-path was not given. (Callers use the .name
    attribute, not the file object's content directly.)
  """
  if options.use_resource_ids_path:
    package_name = options.package_name
    if not package_name:
      package_name = resource_utils.ExtractPackageFromManifest(
          options.android_manifest)
    with open(options.use_resource_ids_path) as stable_ids_file:
      with tempfile.NamedTemporaryFile() as output_ids_file:
        # Rewrite the leading 'package:' of every line to this target's
        # package name.
        output_stable_ids = re.sub(
            r'^.*?:',
            package_name + ':',
            stable_ids_file.read(),
            flags=re.MULTILINE)
        # NOTE(review): NamedTemporaryFile defaults to binary mode; writing a
        # str here is fine under Python 2 but would raise under Python 3 —
        # confirm before porting this script.
        output_ids_file.write(output_stable_ids)
        output_ids_file.flush()
        yield output_ids_file
  else:
    yield None
+
+
+def _WriteFinalRTxtFile(options, aapt_r_txt_path):
+ """Determine final R.txt and return its location.
+
+ This handles --r-text-in and --r-text-out options at the same time.
+
+ Args:
+ options: The command-line options tuple.
+ aapt_r_txt_path: The path to the R.txt generated by aapt.
+ Returns:
+ Path to the final R.txt file.
+ """
+ if options.r_text_in:
+ r_txt_file = options.r_text_in
+ else:
+ # When an empty res/ directory is passed, aapt does not write an R.txt.
+ r_txt_file = aapt_r_txt_path
+ if not os.path.exists(r_txt_file):
+ build_utils.Touch(r_txt_file)
+
+ if options.r_text_out:
+ shutil.copyfile(r_txt_file, options.r_text_out)
+
+ return r_txt_file
+
+
def main(args):
  """Entry point: compile Android resources into an intermediate apk.

  Parses arguments, packages resources with aapt2, writes the final R.txt,
  generates R.java sources, and sanity-checks the resulting package ID.
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # When the debug environment variable is set, keep intermediate files in a
  # stable per-apk directory instead of a throwaway temp dir.
  debug_temp_resources_dir = os.environ.get(_ENV_DEBUG_VARIABLE)
  if debug_temp_resources_dir:
    debug_temp_resources_dir = os.path.join(debug_temp_resources_dir,
                                            os.path.basename(options.apk_path))
    build_utils.DeleteDirectory(debug_temp_resources_dir)
    build_utils.MakeDirectory(debug_temp_resources_dir)

  with resource_utils.BuildContext(debug_temp_resources_dir) as build:
    dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
                                             build.deps_dir)

    _PackageApk(options, dep_subdirs, build.temp_dir, build.gen_dir,
                build.r_txt_path)

    r_txt_path = _WriteFinalRTxtFile(options, build.r_txt_path)

    # If --shared-resources-whitelist is used, all resources listed in
    # the corresponding R.txt file will be non-final, and an onResourcesLoaded()
    # will be generated to adjust them at runtime.
    #
    # Otherwise, if --shared-resources is used, all resources will be
    # non-final, and an onResourcesLoaded() method will be generated too.
    #
    # Otherwise, all resources will be final, and no method will be generated.
    #
    rjava_build_options = resource_utils.RJavaBuildOptions()
    if options.shared_resources_whitelist:
      rjava_build_options.ExportSomeResources(
          options.shared_resources_whitelist)
      rjava_build_options.GenerateOnResourcesLoaded()
    elif options.shared_resources or options.app_as_shared_lib:
      rjava_build_options.ExportAllResources()
      rjava_build_options.GenerateOnResourcesLoaded()

    resource_utils.CreateRJavaFiles(
        build.srcjar_dir, None, r_txt_path, options.extra_res_packages,
        options.extra_r_text_files, rjava_build_options)

    if options.srcjar_out:
      build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)

    # Sanity check that the created resources have the expected package ID.
    expected_id = _PackageIdFromOptions(options)
    if expected_id is None:
      expected_id = '0x00' if options.shared_resources else '0x7f'
    expected_id = int(expected_id, 16)
    _, package_id = resource_utils.ExtractArscPackage(options.aapt2_path,
                                                      options.apk_path)
    if package_id != expected_id:
      raise Exception(
          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        options.apk_path,
        inputs=options.dependencies_res_zips + options.extra_r_text_files,
        add_pydeps=False)
+
+
# Script entry point when invoked directly (not imported).
if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/compile_resources.pydeps b/deps/v8/build/android/gyp/compile_resources.pydeps
new file mode 100644
index 0000000000..2ffcb52a2c
--- /dev/null
+++ b/deps/v8/build/android/gyp/compile_resources.pydeps
@@ -0,0 +1,29 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+compile_resources.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/copy_ex.py b/deps/v8/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000000..48d1b26df1
--- /dev/null
+++ b/deps/v8/build/android/gyp/copy_ex.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+import filecmp
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+ """Returns a list of all the files in |base|. Each entry is relative to the
+ last path entry of |base|."""
+ result = []
+ dirname = os.path.dirname(base)
+ for root, _, files in os.walk(base):
+ result.extend([os.path.join(root[len(dirname):], f) for f in files])
+ return result
+
def CopyFile(f, dest, deps):
  """Copy file or directory and update deps."""
  if os.path.isdir(f):
    # Whole-directory copy; record every contained file as a dependency.
    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
    deps.extend(_get_all_files(f))
    return

  # If a same-named file already exists inside the destination directory,
  # target that file directly so the content-comparison below applies.
  target = os.path.join(dest, os.path.basename(f))
  if os.path.isfile(target):
    dest = target

  deps.append(f)

  if os.path.isfile(dest):
    if filecmp.cmp(dest, f, shallow=False):
      # Identical content is already present; nothing to do.
      return
    # The shutil.copy() below would fail if the file does not have write
    # permissions. Deleting the file has similar costs to modifying the
    # permissions.
    os.unlink(dest)

  shutil.copy(f, dest)
+
def DoCopy(options, deps):
  """Copy files or directories given in options.files and update deps."""
  # Each --files argument is a GN-list; flatten them all into one list.
  files = []
  for gn_list in options.files:
    files.extend(build_utils.ParseGnList(gn_list))

  for f in files:
    if os.path.isdir(f) and not options.clear:
      print ('To avoid stale files you must use --clear when copying '
             'directories')
      sys.exit(-1)
    CopyFile(f, options.dest, deps)
+
def DoRenaming(options, deps):
  """Copy and rename files given in options.renaming_sources and update deps."""
  src_files = list(itertools.chain.from_iterable(
      build_utils.ParseGnList(f)
      for f in options.renaming_sources))

  dest_files = list(itertools.chain.from_iterable(
      build_utils.ParseGnList(f)
      for f in options.renaming_destinations))

  if len(src_files) != len(dest_files):
    print('Renaming source and destination files do not match.')
    sys.exit(-1)

  # zip() (rather than the Python-2-only itertools.izip) keeps this working
  # under both Python 2 and Python 3; the list-vs-iterator difference is
  # irrelevant for a simple paired iteration.
  for src, dest in zip(src_files, dest_files):
    if os.path.isdir(src):
      print('Renaming a directory is not supported.')
      sys.exit(-1)
    else:
      CopyFile(src, os.path.join(options.dest, dest), deps)
+
def main(args):
  """Entry point: parse options, then copy and/or rename the listed files."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--dest', help='Directory to copy files to.')
  parser.add_option('--files', action='append',
                    help='List of files to copy.')
  parser.add_option('--clear', action='store_true',
                    help='If set, the destination directory will be deleted '
                    'before copying files to it. This is highly recommended to '
                    'ensure that no stale files are left in the directory.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--renaming-sources',
                    action='append',
                    help='List of files need to be renamed while being '
                    'copied to dest directory')
  parser.add_option('--renaming-destinations',
                    action='append',
                    help='List of destination file name without path, the '
                    'number of elements must match rename-sources.')

  options, _ = parser.parse_args(args)

  # Recreate the destination from scratch when requested so that no stale
  # files survive the copy.
  if options.clear:
    build_utils.DeleteDirectory(options.dest)
    build_utils.MakeDirectory(options.dest)

  # Accumulates every source file consumed, for the depfile.
  deps = []

  if options.files:
    DoCopy(options, deps)

  if options.renaming_sources:
    DoRenaming(options, deps)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile, options.stamp, deps, add_pydeps=False)

  if options.stamp:
    build_utils.Touch(options.stamp)
+
+
# Propagate main()'s return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/copy_ex.pydeps b/deps/v8/build/android/gyp/copy_ex.pydeps
new file mode 100644
index 0000000000..e0fb31eaa9
--- /dev/null
+++ b/deps/v8/build/android/gyp/copy_ex.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_apk_operations_script.py b/deps/v8/build/android/gyp/create_apk_operations_script.py
new file mode 100755
index 0000000000..cd2722f9c0
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_apk_operations_script.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import string
+import sys
+
+
# Template for the generated wrapper script. The ${...} placeholders are
# filled in by main() below with repr()-quoted values (paths are made
# relative to the output script's directory before substitution).
SCRIPT_TEMPLATE = string.Template("""\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_apk_operations_script.py

import os
import sys

def main():
  script_directory = os.path.dirname(__file__)
  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
      script_directory, p))
  sys.path.append(resolve(${APK_OPERATIONS_DIR}))
  import apk_operations
  output_dir = resolve(${OUTPUT_DIR})
  try:
    apk_operations.Run(
        output_dir,
        resolve(${APK_PATH}),
        resolve(${INC_JSON_PATH}),
        ${FLAGS_FILE},
        ${TARGET_CPU},
        resolve(${MAPPING_PATH}))
  except TypeError:
    rel_output_dir = os.path.relpath(output_dir)
    rel_script_path = os.path.relpath(sys.argv[0], output_dir)
    sys.stderr.write('Script out-of-date. Rebuild via:\\n')
    sys.stderr.write(' ninja -C %s %s\\n' % (rel_output_dir, rel_script_path))
    return 1


if __name__ == '__main__':
  sys.exit(main())
""")
+
+
def main(args):
  """Write an executable wrapper script that runs apk_operations.py.

  Args:
    args: Command-line argument list.
  Returns:
    0 on success.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--script-output-path',
                      help='Output path for executable script.')
  parser.add_argument('--apk-path')
  parser.add_argument('--incremental-install-json-path')
  parser.add_argument('--command-line-flags-file')
  parser.add_argument('--target-cpu')
  parser.add_argument('--proguard-mapping-path')
  args = parser.parse_args(args)

  def relativize(path):
    """Returns the path relative to the output script directory."""
    if path is None:
      return path
    return os.path.relpath(path, os.path.dirname(args.script_output_path))

  apk_operations_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
  apk_operations_dir = relativize(apk_operations_dir)

  with open(args.script_output_path, 'w') as script:
    script_dict = {
        'APK_OPERATIONS_DIR': repr(apk_operations_dir),
        'OUTPUT_DIR': repr(relativize('.')),
        'APK_PATH': repr(relativize(args.apk_path)),
        'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)),
        'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)),
        'FLAGS_FILE': repr(args.command_line_flags_file),
        'TARGET_CPU': repr(args.target_cpu),
    }
    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
  # BUG FIX: the original used the Python-2-only octal literal 0750, which is
  # a SyntaxError under Python 3. 0o750 has the identical value and parses
  # under both Python 2.6+ and Python 3.
  os.chmod(args.script_output_path, 0o750)
  return 0
+
+
# Propagate main()'s return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_apk_operations_script.pydeps b/deps/v8/build/android/gyp/create_apk_operations_script.pydeps
new file mode 100644
index 0000000000..9d4dcb8fe5
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_apk_operations_script.pydeps
@@ -0,0 +1,3 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_apk_operations_script.pydeps build/android/gyp/create_apk_operations_script.py
+create_apk_operations_script.py
diff --git a/deps/v8/build/android/gyp/create_app_bundle.py b/deps/v8/build/android/gyp/create_app_bundle.py
new file mode 100755
index 0000000000..9666feb3fe
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle.py
@@ -0,0 +1,377 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create an Android application bundle from one or more bundle modules."""
+
+import argparse
+import itertools
+import json
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+# NOTE: Keep this consistent with the _create_app_bundle_py_imports definition
+# in build/config/android/rules.py
+from util import build_utils
+from util import resource_utils
+
+import bundletool
+
# Location of language-based assets in bundle modules.
_LOCALES_SUBDIR = 'assets/locales/'

# The fallback locale should always have its .pak file included in
# the base apk, i.e. not use language-based asset targeting. This ensures
# that Chrome won't crash on startup if its bundle is installed on a device
# with an unsupported system locale (e.g. fur-rIT).
_FALLBACK_LOCALE = 'en-US'

# List of split dimensions recognized by this tool.
_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]

# Due to historical reasons, certain languages identified by Chromium with a
# 3-letters ISO 639-2 code, are mapped to a nearly equivalent 2-letters
# ISO 639-1 code instead (due to the fact that older Android releases only
# supported the latter when matching resources).
#
# This is the same conversion as is used for Java resources.
_SHORTEN_LANGUAGE_CODE_MAP = {
  'fil': 'tl', # Filipino to Tagalog.
}

# A list of extensions corresponding to files that should never be compressed
# in the bundle. This used to be handled by bundletool automatically until
# release 0.8.0, which required that this be passed to the BundleConfig
# file instead.
#
# This is the original list, which was taken from aapt2, with 'webp' added to
# it (which curiously was missing from the list).
# NOTE(review): 'git' below looks like a transcription of aapt2's 'gif'
# extension — confirm upstream before changing, since fixing it would change
# which files are stored uncompressed.
_UNCOMPRESSED_FILE_EXTS = [
    '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'git', 'imy', 'jet',
    'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
    'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
    'xmf'
]
+
+
def _ParseArgs(args):
  """Parse and validate command-line arguments.

  Args:
    args: Raw command-line argument list.
  Returns:
    An argparse.Namespace with parsed and normalized options.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--out-bundle', required=True,
                      help='Output bundle zip archive.')
  parser.add_argument('--module-zips', required=True,
                      help='GN-list of module zip archives.')
  parser.add_argument(
      '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
  parser.add_argument(
      '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
  parser.add_argument('--uncompressed-assets', action='append',
                      help='GN-list of uncompressed assets.')
  parser.add_argument(
      '--compress-shared-libraries',
      action='store_true',
      help='Whether to store native libraries compressed.')
  parser.add_argument('--split-dimensions',
                      help="GN-list of split dimensions to support.")
  parser.add_argument(
      '--base-module-rtxt-path',
      help='Optional path to the base module\'s R.txt file, only used with '
      'language split dimension.')
  parser.add_argument(
      '--base-whitelist-rtxt-path',
      help='Optional path to an R.txt file, string resources '
      'listed there _and_ in --base-module-rtxt-path will '
      'be kept in the base bundle module, even if language'
      ' splitting is enabled.')

  parser.add_argument('--keystore-path', help='Keystore path')
  parser.add_argument('--keystore-password', help='Keystore password')
  parser.add_argument('--key-name', help='Keystore key name')

  options = parser.parse_args(args)
  options.module_zips = build_utils.ParseGnList(options.module_zips)
  # NOTE(review): this expands @FileArg references but does not parse the
  # GN-list itself, and would fail if --rtxt-in-paths was never passed —
  # confirm against callers before changing.
  options.rtxt_in_paths = build_utils.ExpandFileArgs(options.rtxt_in_paths)

  if not options.module_zips:
    raise Exception('The module zip list cannot be empty.')

  # Signing is optional, but all --keyXX parameters should be set.
  if options.keystore_path or options.keystore_password or options.key_name:
    if not (options.keystore_path and options.keystore_password
            and options.key_name):
      raise Exception('When signing the bundle, use --keystore-path, '
                      '--keystore-password and --key-name.')

  # Merge all uncompressed assets into a set.
  uncompressed_list = []
  if options.uncompressed_assets:
    for l in options.uncompressed_assets:
      for entry in build_utils.ParseGnList(l):
        # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
        pos = entry.find(':')
        if pos >= 0:
          uncompressed_list.append(entry[pos + 1:])
        else:
          uncompressed_list.append(entry)

  options.uncompressed_assets = set(uncompressed_list)

  # Check that all split dimensions are valid
  if options.split_dimensions:
    options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
    for dim in options.split_dimensions:
      if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
        parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
            dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))

  # As a special case, --base-whitelist-rtxt-path can be empty to indicate
  # that the module doesn't need such a whitelist. That's because it is easier
  # to check this condition here than through GN rules :-(
  if options.base_whitelist_rtxt_path == '':
    options.base_module_rtxt_path = None

  # Check --base-module-rtxt-path and --base-whitelist-rtxt-path usage.
  if options.base_module_rtxt_path:
    if not options.base_whitelist_rtxt_path:
      parser.error(
          '--base-module-rtxt-path requires --base-whitelist-rtxt-path')
    # BUG FIX: when --split-dimensions was not passed, split_dimensions is
    # None and the original 'in' test raised TypeError instead of reporting
    # the intended usage error.
    if (not options.split_dimensions
        or 'language' not in options.split_dimensions):
      parser.error('--base-module-rtxt-path is only valid with '
                   'language-based splits.')

  return options
+
+
+def _MakeSplitDimension(value, enabled):
+ """Return dict modelling a BundleConfig splitDimension entry."""
+ return {'value': value, 'negate': not enabled}
+
+
def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
                              split_dimensions, base_master_resource_ids):
  """Generate a JSON string suitable for a bundletool BundleConfig file.

  Args:
    uncompressed_assets: A list or set of file paths under assets/ that always
      be stored uncompressed.
    compress_shared_libraries: Boolean, whether to compress native libs.
    split_dimensions: list of split dimensions.
    base_master_resource_ids: Optional list of 32-bit resource IDs to keep
      inside the base module, even when split dimensions are enabled.
  Returns:
    The BundleConfig content serialized as a JSON string.
  """
  # Compute splitsConfig list. Each item is a dictionary that can have
  # the following keys:
  #    'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
  #    'negate': Boolean, True to indicate that the bundle should *not* be
  #              split (unused at the moment by this script).
  enabled_dims = set(split_dimensions)
  split_dimensions = [_MakeSplitDimension(dim, dim in enabled_dims)
                      for dim in _ALL_SPLIT_DIMENSIONS]

  # Native libraries loaded by the crazy linker.
  # Whether other .so files are compressed is controlled by
  # "uncompressNativeLibraries".
  uncompressed_globs = ['lib/*/crazy.*']
  # Locale-specific pak files stored in bundle splits need not be compressed.
  uncompressed_globs += ['assets/locales#lang_*/*.pak',
                         'assets/fallback-locales/*.pak']
  uncompressed_globs += ['assets/' + x for x in uncompressed_assets]
  # NOTE: Use '**' instead of '*' to work through directories!
  uncompressed_globs += ['**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS]

  data = {
      'optimizations': {
          'splitsConfig': {
              'splitDimension': split_dimensions,
          },
          'uncompressNativeLibraries': {
              'enabled': not compress_shared_libraries,
          },
      },
      'compression': {
          'uncompressedGlob': sorted(uncompressed_globs),
      },
  }

  if base_master_resource_ids:
    data['master_resources'] = {
        'resource_ids': list(base_master_resource_ids),
    }

  return json.dumps(data, indent=2)
+
+
def _RewriteLanguageAssetPath(src_path):
  """Rewrite the destination path of a locale asset for language-based splits.

  Should only be used when generating bundles with language-based splits.
  This will rewrite paths that look like locales/<locale>.pak into
  locales#<language>/<locale>.pak, where <language> is the language code
  from the locale.

  Returns:
    The new path as a string.
  """
  if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
    # BUG FIX: this used to return [src_path] (a one-element list) while all
    # other code paths return a plain string; the caller passes the result
    # straight into build_utils.AddToZipHermetic() as a path string, so
    # return the path unwrapped.
    return src_path

  locale = src_path[len(_LOCALES_SUBDIR):-4]
  android_locale = resource_utils.ToAndroidLocaleName(locale)

  # The locale format is <lang>-<region> or <lang>. Extract the language.
  pos = android_locale.find('-')
  if pos >= 0:
    android_language = android_locale[:pos]
  else:
    android_language = android_locale

  if locale == _FALLBACK_LOCALE:
    # Fallback locale .pak files must be placed in a different directory
    # to ensure they are always stored in the base module.
    result_path = 'assets/fallback-locales/%s.pak' % locale
  else:
    # Other language .pak files go into a language-specific asset directory
    # that bundletool will store in separate split APKs.
    result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)

  return result_path
+
+
def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
  """Splits assets in a module if needed.

  Args:
    src_module_zip: input zip module path.
    tmp_dir: Path to temporary directory, where the new output module might
      be written to.
    split_dimensions: list of split dimensions.

  Returns:
    If the module doesn't need asset targeting, doesn't do anything and
    returns src_module_zip. Otherwise, create a new module zip archive under
    tmp_dir with the same file name, but which contains assets paths targeting
    the proper dimensions.
  """
  split_language = 'LANGUAGE' in split_dimensions
  if not split_language:
    # Nothing to target, so return original module path.
    return src_module_zip

  with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
    language_files = [
      f for f in src_zip.namelist() if f.startswith(_LOCALES_SUBDIR)]

    if not language_files:
      # No language-based assets to split in this module.
      return src_module_zip

    # Rewrite the module zip entry-by-entry, renaming only the locale assets.
    tmp_zip = os.path.join(tmp_dir, os.path.basename(src_module_zip))
    with zipfile.ZipFile(tmp_zip, 'w') as dst_zip:
      for info in src_zip.infolist():
        src_path = info.filename
        # Preserve each entry's stored/deflated choice in the new archive.
        is_compressed = info.compress_type != zipfile.ZIP_STORED

        dst_path = src_path
        if src_path in language_files:
          dst_path = _RewriteLanguageAssetPath(src_path)

        build_utils.AddToZipHermetic(
            dst_zip,
            dst_path,
            data=src_zip.read(src_path),
            compress=is_compressed)

    return tmp_zip
+
+
def _GenerateBaseResourcesWhitelist(base_module_rtxt_path,
                                    base_whitelist_rtxt_path):
  """Generate a whitelist of base master resource ids.

  Args:
    base_module_rtxt_path: Path to base module R.txt file.
    base_whitelist_rtxt_path: Path to base whitelist R.txt file.
  Returns:
    The resource ids (keys of the generated whitelist map).
  """
  return resource_utils.GenerateStringResourcesWhitelist(
      base_module_rtxt_path, base_whitelist_rtxt_path).keys()
+
+
def main(args):
  """Entry point: build (and optionally sign) the .aab from module zips."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  split_dimensions = []
  if options.split_dimensions:
    split_dimensions = [x.upper() for x in options.split_dimensions]


  with build_utils.TempDir() as tmp_dir:
    # Rewrite each module's locale assets for language targeting, if enabled.
    module_zips = [
        _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
        for module in options.module_zips]

    base_master_resource_ids = None
    if options.base_module_rtxt_path:
      base_master_resource_ids = _GenerateBaseResourcesWhitelist(
          options.base_module_rtxt_path, options.base_whitelist_rtxt_path)

    bundle_config = _GenerateBundleConfigJson(
        options.uncompressed_assets, options.compress_shared_libraries,
        split_dimensions, base_master_resource_ids)

    tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')

    # Build into a separate temporary file when signing, so jarsigner writes
    # the signed output to tmp_bundle.
    tmp_unsigned_bundle = tmp_bundle
    if options.keystore_path:
      tmp_unsigned_bundle = tmp_bundle + '.unsigned'

    # Important: bundletool requires that the bundle config file is
    # named with a .pb.json extension.
    tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'

    with open(tmp_bundle_config, 'w') as f:
      f.write(bundle_config)

    cmd_args = ['java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle']
    cmd_args += ['--modules=%s' % ','.join(module_zips)]
    cmd_args += ['--output=%s' % tmp_unsigned_bundle]
    cmd_args += ['--config=%s' % tmp_bundle_config]

    build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)

    if options.keystore_path:
      # NOTE: As stated by the public documentation, apksigner cannot be used
      # to sign the bundle (because it rejects anything that isn't an APK).
      # The signature and digest algorithm selection come from the internal
      # App Bundle documentation. There is no corresponding public doc :-(
      signing_cmd_args = [
          'jarsigner', '-sigalg', 'SHA256withRSA', '-digestalg', 'SHA-256',
          '-keystore', 'file:' + options.keystore_path,
          '-storepass' , options.keystore_password,
          '-signedjar', tmp_bundle,
          tmp_unsigned_bundle,
          options.key_name,
      ]
      build_utils.CheckOutput(signing_cmd_args, print_stderr=True)

    shutil.move(tmp_bundle, options.out_bundle)

  # Concatenate all module R.txt files into one combined file for the bundle.
  if options.rtxt_out_path:
    with open(options.rtxt_out_path, 'w') as rtxt_out:
      for rtxt_in_path in options.rtxt_in_paths:
        with open(rtxt_in_path, 'r') as rtxt_in:
          rtxt_out.write('-- Contents of {}\n'.format(
              os.path.basename(rtxt_in_path)))
          rtxt_out.write(rtxt_in.read())
+
+
# Script entry point when invoked directly (not imported).
if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/create_app_bundle.pydeps b/deps/v8/build/android/gyp/create_app_bundle.pydeps
new file mode 100644
index 0000000000..fef04fab53
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle.pydeps build/android/gyp/create_app_bundle.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+bundletool.py
+create_app_bundle.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py
new file mode 100755
index 0000000000..f01691e418
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an .apks from an .aab with only English strings."""
+
+import argparse
+import os
+import sys
+
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import app_bundle_utils
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument(
+ '--bundle', required=True, help='Path to input .aab file.')
+ parser.add_argument(
+ '--output', required=True, help='Path to output .apks file.')
+ parser.add_argument('--aapt2-path', required=True, help='Path to aapt2.')
+ parser.add_argument(
+ '--keystore-path', required=True, help='Path to keystore.')
+ parser.add_argument(
+ '--keystore-password', required=True, help='Keystore password.')
+ parser.add_argument(
+ '--keystore-name', required=True, help='Key name within keystore')
+
+ args = parser.parse_args()
+
+ app_bundle_utils.GenerateBundleApks(
+ args.bundle,
+ args.output,
+ args.aapt2_path,
+ args.keystore_path,
+ args.keystore_password,
+ args.keystore_name,
+ minimal=True,
+ check_for_noop=False)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps
new file mode 100644
index 0000000000..cd5b08158f
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_app_bundle_minimal_apks.pydeps
@@ -0,0 +1,33 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_minimal_apks.pydeps build/android/gyp/create_app_bundle_minimal_apks.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+bundletool.py
+create_app_bundle_minimal_apks.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/create_bundle_wrapper_script.py b/deps/v8/build/android/gyp/create_bundle_wrapper_script.py
new file mode 100755
index 0000000000..a1a34fe77c
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_bundle_wrapper_script.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a wrapper script to manage an Android App Bundle."""
+
+import argparse
+import os
+import string
+import sys
+
+# Import apk_operations even though this script doesn't use it so that
+# targets that depend on the wrapper scripts will rebuild when apk_operations
+# or its deps change.
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.pardir))
+import apk_operations # pylint: disable=unused-import
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_bundle_wrapper_script.py
+
+import os
+import sys
+
+def main():
+ script_directory = os.path.dirname(__file__)
+ resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+ script_directory, p))
+ sys.path.append(resolve(${WRAPPED_SCRIPT_DIR}))
+ import apk_operations
+
+ apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}),
+ bundle_path=resolve(${BUNDLE_PATH}),
+ bundle_apks_path=resolve(${BUNDLE_APKS_PATH}),
+ aapt2_path=resolve(${AAPT2_PATH}),
+ keystore_path=resolve(${KEYSTORE_PATH}),
+ keystore_password=${KEYSTORE_PASSWORD},
+ keystore_alias=${KEY_NAME},
+ package_name=${PACKAGE_NAME},
+ command_line_flags_file=${FLAGS_FILE},
+ proguard_mapping_path=resolve(${MAPPING_PATH}),
+ target_cpu=${TARGET_CPU},
+ system_image_locales=${SYSTEM_IMAGE_LOCALES})
+
+if __name__ == '__main__':
+ sys.exit(main())
+""")
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--script-output-path', required=True,
+ help='Output path for executable script.')
+ parser.add_argument('--bundle-path', required=True)
+ parser.add_argument('--bundle-apks-path', required=True)
+ parser.add_argument('--package-name', required=True)
+ parser.add_argument('--aapt2-path', required=True)
+ parser.add_argument('--keystore-path', required=True)
+ parser.add_argument('--keystore-password', required=True)
+ parser.add_argument('--key-name', required=True)
+ parser.add_argument('--command-line-flags-file')
+ parser.add_argument('--proguard-mapping-path')
+ parser.add_argument('--target-cpu')
+ parser.add_argument('--system-image-locales')
+ args = parser.parse_args(args)
+
+ def relativize(path):
+ """Returns the path relative to the output script directory."""
+ if path is None:
+ return path
+ return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+ wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+ wrapped_script_dir = relativize(wrapped_script_dir)
+
+ with open(args.script_output_path, 'w') as script:
+ script_dict = {
+ 'WRAPPED_SCRIPT_DIR':
+ repr(wrapped_script_dir),
+ 'OUTPUT_DIR':
+ repr(relativize('.')),
+ 'BUNDLE_PATH':
+ repr(relativize(args.bundle_path)),
+ 'BUNDLE_APKS_PATH':
+ repr(relativize(args.bundle_apks_path)),
+ 'PACKAGE_NAME':
+ repr(args.package_name),
+ 'AAPT2_PATH':
+ repr(relativize(args.aapt2_path)),
+ 'KEYSTORE_PATH':
+ repr(relativize(args.keystore_path)),
+ 'KEYSTORE_PASSWORD':
+ repr(args.keystore_password),
+ 'KEY_NAME':
+ repr(args.key_name),
+ 'MAPPING_PATH':
+ repr(relativize(args.proguard_mapping_path)),
+ 'FLAGS_FILE':
+ repr(args.command_line_flags_file),
+ 'TARGET_CPU':
+ repr(args.target_cpu),
+ 'SYSTEM_IMAGE_LOCALES':
+ repr(build_utils.ParseGnList(args.system_image_locales)),
+ }
+ script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+ os.chmod(args.script_output_path, 0750)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps b/deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps
new file mode 100644
index 0000000000..5587566f50
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -0,0 +1,102 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/apk_helper.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../../third_party/catapult/devil/devil/android/decorators.py
+../../../third_party/catapult/devil/devil/android/device_errors.py
+../../../third_party/catapult/devil/devil/android/device_signal.py
+../../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../../third_party/catapult/devil/devil/android/device_utils.py
+../../../third_party/catapult/devil/devil/android/flag_changer.py
+../../../third_party/catapult/devil/devil/android/install_commands.py
+../../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../../third_party/catapult/devil/devil/android/md5sum.py
+../../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/catapult/devil/devil/devil_env.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../../third_party/catapult/devil/devil/utils/host_utils.py
+../../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../../third_party/catapult/devil/devil/utils/logging_common.py
+../../../third_party/catapult/devil/devil/utils/lsusb.py
+../../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../apk_operations.py
+../devil_chromium.py
+../incremental_install/__init__.py
+../incremental_install/installer.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+../pylib/symbols/__init__.py
+../pylib/symbols/deobfuscator.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+../pylib/utils/simpleperf.py
+../pylib/utils/time_profile.py
+bundletool.py
+create_bundle_wrapper_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/create_java_binary_script.py b/deps/v8/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000000..4469381c7c
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import argparse
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+bootclasspath = [{bootclasspath}]
+extra_program_args = {extra_program_args}
+if os.getcwd() != self_dir:
+ offset = os.path.relpath(self_dir, os.getcwd())
+ classpath = [os.path.join(offset, p) for p in classpath]
+ bootclasspath = [os.path.join(offset, p) for p in bootclasspath]
+java_cmd = ["java"]
+# This is a simple argparser for jvm and jar arguments.
+parser = argparse.ArgumentParser()
+parser.add_argument('--jar-args')
+parser.add_argument('--jvm-args')
+
+known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+if known_args.jvm_args:
+ jvm_arguments = known_args.jvm_args.strip('"').split()
+ java_cmd.extend(jvm_arguments)
+if known_args.jar_args:
+ jar_arguments = known_args.jar_args.strip('"').split()
+ if unknown_args:
+ raise Exception('There are unknown arguments')
+else:
+ jar_arguments = unknown_args
+
+{noverify_flag}
+if bootclasspath:
+ java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath))
+java_cmd.extend(
+ ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"])
+java_cmd.extend(extra_program_args)
+java_cmd.extend(jar_arguments)
+os.execvp("java", java_cmd)
+"""
+
+def main(argv):
+ argv = build_utils.ExpandFileArgs(argv)
+ parser = optparse.OptionParser()
+ parser.add_option('--output', help='Output path for executable script.')
+ parser.add_option('--main-class',
+ help='Name of the java class with the "main" entry point.')
+ parser.add_option('--classpath', action='append', default=[],
+ help='Classpath for running the jar.')
+ parser.add_option('--bootclasspath', action='append', default=[],
+ help='zip/jar files to add to bootclasspath for java cmd.')
+ parser.add_option('--noverify', action='store_true',
+ help='JVM flag: noverify.')
+
+ options, extra_program_args = parser.parse_args(argv)
+
+ if (options.noverify):
+ noverify_flag = 'java_cmd.append("-noverify")'
+ else:
+ noverify_flag = ''
+
+ classpath = []
+ for cp_arg in options.classpath:
+ classpath += build_utils.ParseGnList(cp_arg)
+
+ bootclasspath = []
+ for bootcp_arg in options.bootclasspath:
+ bootclasspath += build_utils.ParseGnList(bootcp_arg)
+
+ run_dir = os.path.dirname(options.output)
+ bootclasspath = [os.path.relpath(p, run_dir) for p in bootclasspath]
+ classpath = [os.path.relpath(p, run_dir) for p in classpath]
+
+ with build_utils.AtomicOutput(options.output) as script:
+ script.write(script_template.format(
+ classpath=('"%s"' % '", "'.join(classpath)),
+ bootclasspath=('"%s"' % '", "'.join(bootclasspath)
+ if bootclasspath else ''),
+ main_class=options.main_class,
+ extra_program_args=repr(extra_program_args),
+ noverify_flag=noverify_flag))
+
+ os.chmod(options.output, 0750)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_java_binary_script.pydeps b/deps/v8/build/android/gyp/create_java_binary_script.pydeps
new file mode 100644
index 0000000000..96d79bf609
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_java_binary_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py
+../../gn_helpers.py
+create_java_binary_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_size_info_files.py b/deps/v8/build/android/gyp/create_size_info_files.py
new file mode 100755
index 0000000000..5b248e4195
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_size_info_files.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates size-info/*.info files used by SuperSize."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+from util import md5_check
+
+
+def _MergeResInfoFiles(res_info_path, info_paths):
+ # Concatenate them all.
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(res_info_path, only_if_changed=False) as dst:
+ for p in info_paths:
+ with open(p) as src:
+ dst.write(src.read())
+
+
+def _PakInfoPathsForAssets(assets):
+ return [f.split(':')[0] + '.info' for f in assets if f.endswith('.pak')]
+
+
+def _MergePakInfoFiles(merged_path, pak_infos):
+ info_lines = set()
+ for pak_info_path in pak_infos:
+ with open(pak_info_path, 'r') as src_info_file:
+ info_lines.update(src_info_file.readlines())
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(merged_path, only_if_changed=False) as f:
+ f.writelines(sorted(info_lines))
+
+
+def _FullJavaNameFromClassFilePath(path):
+ # Input: base/android/java/src/org/chromium/Foo.class
+ # Output: base.android.java.src.org.chromium.Foo
+ if not path.endswith('.class'):
+ return ''
+ path = os.path.splitext(path)[0]
+ parts = []
+ while path:
+ # Use split to be platform independent.
+ head, tail = os.path.split(path)
+ path = head
+ parts.append(tail)
+ parts.reverse() # Package comes first
+ return '.'.join(parts)
+
+
+def _MergeJarInfoFiles(output, inputs):
+ """Merge several .jar.info files to generate an .apk.jar.info.
+
+ Args:
+ output: output file path.
+    inputs: List of .jar.info or .jar files.
+ """
+ info_data = dict()
+ for path in inputs:
+ # android_java_prebuilt adds jar files in the src directory (relative to
+ # the output directory, usually ../../third_party/example.jar).
+ # android_aar_prebuilt collects jar files in the aar file and uses the
+ # java_prebuilt rule to generate gen/example/classes.jar files.
+ # We scan these prebuilt jars to parse each class path for the FQN. This
+ # allows us to later map these classes back to their respective src
+ # directories.
+ # TODO(agrieve): This should probably also check that the mtime of the .info
+ # is newer than that of the .jar, or change prebuilts to always output
+ # .info files so that they always exist (and change the depfile to
+ # depend directly on them).
+ if path.endswith('.info'):
+ info_data.update(jar_info_utils.ParseJarInfoFile(path))
+ else:
+ with zipfile.ZipFile(path) as zip_info:
+ for name in zip_info.namelist():
+ fully_qualified_name = _FullJavaNameFromClassFilePath(name)
+ if fully_qualified_name:
+ info_data[fully_qualified_name] = '{}/{}'.format(path, name)
+
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+ jar_info_utils.WriteJarInfoFile(f, info_data)
+
+
+def _FindJarInputs(jar_paths):
+ ret = []
+ for jar_path in jar_paths:
+ jar_info_path = jar_path + '.info'
+ if os.path.exists(jar_info_path):
+ ret.append(jar_info_path)
+ else:
+ ret.append(jar_path)
+ return ret
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser(description=__doc__)
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument(
+ '--jar-info-path', required=True, help='Output .jar.info file')
+ parser.add_argument(
+ '--pak-info-path', required=True, help='Output .pak.info file')
+ parser.add_argument(
+ '--res-info-path', required=True, help='Output .res.info file')
+ parser.add_argument(
+ '--jar-files',
+ required=True,
+ action='append',
+ help='GN-list of .jar file paths')
+ parser.add_argument(
+ '--assets',
+ required=True,
+ action='append',
+ help='GN-list of files to add as assets in the form '
+ '"srcPath:zipPath", where ":zipPath" is optional.')
+ parser.add_argument(
+ '--uncompressed-assets',
+ required=True,
+ action='append',
+ help='Same as --assets, except disables compression.')
+ parser.add_argument(
+ '--resource-apk',
+ dest='resource_apks',
+ required=True,
+ action='append',
+ help='An .ap_ file built using aapt')
+
+ options = parser.parse_args(args)
+
+ options.jar_files = build_utils.ParseGnList(options.jar_files)
+ options.assets = build_utils.ParseGnList(options.assets)
+ options.uncompressed_assets = build_utils.ParseGnList(
+ options.uncompressed_assets)
+
+ jar_inputs = _FindJarInputs(set(options.jar_files))
+ pak_inputs = _PakInfoPathsForAssets(options.assets +
+ options.uncompressed_assets)
+ res_inputs = [p + '.info' for p in options.resource_apks]
+
+ # Don't bother re-running if no .info files have changed (saves ~250ms).
+ md5_check.CallAndRecordIfStale(
+ lambda: _MergeJarInfoFiles(options.jar_info_path, jar_inputs),
+ input_paths=jar_inputs,
+ output_paths=[options.jar_info_path])
+
+ # Always recreate these (just as fast as md5 checking them).
+ _MergePakInfoFiles(options.pak_info_path, pak_inputs)
+ _MergeResInfoFiles(options.res_info_path, res_inputs)
+
+ all_inputs = jar_inputs + pak_inputs + res_inputs
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.jar_info_path,
+ inputs=all_inputs,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/create_size_info_files.pydeps b/deps/v8/build/android/gyp/create_size_info_files.pydeps
new file mode 100644
index 0000000000..4ab7f94ea6
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_size_info_files.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../gn_helpers.py
+create_size_info_files.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_stack_script.py b/deps/v8/build/android/gyp/create_stack_script.py
new file mode 100755
index 0000000000..6ccdc384e2
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_stack_script.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import sys
+import textwrap
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = textwrap.dedent(
+ """\
+ #!/usr/bin/env python
+ #
+ # This file was generated by build/android/gyp/create_stack_script.py
+
+ import os
+ import sys
+
+ def main(argv):
+ script_directory = os.path.dirname(__file__)
+ resolve = lambda p: os.path.abspath(os.path.join(script_directory, p))
+ script_path = resolve('{script_path}')
+ script_args = {script_args}
+ script_path_args = {script_path_args}
+ for arg, path in script_path_args:
+ script_args.extend([arg, resolve(path)])
+ script_cmd = [script_path] + script_args + argv
+ print ' '.join(script_cmd)
+ os.execv(script_path, script_cmd)
+
+ if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
+ """)
+
+
+def main(args):
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--script-path',
+ help='Path to the wrapped script.')
+ parser.add_argument(
+ '--script-output-path',
+ help='Path to the output script.')
+ group = parser.add_argument_group('Path arguments')
+ group.add_argument('--output-directory')
+ group.add_argument('--packed-libs')
+
+ args, script_args = parser.parse_known_args(build_utils.ExpandFileArgs(args))
+
+ def relativize(p):
+ return os.path.relpath(p, os.path.dirname(args.script_output_path))
+
+ script_path = relativize(args.script_path)
+
+ script_path_args = []
+ if args.output_directory:
+ script_path_args.append(
+ ('--output-directory', relativize(args.output_directory)))
+ if args.packed_libs:
+ for p in build_utils.ParseGnList(args.packed_libs):
+ script_path_args.append(('--packed-lib', relativize(p)))
+
+ with build_utils.AtomicOutput(args.script_output_path) as script:
+ script.write(SCRIPT_TEMPLATE.format(
+ script_path=script_path,
+ script_args=script_args,
+ script_path_args=script_path_args))
+
+ os.chmod(args.script_output_path, 0750)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/create_stack_script.pydeps b/deps/v8/build/android/gyp/create_stack_script.pydeps
new file mode 100644
index 0000000000..7bddb156f4
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_stack_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_stack_script.pydeps build/android/gyp/create_stack_script.py
+../../gn_helpers.py
+create_stack_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/create_tool_wrapper.py b/deps/v8/build/android/gyp/create_tool_wrapper.py
new file mode 100755
index 0000000000..4433004541
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_tool_wrapper.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple wrapper script that passes the correct --output-directory.
+"""
+
+import argparse
+import os
+
+_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by //build/android/gyp/create_tool_wrapper.py
+
+import os
+import sys
+
+cmd = '{cmd}'
+args = [os.path.basename(cmd), '{flag_name}={output_directory}'] + sys.argv[1:]
+os.execv(cmd, args)
+"""
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output', help='Output path for executable script.')
+ parser.add_argument('--target', help='Path to script being wrapped.')
+ parser.add_argument('--output-directory', help='Value for --output-directory')
+ parser.add_argument('--flag-name',
+ help='Flag name to use instead of --output-directory',
+ default='--output-directory')
+ args = parser.parse_args()
+
+ with open(args.output, 'w') as script:
+ script.write(_TEMPLATE.format(
+ cmd=os.path.abspath(args.target),
+ flag_name=args.flag_name,
+ output_directory=os.path.abspath(args.output_directory)))
+
+ os.chmod(args.output, 0750)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/create_tool_wrapper.pydeps b/deps/v8/build/android/gyp/create_tool_wrapper.pydeps
new file mode 100644
index 0000000000..75b8326e70
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_tool_wrapper.pydeps
@@ -0,0 +1,3 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_tool_wrapper.pydeps build/android/gyp/create_tool_wrapper.py
+create_tool_wrapper.py
diff --git a/deps/v8/build/android/gyp/create_ui_locale_resources.py b/deps/v8/build/android/gyp/create_ui_locale_resources.py
new file mode 100755
index 0000000000..97868cbfde
--- /dev/null
+++ b/deps/v8/build/android/gyp/create_ui_locale_resources.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a zip archive containing localized locale name Android resource
+strings!
+
+This script takes a list of input Chrome-specific locale names, as well as an
+output zip file path.
+
+Each output file will contain the definition of a single string resource,
+named 'current_locale', whose value will be the matching Chromium locale name.
+E.g. values-en-rUS/strings.xml will define 'current_locale' as 'en-US'.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+sys.path.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+# A small string template for the content of each strings.xml file.
+# NOTE: The name is chosen to avoid any conflicts with other strings defined
+# by other resource archives.
+_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="current_detected_ui_locale_name">{resource_text}</string>
+</resources>
+"""
+
+# The default Chrome locale value.
+_DEFAULT_CHROME_LOCALE = 'en-US'
+
+
+def _GenerateLocaleStringsXml(locale):
+ return _TEMPLATE.format(resource_text=locale)
+
+
+def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
+ locale_data = _GenerateLocaleStringsXml(locale)
+ if android_locale:
+ zip_path = 'values-%s/strings.xml' % android_locale
+ else:
+ zip_path = 'values/strings.xml'
+ build_utils.AddToZipHermetic(
+ out_zip, zip_path, data=locale_data, compress=False)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument(
+ '--locale-list',
+ required=True,
+ help='GN-list of Chrome-specific locale names.')
+ parser.add_argument(
+ '--output-zip', required=True, help='Output zip archive path.')
+
+ args = parser.parse_args()
+
+ locale_list = build_utils.ParseGnList(args.locale_list)
+ if not locale_list:
+ raise Exception('Locale list cannot be empty!')
+
+ with build_utils.AtomicOutput(args.output_zip) as tmp_file:
+ with zipfile.ZipFile(tmp_file, 'w') as out_zip:
+ # First, write the default value, since aapt requires one.
+ _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)
+
+ for locale in locale_list:
+ android_locale = resource_utils.ToAndroidLocaleName(locale)
+ _AddLocaleResourceFileToZip(out_zip, android_locale, locale)
+
+ if args.depfile:
+ build_utils.WriteDepfile(args.depfile, args.output_zip)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/desugar.py b/deps/v8/build/android/gyp/desugar.py
new file mode 100755
index 0000000000..b9d04059e5
--- /dev/null
+++ b/deps/v8/build/android/gyp/desugar.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+
def main():
  """Run Desugar.jar over --input-jar, writing the result to --output-jar."""
  args = build_utils.ExpandFileArgs(sys.argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--desugar-jar', required=True,
                      help='Path to Desugar.jar.')
  parser.add_argument('--input-jar', required=True,
                      help='Jar input path to include .class files from.')
  parser.add_argument('--output-jar', required=True,
                      help='Jar output path.')
  parser.add_argument('--classpath', required=True,
                      help='Classpath.')
  parser.add_argument('--bootclasspath', required=True,
                      help='Path to javac bootclasspath interface jar.')
  options = parser.parse_args(args)

  # --classpath and --bootclasspath arrive as GN lists; flatten them.
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)

  cmd = [
      'java',
      '-jar',
      options.desugar_jar,
      '--input',
      options.input_jar,
      '--output',
      options.output_jar,
      # Don't include try-with-resources files in every .jar. Instead, they
      # are included via //third_party/bazel/desugar:desugar_runtime_java.
      '--desugar_try_with_resources_omit_runtime_classes',
  ]
  for path in options.bootclasspath:
    cmd += ['--bootclasspath_entry', path]
  for path in options.classpath:
    cmd += ['--classpath_entry', path]
  build_utils.CheckOutput(cmd, print_stdout=False)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        options.output_jar,
        inputs=options.bootclasspath + options.classpath,
        add_pydeps=False)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/desugar.pydeps b/deps/v8/build/android/gyp/desugar.pydeps
new file mode 100644
index 0000000000..a40f3aa7dd
--- /dev/null
+++ b/deps/v8/build/android/gyp/desugar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py
+../../gn_helpers.py
+desugar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/dex.py b/deps/v8/build/android/gyp/dex.py
new file mode 100755
index 0000000000..cba8c7f17f
--- /dev/null
+++ b/deps/v8/build/android/gyp/dex.py
@@ -0,0 +1,388 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
+
+import convert_dex_profile
+
+
+def _CheckFilePathEndsWithJar(parser, file_path):
+ if not file_path.endswith(".jar"):
+ parser.error("%s does not end in .jar" % file_path)
+
+
def _CheckFilePathsEndWithJar(parser, file_paths):
  """Report a parser error for any path in |file_paths| not ending in .jar."""
  for path in file_paths:
    _CheckFilePathEndsWithJar(parser, path)
+
+
def _ParseArgs(args):
  """Parse dex.py's command line.

  Args:
    args: Raw argument list; @FileArg references are expanded first.

  Returns:
    (options, paths): parsed optparse options, plus the positional jar paths.
  """
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--output-directory',
                    default=os.getcwd(),
                    help='Path to the output build directory.')
  parser.add_option('--dex-path', help='Dex output path.')
  parser.add_option('--configuration-name',
                    help='The build CONFIGURATION_NAME.')
  parser.add_option('--proguard-enabled',
                    help='"true" if proguard is enabled.')
  parser.add_option('--debug-build-proguard-enabled',
                    help='"true" if proguard is enabled for debug build.')
  parser.add_option('--proguard-enabled-input-path',
                    help=('Path to dex in Release mode when proguard '
                          'is enabled.'))
  parser.add_option('--inputs', help='A list of additional input paths.')
  parser.add_option('--excluded-paths',
                    help='A list of paths to exclude from the dex file.')
  parser.add_option('--main-dex-list-path',
                    help='A file containing a list of the classes to '
                         'include in the main dex.')
  parser.add_option('--multidex-configuration-path',
                    help='A JSON file containing multidex build configuration.')
  parser.add_option('--multi-dex', default=False, action='store_true',
                    help='Generate multiple dex files.')
  parser.add_option('--d8-jar-path', help='Path to D8 jar.')
  parser.add_option('--release', action='store_true', default=False,
                    help='Run D8 in release mode. Release mode maximises main '
                    'dex and deletes non-essential line number information '
                    '(vs debug which minimizes main dex and keeps all line '
                    'number information, and then some.')
  parser.add_option('--min-api',
                    help='Minimum Android API level compatibility.')

  parser.add_option('--dexlayout-profile',
                    help=('Text profile for dexlayout. If present, a dexlayout '
                          'pass will happen'))
  parser.add_option('--profman-path',
                    help=('Path to ART profman binary. There should be a '
                          'lib/ directory at the same path containing shared '
                          'libraries (shared with dexlayout).'))
  parser.add_option('--dexlayout-path',
                    help=('Path to ART dexlayout binary. There should be a '
                          'lib/ directory at the same path containing shared '
                          'libraries (shared with dexlayout).'))
  parser.add_option('--dexdump-path', help='Path to dexdump binary.')
  parser.add_option(
      '--proguard-mapping-path',
      help=('Path to proguard map from obfuscated symbols in the jar to '
            'unobfuscated symbols present in the code. If not '
            'present, the jar is assumed not to be obfuscated.'))

  options, paths = parser.parse_args(args)

  required_options = ('d8_jar_path',)
  build_utils.CheckOptions(options, parser, required=required_options)

  # Dexlayout needs all three ART tool paths; conversely, a proguard map is
  # only consumed by the dexlayout pass, so reject it without one.
  if options.dexlayout_profile:
    build_utils.CheckOptions(
        options,
        parser,
        required=('profman_path', 'dexlayout_path', 'dexdump_path'))
  elif options.proguard_mapping_path is not None:
    raise Exception('Unexpected proguard mapping without dexlayout')

  # A multidex config file, when given, overrides the --multi-dex flag.
  if options.multidex_configuration_path:
    with open(options.multidex_configuration_path) as multidex_config_file:
      multidex_config = json.loads(multidex_config_file.read())
    options.multi_dex = multidex_config.get('enabled', False)

  if options.multi_dex and not options.main_dex_list_path:
    logging.warning('multidex cannot be enabled without --main-dex-list-path')
    options.multi_dex = False
  elif options.main_dex_list_path and not options.multi_dex:
    logging.warning('--main-dex-list-path is unused if multidex is not enabled')

  if options.inputs:
    options.inputs = build_utils.ParseGnList(options.inputs)
    _CheckFilePathsEndWithJar(parser, options.inputs)
  if options.excluded_paths:
    options.excluded_paths = build_utils.ParseGnList(options.excluded_paths)

  if options.proguard_enabled_input_path:
    _CheckFilePathEndsWithJar(parser, options.proguard_enabled_input_path)
  _CheckFilePathsEndWithJar(parser, paths)

  return options, paths
+
+
+def _MoveTempDexFile(tmp_dex_dir, dex_path):
+ """Move the temp dex file out of |tmp_dex_dir|.
+
+ Args:
+ tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp().
+ The directory should have just a single file.
+ dex_path: Target path to move dex file to.
+
+ Raises:
+ Exception if there are multiple files in |tmp_dex_dir|.
+ """
+ tempfiles = os.listdir(tmp_dex_dir)
+ if len(tempfiles) > 1:
+ raise Exception('%d files created, expected 1' % len(tempfiles))
+
+ tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0])
+ shutil.move(tmp_dex_path, dex_path)
+
+
+def _NoClassFiles(jar_paths):
+ """Returns True if there are no .class files in the given JARs.
+
+ Args:
+ jar_paths: list of strings representing JAR file paths.
+
+ Returns:
+ (bool) True if no .class files are found.
+ """
+ for jar_path in jar_paths:
+ with zipfile.ZipFile(jar_path) as jar:
+ if any(name.endswith('.class') for name in jar.namelist()):
+ return False
+ return True
+
+
def _RunD8(dex_cmd, input_paths, output_path):
  """Append --output and the inputs to |dex_cmd|, then execute it.

  Note: extends |dex_cmd| in place, so the caller's list is modified.
  """
  dex_cmd += ['--output', output_path]
  dex_cmd += input_paths
  build_utils.CheckOutput(dex_cmd, print_stderr=False)
+
+
+def _EnvWithArtLibPath(binary_path):
+ """Return an environment dictionary for ART host shared libraries.
+
+ Args:
+ binary_path: the path to an ART host binary.
+
+ Returns:
+ An environment dictionary where LD_LIBRARY_PATH has been augmented with the
+ shared library path for the binary. This assumes that there is a lib/
+ directory in the same location as the binary.
+ """
+ lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
+ env = os.environ.copy()
+ libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
+ libraries.append(lib_path)
+ env['LD_LIBRARY_PATH'] = ':'.join(libraries)
+ return env
+
+
def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
  """Create a binary profile for dexlayout.

  Args:
    text_profile: The ART text profile that will be converted to a binary
      profile.
    input_dex: The input dex file to layout.
    profman_path: Path to the profman binary.
    temp_dir: Directory to work in.

  Returns:
    The name of the binary profile, which will live in temp_dir.
  """
  binary_profile = os.path.join(
      temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
  open(binary_profile, 'w').close() # Touch binary_profile.
  profman_cmd = [profman_path,
                 '--apk=' + input_dex,
                 '--dex-location=' + input_dex,
                 '--create-profile-from=' + text_profile,
                 '--reference-profile-file=' + binary_profile]
  # profman warns about profile entries missing from the dex; those warnings
  # are expected, so strip them from stderr.
  build_utils.CheckOutput(
      profman_cmd,
      env=_EnvWithArtLibPath(profman_path),
      stderr_filter=lambda output:
        build_utils.FilterLines(output, '|'.join(
          [r'Could not find (method_id|proto_id|name):',
           r'Could not create type list'])))
  return binary_profile
+
+
def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
  """Layout a dexfile using a profile.

  Args:
    binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
    input_dex: The dex file used to create the binary profile.
    dexlayout_path: Path to the dexlayout binary.
    temp_dir: Directory to work in.

  Returns:
    List of output files produced by dexlayout. This will be one if the input
    was a single dexfile, or multiple files if the input was a multidex
    zip. These output files are located in temp_dir.
  """
  dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
  os.mkdir(dexlayout_output_dir)
  dexlayout_cmd = [ dexlayout_path,
                    '-u', # Update checksum
                    '-p', binary_profile,
                    '-w', dexlayout_output_dir,
                    input_dex ]
  # dexlayout complains when the input dex is not zipaligned; that warning is
  # expected here, so filter it out of stderr.
  build_utils.CheckOutput(
      dexlayout_cmd,
      env=_EnvWithArtLibPath(dexlayout_path),
      stderr_filter=lambda output:
        build_utils.FilterLines(output,
                                r'Can.t mmap dex file.*please zipalign'))
  output_files = os.listdir(dexlayout_output_dir)
  if not output_files:
    raise Exception('dexlayout unexpectedly produced no output')
  return [os.path.join(dexlayout_output_dir, f) for f in output_files]
+
+
+def _ZipMultidex(file_dir, dex_files):
+ """Zip dex files into a multidex.
+
+ Args:
+ file_dir: The directory into which to write the output.
+ dex_files: The dexfiles forming the multizip. Their names must end with
+ classes.dex, classes2.dex, ...
+
+ Returns:
+ The name of the multidex file, which will live in file_dir.
+ """
+ ordered_files = [] # List of (archive name, file name)
+ for f in dex_files:
+ if f.endswith('classes.dex.zip'):
+ ordered_files.append(('classes.dex', f))
+ break
+ if not ordered_files:
+ raise Exception('Could not find classes.dex multidex file in %s',
+ dex_files)
+ for dex_idx in xrange(2, len(dex_files) + 1):
+ archive_name = 'classes%d.dex' % dex_idx
+ for f in dex_files:
+ if f.endswith(archive_name):
+ ordered_files.append((archive_name, f))
+ break
+ else:
+ raise Exception('Could not find classes%d.dex multidex file in %s',
+ dex_files)
+ if len(set(f[1] for f in ordered_files)) != len(ordered_files):
+ raise Exception('Unexpected clashing filenames for multidex in %s',
+ dex_files)
+
+ zip_name = os.path.join(file_dir, 'multidex_classes.zip')
+ build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
+ for archive_name, file_name in ordered_files),
+ zip_name)
+ return zip_name
+
+
def _ZipSingleDex(dex_file, zip_name):
  """Zip up a single dex file.

  Args:
    dex_file: A dexfile whose name is ignored.
    zip_name: The output file in which to write the zip.
  """
  # The archive entry is always named classes.dex, regardless of |dex_file|.
  build_utils.DoZip([('classes.dex', dex_file)], zip_name)
+
+
def main(args):
  """Run d8 over the input jars, with an optional dexlayout pass."""
  options, paths = _ParseArgs(args)
  # When proguard already ran for this configuration, its output supersedes
  # the regular inputs.
  if ((options.proguard_enabled == 'true'
       and options.configuration_name == 'Release')
      or (options.debug_build_proguard_enabled == 'true'
          and options.configuration_name == 'Debug')):
    paths = [options.proguard_enabled_input_path]

  if options.inputs:
    paths += options.inputs

  if options.excluded_paths:
    # Excluded paths are relative to the output directory.
    exclude_paths = options.excluded_paths
    paths = [p for p in paths if not
             os.path.relpath(p, options.output_directory) in exclude_paths]

  input_paths = list(paths)
  if options.multi_dex:
    input_paths.append(options.main_dex_list_path)

  dex_cmd = ['java', '-jar', options.d8_jar_path, '--no-desugaring']
  if options.multi_dex:
    dex_cmd += ['--main-dex-list', options.main_dex_list_path]
  if options.release:
    dex_cmd += ['--release']
  if options.min_api:
    dex_cmd += ['--min-api', options.min_api]

  is_dex = options.dex_path.endswith('.dex')
  is_jar = options.dex_path.endswith('.jar')

  with build_utils.TempDir() as tmp_dir:
    tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
    os.mkdir(tmp_dex_dir)
    if is_jar and _NoClassFiles(paths):
      # Handle case where no classfiles are specified in inputs
      # by creating an empty JAR
      with zipfile.ZipFile(options.dex_path, 'w') as outfile:
        outfile.comment = 'empty'
    else:
      # .dex files can't specify a name for D8. Instead, we output them to a
      # temp directory then move them after the command has finished running
      # (see _MoveTempDexFile). For other files, tmp_dex_dir is None.
      _RunD8(dex_cmd, paths, tmp_dex_dir)

    tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output')
    if is_dex:
      _MoveTempDexFile(tmp_dex_dir, tmp_dex_output)
    else:
      # d8 supports outputting to a .zip, but does not have deterministic file
      # ordering: https://issuetracker.google.com/issues/119945929
      build_utils.ZipDir(tmp_dex_output, tmp_dex_dir)

    if options.dexlayout_profile:
      if options.proguard_mapping_path is not None:
        # Rewrite the profile's symbols through the proguard map so it matches
        # the obfuscated dex.
        matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
        convert_dex_profile.ObfuscateProfile(
            options.dexlayout_profile, tmp_dex_output,
            options.proguard_mapping_path, options.dexdump_path,
            matching_profile)
      else:
        logging.warning('No obfuscation for %s', options.dexlayout_profile)
        matching_profile = options.dexlayout_profile
      binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
                                            options.profman_path, tmp_dir)
      output_files = _LayoutDex(binary_profile, tmp_dex_output,
                                options.dexlayout_path, tmp_dir)
      target = None
      if len(output_files) > 1:
        target = _ZipMultidex(tmp_dir, output_files)
      else:
        output = output_files[0]
        if not zipfile.is_zipfile(output):
          target = os.path.join(tmp_dir, 'dex_classes.zip')
          _ZipSingleDex(output, target)
        else:
          target = output
      shutil.move(os.path.join(tmp_dir, target), tmp_dex_output)

    # The dex file is complete and can be moved out of tmp_dir.
    shutil.move(tmp_dex_output, options.dex_path)

  # NOTE(review): unlike sibling scripts, options.depfile is not guarded with
  # an `if options.depfile:` here — confirm --depfile is always passed.
  build_utils.WriteDepfile(
      options.depfile, options.dex_path, input_paths, add_pydeps=False)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/dex.pydeps b/deps/v8/build/android/gyp/dex.pydeps
new file mode 100644
index 0000000000..e5ecbd2335
--- /dev/null
+++ b/deps/v8/build/android/gyp/dex.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
+../../gn_helpers.py
+../convert_dex_profile.py
+dex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/dexsplitter.py b/deps/v8/build/android/gyp/dexsplitter.py
new file mode 100755
index 0000000000..a0761581bd
--- /dev/null
+++ b/deps/v8/build/android/gyp/dexsplitter.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
def _ParseOptions(args):
  """Parse dexsplitter.py's command line.

  Args:
    args: List of command-line arguments (after @FileArg expansion).

  Returns:
    An argparse.Namespace, augmented with a |features| dict mapping each
    feature name to its list of jar paths.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--depfile', help='Path to the depfile to write to.')
  parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
  parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
  parser.add_argument(
      '--input-dex-zip', help='Path to dex files in zip being split.')
  parser.add_argument(
      '--proguard-mapping-file', help='Path to proguard mapping file.')
  parser.add_argument(
      '--feature-name',
      action='append',
      dest='feature_names',
      help='The name of the feature module.')
  parser.add_argument(
      '--feature-jars',
      action='append',
      # Fix: "compirse" -> "comprise" in user-facing help text.
      help='GN list of paths to jars which comprise the corresponding feature.')
  parser.add_argument(
      '--dex-dest',
      action='append',
      dest='dex_dests',
      help='Destination for dex file of the corresponding feature.')
  options = parser.parse_args(args)

  # --feature-name, --feature-jars and --dex-dest are parallel lists; each
  # index describes one feature module.
  assert len(options.feature_names) == len(options.feature_jars) and len(
      options.feature_names) == len(options.dex_dests)
  options.features = {}
  for i, name in enumerate(options.feature_names):
    options.features[name] = build_utils.ParseGnList(options.feature_jars[i])

  return options
+
+
def _RunDexsplitter(options, output_dir):
  """Invoke R8's dexsplitter over the unzipped input dex files.

  Output lands under |output_dir|, one subdirectory per feature name (as
  consumed by main() below).
  """
  cmd = [
      'java',
      '-jar',
      options.r8_path,
      'dexsplitter',
      '--output',
      output_dir,
      '--proguard-map',
      options.proguard_mapping_file,
  ]

  # Base-module jars go through --base-jar; every other feature's jars are
  # tagged with their feature name. Jars shared with base are skipped.
  for base_jar in options.features['base']:
    cmd += ['--base-jar', base_jar]

  base_jars_lookup = set(options.features['base'])
  for feature in options.features:
    if feature == 'base':
      continue
    for feature_jar in options.features[feature]:
      if feature_jar not in base_jars_lookup:
        cmd += ['--feature-jar', feature_jar + ':' + feature]

  with build_utils.TempDir() as temp_dir:
    unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir)
    for file_name in unzipped_files:
      cmd += ['--input', file_name]
    build_utils.CheckOutput(cmd)
+
+
def main(args):
  """Split a zipped dex into per-feature-module dex files."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseOptions(args)

  input_paths = []
  # .values() instead of py2-only .itervalues(): same iteration, py3-safe.
  for feature_jars in options.features.values():
    for feature_jar in feature_jars:
      input_paths.append(feature_jar)

  with build_utils.TempDir() as dexsplitter_output_dir:
    curr_location_to_dest = []
    if len(options.features) == 1:
      # Don't run dexsplitter since it needs at least 1 feature module.
      curr_location_to_dest.append((options.input_dex_zip,
                                    options.dex_dests[0]))
    else:
      _RunDexsplitter(options, dexsplitter_output_dir)

      for i, dest in enumerate(options.dex_dests):
        module_dex_file = os.path.join(dexsplitter_output_dir,
                                       options.feature_names[i], 'classes.dex')
        if os.path.exists(module_dex_file):
          curr_location_to_dest.append((module_dex_file, dest))
        else:
          module_dex_file += '.zip'
          assert os.path.exists(
              module_dex_file), 'Dexsplitter tool output not found.'
          # Bug fix: |module_dex_file| already ends in '.zip' here; the old
          # code appended a second '.zip', recording a path that never exists.
          curr_location_to_dest.append((module_dex_file, dest))

    for curr_location, dest in curr_location_to_dest:
      with build_utils.AtomicOutput(dest) as f:
        if curr_location.endswith('.zip'):
          if dest.endswith('.zip'):
            shutil.copy(curr_location, f.name)
          else:
            # Unpack the single dex out of the zip.
            with zipfile.ZipFile(curr_location, 'r') as z:
              namelist = z.namelist()
              assert len(namelist) == 1, (
                  'Unzipping to single dex file, but not single dex file in ' +
                  options.input_dex_zip)
              z.extract(namelist[0], f.name)
        else:
          if dest.endswith('.zip'):
            build_utils.ZipDir(
                f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
          else:
            shutil.move(curr_location, f.name)

  build_utils.Touch(options.stamp)
  build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/dexsplitter.pydeps b/deps/v8/build/android/gyp/dexsplitter.pydeps
new file mode 100644
index 0000000000..5935d23885
--- /dev/null
+++ b/deps/v8/build/android/gyp/dexsplitter.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
+../../gn_helpers.py
+dexsplitter.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/dist_aar.py b/deps/v8/build/android/gyp/dist_aar.py
new file mode 100755
index 0000000000..ed823f18b7
--- /dev/null
+++ b/deps/v8/build/android/gyp/dist_aar.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an Android .aar file."""
+
+import argparse
+import os
+import posixpath
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
+
+
+def _MergeRTxt(r_paths):
+ """Merging the given R.txt files and returns them as a string."""
+ all_lines = set()
+ for r_path in r_paths:
+ with open(r_path) as f:
+ all_lines.update(f.readlines())
+ return ''.join(sorted(all_lines))
+
+
+def _MergeProguardConfigs(proguard_configs):
+ """Merging the given proguard config files and returns them as a string."""
+ ret = []
+ for config in proguard_configs:
+ ret.append('# FROM: {}'.format(config))
+ with open(config) as f:
+ ret.append(f.read())
+ return '\n'.join(ret)
+
+
+def _AddResources(aar_zip, resource_zips):
+ """Adds all resource zips to the given aar_zip.
+
+ Ensures all res/values/* files have unique names by prefixing them.
+ """
+ for i, path in enumerate(resource_zips):
+ with zipfile.ZipFile(path) as res_zip:
+ for info in res_zip.infolist():
+ data = res_zip.read(info)
+ dirname, basename = posixpath.split(info.filename)
+ if 'values' in dirname:
+ basename = '{}_{}'.format(basename, i)
+ info.filename = posixpath.join(dirname, basename)
+ info.filename = posixpath.join('res', info.filename)
+ aar_zip.writestr(info, data)
+
+
def main(args):
  """Assemble an .aar from the manifest, jars, resources and native libs."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--output', required=True, help='Path to output aar.')
  parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
  parser.add_argument('--dependencies-res-zips', required=True,
                      help='GN list of resource zips')
  parser.add_argument('--r-text-files', required=True,
                      help='GN list of R.txt files to merge')
  parser.add_argument('--proguard-configs', required=True,
                      help='GN list of ProGuard flag files to merge.')
  parser.add_argument(
      '--android-manifest',
      help='Path to AndroidManifest.xml to include.',
      default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml'))
  parser.add_argument('--native-libraries', default='',
                      help='GN list of native libraries. If non-empty then '
                      'ABI must be specified.')
  parser.add_argument('--abi',
                      help='ABI (e.g. armeabi-v7a) for native libraries.')

  options = parser.parse_args(args)

  if options.native_libraries and not options.abi:
    parser.error('You must provide --abi if you have native libs')

  options.jars = build_utils.ParseGnList(options.jars)
  options.dependencies_res_zips = build_utils.ParseGnList(
      options.dependencies_res_zips)
  options.r_text_files = build_utils.ParseGnList(options.r_text_files)
  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
  options.native_libraries = build_utils.ParseGnList(options.native_libraries)

  # Build into a staging file (delete=False so it can be moved into place);
  # the bare except deliberately cleans the staging file up, then re-raises.
  with tempfile.NamedTemporaryFile(delete=False) as staging_file:
    try:
      with zipfile.ZipFile(staging_file.name, 'w') as z:
        build_utils.AddToZipHermetic(
            z, 'AndroidManifest.xml', src_path=options.android_manifest)

        with tempfile.NamedTemporaryFile() as jar_file:
          build_utils.MergeZips(jar_file.name, options.jars)
          build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)

        build_utils.AddToZipHermetic(
            z, 'R.txt', data=_MergeRTxt(options.r_text_files))
        build_utils.AddToZipHermetic(z, 'public.txt', data='')

        if options.proguard_configs:
          build_utils.AddToZipHermetic(
              z, 'proguard.txt',
              data=_MergeProguardConfigs(options.proguard_configs))

        _AddResources(z, options.dependencies_res_zips)

        for native_library in options.native_libraries:
          libname = os.path.basename(native_library)
          build_utils.AddToZipHermetic(
              z, os.path.join('jni', options.abi, libname),
              src_path=native_library)
    except:
      os.unlink(staging_file.name)
      raise
    shutil.move(staging_file.name, options.output)

  if options.depfile:
    all_inputs = (options.jars + options.dependencies_res_zips +
                  options.r_text_files + options.proguard_configs)
    build_utils.WriteDepfile(options.depfile, options.output, all_inputs,
                             add_pydeps=False)


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/dist_aar.pydeps b/deps/v8/build/android/gyp/dist_aar.pydeps
new file mode 100644
index 0000000000..da5ea8da23
--- /dev/null
+++ b/deps/v8/build/android/gyp/dist_aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
+../../gn_helpers.py
+dist_aar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/emma_instr.py b/deps/v8/build/android/gyp/emma_instr.py
new file mode 100755
index 0000000000..cbe913eb26
--- /dev/null
+++ b/deps/v8/build/android/gyp/emma_instr.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'emma_instr' action in the java build process.
+Depending on whether emma_instrument is set, the 'emma_instr' action will either
+call the instrument command or the copy command.
+
+Possible commands are:
+- instrument_jar: Accepts a jar and instruments it using emma.jar.
+- copy: Called when EMMA coverage is not enabled. This allows us to make
+ this a required step without necessarily instrumenting on every build.
+ Also removes any stale coverage files.
+"""
+
+import collections
+import json
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+
def _AddCommonOptions(option_parser):
  """Adds the options shared by every command to |option_parser|."""
  option_parser.add_option('--input-path',
                           help=('Path to input file(s). Either the classes '
                                 'directory, or the path to a jar.'))
  option_parser.add_option('--output-path',
                           help=('Path to output final file(s) to. Either the '
                                 'final classes directory, or the directory in '
                                 'which to place the instrumented/copied jar.'))
  option_parser.add_option('--coverage-file',
                           help='File to create with coverage metadata.')
  option_parser.add_option('--sources-list-file',
                           help='File to create with the list of sources.')
+
+
def _AddInstrumentOptions(option_parser):
  """Adds options related to instrumentation to |option_parser|."""
  _AddCommonOptions(option_parser)
  # _RunInstrumentCommand expects exactly one of --source-dirs,
  # --source-files or --java-sources-file to provide the source list.
  option_parser.add_option('--source-dirs',
                           help='Space separated list of source directories. '
                                'source-files should not be specified if '
                                'source-dirs is specified')
  option_parser.add_option('--source-files',
                           help='Space separated list of source files. '
                                'source-dirs should not be specified if '
                                'source-files is specified')
  option_parser.add_option('--java-sources-file',
                           help='File containing newline-separated .java paths')
  option_parser.add_option('--src-root',
                           help='Root of the src repository.')
  option_parser.add_option('--emma-jar',
                           help='Path to emma.jar.')
  option_parser.add_option(
      '--filter-string', default='',
      help=('Filter string consisting of a list of inclusion/exclusion '
            'patterns separated with whitespace and/or comma.'))
+
+
+def _RunCopyCommand(_command, options, _, option_parser):
+ """Copies the jar from input to output locations.
+
+ Also removes any old coverage/sources file.
+
+ Args:
+ command: String indicating the command that was received to trigger
+ this function.
+ options: optparse options dictionary.
+ args: List of extra args from optparse.
+ option_parser: optparse.OptionParser object.
+
+ Returns:
+ An exit code.
+ """
+ if not (options.input_path and options.output_path and
+ options.coverage_file and options.sources_list_file):
+ option_parser.error('All arguments are required.')
+
+ if os.path.exists(options.coverage_file):
+ os.remove(options.coverage_file)
+ if os.path.exists(options.sources_list_file):
+ os.remove(options.sources_list_file)
+
+ shutil.copy(options.input_path, options.output_path)
+
+
+def _GetSourceDirsFromSourceFiles(source_files):
+ """Returns list of directories for the files in |source_files|.
+
+ Args:
+ source_files: List of source files.
+
+ Returns:
+ List of source directories.
+ """
+ return list(set(os.path.dirname(source_file) for source_file in source_files))
+
+
+def _CreateSourcesListFile(source_dirs, sources_list_file, src_root):
+ """Adds all normalized source directories to |sources_list_file|.
+
+ Args:
+ source_dirs: List of source directories.
+ sources_list_file: File into which to write the JSON list of sources.
+ src_root: Root which sources added to the file should be relative to.
+
+ Returns:
+ An exit code.
+ """
+ src_root = os.path.abspath(src_root)
+ relative_sources = []
+ for s in source_dirs:
+ abs_source = os.path.abspath(s)
+ if abs_source[:len(src_root)] != src_root:
+ print ('Error: found source directory not under repository root: %s %s'
+ % (abs_source, src_root))
+ return 1
+ rel_source = os.path.relpath(abs_source, src_root)
+
+ relative_sources.append(rel_source)
+
+ with open(sources_list_file, 'w') as f:
+ json.dump(relative_sources, f)
+
+
def _RunInstrumentCommand(_command, options, _, option_parser):
  """Instruments jar files using EMMA.

  Args:
    command: String indicating the command that was received to trigger
        this function.
    options: optparse options dictionary.
    args: List of extra args from optparse.
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  if not (options.input_path and options.output_path and
          options.coverage_file and options.sources_list_file and
          (options.source_files or options.source_dirs or
           options.java_sources_file) and
          options.src_root and options.emma_jar):
    option_parser.error('All arguments are required.')

  if os.path.exists(options.coverage_file):
    os.remove(options.coverage_file)
  temp_dir = tempfile.mkdtemp()
  try:
    cmd = ['java', '-cp', options.emma_jar,
           'emma', 'instr',
           '-ip', options.input_path,
           '-ix', options.filter_string,
           '-d', temp_dir,
           '-out', options.coverage_file,
           '-m', 'fullcopy']
    build_utils.CheckOutput(cmd)

    # File is not generated when filter_string doesn't match any files.
    if not os.path.exists(options.coverage_file):
      build_utils.Touch(options.coverage_file)

    # The instrumented jar is expected under <temp_dir>/lib (fullcopy mode).
    temp_jar_dir = os.path.join(temp_dir, 'lib')
    jars = os.listdir(temp_jar_dir)
    if len(jars) != 1:
      print('Error: multiple output files in: %s' % (temp_jar_dir))
      return 1

    # Delete output_path first to avoid modifying input_path in the case where
    # input_path is a hardlink to output_path. http://crbug.com/571642
    if os.path.exists(options.output_path):
      os.unlink(options.output_path)
    shutil.move(os.path.join(temp_jar_dir, jars[0]), options.output_path)
  finally:
    shutil.rmtree(temp_dir)

  if options.source_dirs:
    source_dirs = build_utils.ParseGnList(options.source_dirs)
  else:
    source_files = []
    if options.source_files:
      source_files += build_utils.ParseGnList(options.source_files)
    if options.java_sources_file:
      source_files.extend(
          build_utils.ReadSourcesList(options.java_sources_file))
    source_dirs = _GetSourceDirsFromSourceFiles(source_files)

  # TODO(GYP): In GN, we are passed the list of sources, detecting source
  # directories, then walking them to re-establish the list of sources.
  # This can obviously be simplified!
  _CreateSourcesListFile(source_dirs, options.sources_list_file,
                         options.src_root)

  return 0
+
+
# Pairs a command's option-setup function with its implementation.
CommandFunctionTuple = collections.namedtuple(
    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
# Commands accepted as argv[1]; see the module docstring for semantics.
VALID_COMMANDS = {
    'copy': CommandFunctionTuple(_AddCommonOptions,
                                 _RunCopyCommand),
    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
                                           _RunInstrumentCommand),
}
+
+
class CommandOptionParser(optparse.OptionParser):
  """OptionParser wrapper that appends a command list to its usage text."""

  def __init__(self, *args, **kwargs):
    """Creates a CommandOptionParser.

    Args:
      commands_dict: A dictionary mapping command strings to an object defining
          - add_options_func: Adds options to the option parser
          - run_command_func: Runs the command itself.
      example: An example command.
      everything else: Passed to the optparse.OptionParser constructor.
    """
    self.commands_dict = kwargs.pop('commands_dict', {})
    self.example = kwargs.pop('example', '')
    kwargs.setdefault('usage', 'Usage: %prog <command> [options]')
    optparse.OptionParser.__init__(self, *args, **kwargs)

  #override
  def get_usage(self):
    base_usage = optparse.OptionParser.get_usage(self)
    return self.expand_prog_name(
        base_usage + self.get_example() + self.get_command_list())

  #override
  def get_command_list(self):
    if not self.commands_dict.keys():
      return ''
    return '\nCommands:\n  %s\n' % '\n  '.join(
        sorted(self.commands_dict.keys()))

  def get_example(self):
    if not self.example:
      return ''
    return '\nExample:\n  %s\n' % self.example
+
+
def main():
  """Dispatches to the copy/instrument_jar command named in argv[1]."""
  option_parser = CommandOptionParser(commands_dict=VALID_COMMANDS)
  argv = sys.argv

  if len(argv) < 2 or argv[1] not in option_parser.commands_dict:
    # Parse args first, if this is '--help', optparse will print help and exit
    option_parser.parse_args(argv)
    option_parser.error('Invalid command.')

  cmd = option_parser.commands_dict[argv[1]]
  cmd.add_options_func(option_parser)
  options, args = option_parser.parse_args(argv)
  return cmd.run_command_func(argv[1], options, args, option_parser)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/emma_instr.pydeps b/deps/v8/build/android/gyp/emma_instr.pydeps
new file mode 100644
index 0000000000..88f752a0f9
--- /dev/null
+++ b/deps/v8/build/android/gyp/emma_instr.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/emma_instr.pydeps build/android/gyp/emma_instr.py
+../../gn_helpers.py
+emma_instr.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/extract_unwind_tables.py b/deps/v8/build/android/gyp/extract_unwind_tables.py
new file mode 100755
index 0000000000..37a8421449
--- /dev/null
+++ b/deps/v8/build/android/gyp/extract_unwind_tables.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables from breakpad symbol files
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered based on function
+address. The output file only contains rows that match the most popular rule
+type in CFI table, to reduce the output size and specify data in compact format.
+See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md.
+1. The CFA rules should be of postfix form "SP <val> +".
+2. The RA rules should be of postfix form "CFA <val> + ^".
+Note: breakpad represents dereferencing address with '^' operator.
+
+The output file has 2 tables UNW_INDEX and UNW_DATA, inspired from ARM EHABI
+format. The first table contains function addresses and an index into the
+UNW_DATA table. The second table contains one or more rows for the function
+unwind information.
+
+The output file starts with 4 bytes counting the size of UNW_INDEX in bytes.
+Then UNW_INDEX table and UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+ 1. First column 4 byte rows of all the function start address as offset from
+ start of the binary, in sorted order.
+ 2. For each function addr, the second column contains 2 byte indices in order.
+ The indices are offsets (in count of 2 bytes) of the CFI data from start of
+ UNW_DATA.
+The last entry in the table always contains CANT_UNWIND index to specify the
+end address of the last function.
+
+UNW_DATA contains data of all the functions. Each function data contains N rows.
+The data found at the address pointed from UNW_INDEX will be:
+ 2 bytes: N - number of rows that belong to current function.
+ N * 4 bytes: N rows of data. 16 bits : Address offset from function start.
+ 14 bits : CFA offset / 4.
+ 2 bits : RA offset / 4.
+
+The function is not added to the unwind table in following conditions:
+C1. If length of the function code (number of instructions) is greater than
+ 0xFFFF (2 byte address span). This is because we use 16 bits to refer to
+ offset of instruction from start of the address.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because we
+ use 14 bits to denote CFA offset (last 2 bits are 0).
+C3. If the Return Address is stored at an offset >= 16 from the CFA. Some
+   functions which have variable arguments can have offset up to 16.
+ TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4 since
+ we never have 0.
+C4: Some functions do not have unwind information defined in dwarf info. These
+ functions have index value CANT_UNWIND(0xFFFF) in UNW_INDEX table.
+
+
+Usage:
+ extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+ --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
# Breakpad register names used in STACK CFI rules.
_CFA_REG = '.cfa'  # Canonical Frame Address rule.
_RA_REG = '.ra'    # Return Address rule.

# Keys into the per-row dicts produced by _GetAllCfiRows.
_ADDR_ENTRY = 0
_LENGTH_ENTRY = 1

# Sentinel UNW_INDEX value for functions that cannot be unwound.
_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+ """Writes a 32 bit unsigned integer to the given output file."""
+ output_file.write(struct.pack('<L', val));
+
+
+def _Write2Bytes(output_file, val):
+ """Writes a 16 bit unsigned integer to the given output file."""
+ output_file.write(struct.pack('<H', val));
+
+
+def _FindRuleForRegister(cfi_row, reg):
+ """Returns the postfix expression as string for a given register.
+
+ Breakpad CFI row format specifies rules for unwinding each register in postfix
+ expression form separated by space. Each rule starts with register name and a
+ colon. Eg: "CFI R1: <rule> R2: <rule>".
+ """
+ out = []
+ found_register = False
+ for part in cfi_row:
+ if found_register:
+ if part[-1] == ':':
+ break
+ out.append(part)
+ elif part == reg + ':':
+ found_register = True
+ return ' '.join(out)
+
+
def _GetCfaAndRaOffset(cfi_row):
  """Returns a tuple with 2 numbers (cfa_offset, ra_offset).

  Returns the parsed values only when the rules match the predefined
  criteria; returns (0, 0) otherwise. The CFA rule must have postfix form
  "SP <val> +" and the RA rule postfix form "CFA -<val> + ^".
  """
  cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG)
  ra_rule = _FindRuleForRegister(cfi_row, _RA_REG)

  cfa_offset = 0
  if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule):
    cfa_offset = int(cfa_rule.split()[1], 10)

  ra_offset = 0
  if ra_rule:
    # Any RA rule that is not the expected dereference form disqualifies the
    # whole row, including an otherwise-valid CFA offset.
    if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule):
      return (0, 0)
    # The matched value is negative (e.g. "-8"); flip the sign.
    ra_offset = -1 * int(ra_rule.split()[1], 10)
  return (cfa_offset, ra_offset)
+
+
def _GetAllCfiRows(symbol_file):
  """Returns parsed CFI data from given symbol_file.

  Each entry in the cfi data dictionary returned is a map from function start
  address to array of function rows, starting with FUNCTION type, followed by
  one or more CFI rows.

  Args:
    symbol_file: An iterable of breakpad symbol-file lines (e.g. an open file).

  Returns:
    dict mapping function start address (int) to a list of row dicts: the
    first row holds _ADDR_ENTRY/_LENGTH_ENTRY for the function, later rows
    hold _ADDR_ENTRY plus _CFA_REG/_RA_REG offsets per instruction address.
  """
  cfi_data = {}
  # Rows accumulated for the function currently being parsed; reset whenever
  # the function is skipped by one of the conditions C1-C3 below.
  current_func = []
  for line in symbol_file:
    if 'STACK CFI' not in line:
      continue

    parts = line.split()
    data = {}
    if parts[2] == 'INIT':
      # Add the previous function to the output
      if len(current_func) > 1:
        cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
      current_func = []

      # The function line is of format "STACK CFI INIT <addr> <length> ..."
      data[_ADDR_ENTRY] = int(parts[3], 16)
      data[_LENGTH_ENTRY] = int(parts[4], 16)

      # Condition C1: Skip if length is large.
      if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff:
        continue  # Skip the current function.
    else:
      # The current function is skipped.
      if len(current_func) == 0:
        continue

      # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..."
      data[_ADDR_ENTRY] = int(parts[2], 16)
      (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts)

      # Condition C2 and C3: Skip based on limits on offsets.
      if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff:
        current_func = []
        continue
      assert data[_CFA_REG] % 4 == 0
      # Since we skipped functions with code size larger than 0xffff, we should
      # have no function offset larger than the same value.
      assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff

    if data[_ADDR_ENTRY] == 0:
      # Skip current function, delete all previous entries.
      current_func = []
      continue
    assert data[_ADDR_ENTRY] % 2 == 0
    current_func.append(data)

  # Condition C4: Skip function without CFI rows.
  if len(current_func) > 1:
    cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
  return cfi_data
+
+
def _WriteCfiData(cfi_data, out_file):
  """Writes the CFI data in defined format to out_file.

  Output layout: 4 bytes with the UNW_INDEX size in bytes, the UNW_INDEX
  table (all function addresses, then all 2-byte indices), then UNW_DATA.
  """
  # Stores the final data that will be written to UNW_DATA table, in order
  # with 2 byte items.
  unw_data = []

  # Represent all the CFI data of functions as set of numbers and map them to an
  # index in the |unw_data|. This index is later written to the UNW_INDEX table
  # for each function. This map is used to find index of the data for functions.
  data_to_index = {}
  # Store mapping between the functions to the index.
  func_addr_to_index = {}
  previous_func_end = 0
  for addr, function in sorted(cfi_data.iteritems()):
    # Add an empty function entry when functions CFIs are missing between 2
    # functions.
    if previous_func_end != 0 and addr - previous_func_end > 4:
      func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
    previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]

    assert len(function) > 1
    func_data_arr = []
    func_data = 0
    # The first row contains the function address and length. The rest of the
    # rows have CFI data. Create function data array as given in the format.
    for row in function[1:]:
      addr_offset = row[_ADDR_ENTRY] - addr
      # Pack CFA offset (already a multiple of 4) with RA offset / 4 in the
      # low 2 bits, as described in the module docstring.
      cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] / 4)

      func_data_arr.append(addr_offset)
      func_data_arr.append(cfa_offset)

    # Consider all the rows in the data as one large integer and add it as a key
    # to the |data_to_index|, so identical CFI sequences are deduplicated.
    for data in func_data_arr:
      func_data = (func_data << 16) | data

    row_count = len(func_data_arr) / 2
    if func_data not in data_to_index:
      # When data is not found, create a new index = len(unw_data), and write
      # the data to |unw_data|.
      index = len(unw_data)
      data_to_index[func_data] = index
      unw_data.append(row_count)
      for row in func_data_arr:
        unw_data.append(row)
    else:
      # If the data was found, then use the same index for the function.
      index = data_to_index[func_data]
      assert row_count == unw_data[index]
    func_addr_to_index[addr] = data_to_index[func_data]

  # Mark the end of the last function entry.
  func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND

  # Write the size of UNW_INDEX file in bytes (6 bytes per entry: 4-byte
  # address + 2-byte index).
  _Write4Bytes(out_file, len(func_addr_to_index) * 6)

  # Write the UNW_INDEX table. First list of addresses and then indices.
  sorted_unw_index = sorted(func_addr_to_index.iteritems())
  for addr, index in sorted_unw_index:
    _Write4Bytes(out_file, addr)
  for addr, index in sorted_unw_index:
    _Write2Bytes(out_file, index)

  # Write the UNW_DATA table.
  for data in unw_data:
    _Write2Bytes(out_file, data)
+
+
def _ParseCfiData(sym_file, output_path):
  """Reads breakpad symbols from |sym_file| and writes packed CFI tables.

  Args:
    sym_file: Path to a dump_syms-produced symbol file.
    output_path: Destination path for the binary UNW_INDEX/UNW_DATA output.
  """
  with open(sym_file, 'r') as symbols:
    parsed_rows = _GetAllCfiRows(symbols)
  with open(output_path, 'wb') as packed:
    _WriteCfiData(parsed_rows, packed)
+
+
def main():
  """CLI entry point: runs dump_syms on the input binary and extracts CFI."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--input_path', required=True,
      help='The input path of the unstripped binary')
  parser.add_argument(
      '--output_path', required=True,
      help='The path of the output file')
  parser.add_argument(
      '--dump_syms_path', required=True,
      help='The path of the dump_syms binary')
  args = parser.parse_args()

  with tempfile.NamedTemporaryFile() as sym_file:
    # dump_syms is invoked relative to the current directory; its stdout (the
    # symbol dump) goes straight into the temp file.
    ret = subprocess.call(['./' + args.dump_syms_path, args.input_path],
                          stdout=sym_file)
    assert not ret
    sym_file.flush()
    _ParseCfiData(sym_file.name, args.output_path)
  return 0


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/extract_unwind_tables_tests.py b/deps/v8/build/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 0000000000..02c70eb049
--- /dev/null
+++ b/deps/v8/build/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+"""
+
+import optparse
+import os
+import struct
+import sys
+import tempfile
+import unittest
+
+import extract_unwind_tables
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
class TestExtractUnwindTables(unittest.TestCase):
  """Checks _ParseCfiData's packed UNW_INDEX/UNW_DATA output end to end."""

  def testExtractCfi(self):
    # Feed a synthetic breakpad symbol file through _ParseCfiData and verify
    # the binary tables entry by entry. Uses assertEqual throughout: the
    # assertEquals alias is deprecated (removed in Python 3.12).
    with tempfile.NamedTemporaryFile() as input_file, \
        tempfile.NamedTemporaryFile() as output_file:
      input_file.write("""
MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
FILE 0 ../../base/allocator/allocator_check.cc
FILE 1 ../../base/allocator/allocator_extension.cc
FILE 2 ../../base/allocator/allocator_shim.cc
FUNC 1adcb60 54 0 i2d_name_canon
1adcb60 1a 509 17054
3b94c70 2 69 40
PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize()
PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File)
STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr
STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
STACK CFI 2 .cfa: sp 4 +
STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
STACK CFI 6 .cfa: sp 16 +
STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr
STACK CFI e1a970 .cfa: sp 4 +
STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
STACK CFI e1a974 .cfa: sp 16 +
STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr
STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 +
STACK CFI e1a1e8 .cfa: sp 80 +
STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr
STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^
STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr
STACK CFI e17004 2 .cfa: sp 0 + .ra: lr
STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr
STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^
STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4
STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr
STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^
STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
""")
      input_file.flush()
      extract_unwind_tables._ParseCfiData(input_file.name, output_file.name)

      # Functions with [] data are expected to be marked CANT_UNWIND.
      expected_cfi_data = {
        0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
        0xe1a296 : [],
        0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10],
        0xe1a990 : [],
        0x3b92e24: [0x28, 0x13],
        0x3b92e62: [],
      }
      expected_function_count = len(expected_cfi_data)

      actual_output = []
      with open(output_file.name, 'rb') as f:
        while True:
          read = f.read(2)
          if not read:
            break
          actual_output.append(struct.unpack('H', read)[0])

      # First value is size of unw_index table.
      unw_index_size = actual_output[1] << 16 | actual_output[0]
      # Each function index is 6 bytes data.
      self.assertEqual(expected_function_count * 6, unw_index_size)
      # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing
      # size.
      unw_index_start = 2
      unw_index_addr_end = unw_index_start + expected_function_count * 2
      unw_index_end = unw_index_addr_end + expected_function_count
      unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end]
      unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end]

      unw_data_start = unw_index_end
      unw_data = actual_output[unw_data_start:]

      for func_iter in range(0, expected_function_count):
        func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 |
            unw_index_addr_col[func_iter * 2])
        index = unw_index_index_col[func_iter]
        # If index is CANT_UNWIND then invalid function.
        if index == 0xFFFF:
          self.assertEqual(expected_cfi_data[func_addr], [])
          continue

        func_start = index + 1
        func_end = func_start + unw_data[index] * 2
        self.assertEqual(
            len(expected_cfi_data[func_addr]), func_end - func_start)
        func_cfi = unw_data[func_start : func_end]
        self.assertEqual(expected_cfi_data[func_addr], func_cfi)


if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/gyp/filter_zip.py b/deps/v8/build/android/gyp/filter_zip.py
new file mode 100755
index 0000000000..2182042df5
--- /dev/null
+++ b/deps/v8/build/android/gyp/filter_zip.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+
+from util import build_utils
+
+
+_RESOURCE_CLASSES = [
+ "R.class",
+ "R##*.class",
+ "Manifest.class",
+ "Manifest##*.class",
+]
+
+
+def _CreatePathTransform(exclude_globs, include_globs,
+ strip_resource_classes_for):
+ exclude_globs = list(exclude_globs or [])
+ if strip_resource_classes_for:
+ exclude_globs.extend(p.replace('.', '/') + '/' + f
+ for p in strip_resource_classes_for
+ for f in _RESOURCE_CLASSES)
+ def path_transform(path):
+ # Exclude filters take precidence over include filters.
+ if build_utils.MatchesGlob(path, exclude_globs):
+ return None
+ if include_globs and not build_utils.MatchesGlob(path, include_globs):
+ return None
+ return path
+
+ return path_transform
+
+
def main():
  """Copies --input zip to --output, filtering entries per the glob options."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--input', required=True,
                      help='Input zip file.')
  parser.add_argument('--output', required=True,
                      help='Output zip file')
  parser.add_argument('--exclude-globs',
                      help='GN list of exclude globs')
  parser.add_argument('--include-globs',
                      help='GN list of include globs')
  parser.add_argument('--strip-resource-classes-for',
      help='GN list of java package names exclude R.class files in.')

  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  # Parse each GN-list argument in place, leaving unset options untouched.
  for attr in ('exclude_globs', 'include_globs', 'strip_resource_classes_for'):
    raw_value = getattr(args, attr)
    if raw_value:
      setattr(args, attr, build_utils.ParseGnList(raw_value))

  path_transform = _CreatePathTransform(
      args.exclude_globs, args.include_globs, args.strip_resource_classes_for)
  with build_utils.AtomicOutput(args.output) as f:
    build_utils.MergeZips(
        f.name, [args.input], path_transform=path_transform)


if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/gyp/filter_zip.pydeps b/deps/v8/build/android/gyp/filter_zip.pydeps
new file mode 100644
index 0000000000..67c989cf88
--- /dev/null
+++ b/deps/v8/build/android/gyp/filter_zip.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py
+../../gn_helpers.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/finalize_apk.py b/deps/v8/build/android/gyp/finalize_apk.py
new file mode 100644
index 0000000000..2440fe40a1
--- /dev/null
+++ b/deps/v8/build/android/gyp/finalize_apk.py
@@ -0,0 +1,32 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and aligns an APK."""
+
+import argparse
+import shutil
+import subprocess
+import tempfile
+
+
def FinalizeApk(apksigner_path, zipalign_path, unsigned_apk_path,
                final_apk_path, key_path, key_passwd, key_name):
  """Zipaligns then signs an APK, writing the result to |final_apk_path|.

  Args:
    apksigner_path: Path to the apksigner tool.
    zipalign_path: Path to the zipalign tool.
    unsigned_apk_path: Input APK to align and sign.
    final_apk_path: Destination path for the finished APK.
    key_path: Path to the keystore file.
    key_passwd: Keystore password.
    key_name: Key alias within the keystore.
  """
  # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime
  # and a corrupted state.
  with tempfile.NamedTemporaryFile() as staging_file:
    # v2 signing requires that zipalign happen first.
    subprocess.check_output([
        zipalign_path, '-p', '-f', '4',
        unsigned_apk_path, staging_file.name])
    subprocess.check_output([
        apksigner_path, 'sign',
        '--in', staging_file.name,
        '--out', staging_file.name,
        '--ks', key_path,
        '--ks-key-alias', key_name,
        '--ks-pass', 'pass:' + key_passwd,
        # Force SHA-1 (makes signing faster; insecure is fine for local builds).
        '--min-sdk-version', '1',
    ])
    shutil.move(staging_file.name, final_apk_path)
    # The staging file was moved away; clear the delete flag so the
    # NamedTemporaryFile context manager does not try to remove it on exit.
    staging_file.delete = False
diff --git a/deps/v8/build/android/gyp/find.py b/deps/v8/build/android/gyp/find.py
new file mode 100755
index 0000000000..a9f1d49855
--- /dev/null
+++ b/deps/v8/build/android/gyp/find.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
def main(argv):
  """Recursively prints files under each directory in |argv| matching --pattern.

  Python 2 script (uses print statements). Returns 1 if any given directory
  does not exist; otherwise returns None (exit status 0 via sys.exit).
  """
  parser = optparse.OptionParser()
  parser.add_option('--pattern', default='*', help='File pattern to match.')
  options, directories = parser.parse_args(argv)

  for d in directories:
    if not os.path.exists(d):
      print >> sys.stderr, '%s does not exist' % d
      return 1
    for root, _, filenames in os.walk(d):
      for f in fnmatch.filter(filenames, options.pattern):
        print os.path.join(root, f)

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/find_sun_tools_jar.py b/deps/v8/build/android/gyp/find_sun_tools_jar.py
new file mode 100755
index 0000000000..7cd4c33984
--- /dev/null
+++ b/deps/v8/build/android/gyp/find_sun_tools_jar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This finds the java distribution's tools.jar and copies it somewhere.
+"""
+
+import argparse
+import os
+import re
+import shutil
+import sys
+
+from util import build_utils
+
+RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
+
def main():
  """Finds the java distribution's tools.jar and copies it to --output.

  Optionally writes a depfile recording tools.jar as an input dependency.
  Raises an Exception if tools.jar cannot be located.
  """
  parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
  parser.add_argument('--depfile',
                      help='Path to depfile. This must be specified as the '
                      'action\'s first output.')
  parser.add_argument('--output', required=True)
  args = parser.parse_args()

  sun_tools_jar_path = FindSunToolsJarPath()

  if sun_tools_jar_path is None:
    raise Exception("Couldn\'t find tools.jar")

  # Using copyfile instead of copy() because copy() calls copymode()
  # We don't want the locked mode because we may copy over this file again
  shutil.copyfile(sun_tools_jar_path, args.output)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.output, [sun_tools_jar_path])
+
+
def FindSunToolsJarPath():
  """Returns the path to the JDK's tools.jar, or None when it can't be found.

  Runs `java -verbose -version` and parses the "[Opened .../jre/lib/rt.jar]"
  line to locate the JDK root. This works with at least openjdk 1.6, 1.7 and
  sun java 1.6, 1.7.
  """
  verbose_output = build_utils.CheckOutput(
      ["java", "-verbose", "-version"], print_stderr=False)
  for line in verbose_output.splitlines():
    found = RT_JAR_FINDER.match(line)
    if found is None:
      continue
    return os.path.join(found.group(1), 'lib', 'tools.jar')
  return None


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/gcc_preprocess.py b/deps/v8/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000000..8b3444c2b0
--- /dev/null
+++ b/deps/v8/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
def DoGcc(options):
  """Preprocesses options.template with the host gcc, writing options.output.

  Runs `gcc -E` with ANDROID pre-defined plus any --defines macros, treating
  the template as a C header, and writes the result atomically.
  """
  build_utils.MakeDirectory(os.path.dirname(options.output))

  # Host gcc invocation; each --defines macro becomes a '-D <macro>' pair.
  gcc_cmd = ['gcc']
  for macro in options.defines or []:
    gcc_cmd += ['-D', macro]

  with build_utils.AtomicOutput(options.output) as f:
    gcc_cmd += [
        '-E',              # stop after preprocessing.
        '-D', 'ANDROID',   # Specify ANDROID define for pre-processor.
        '-x', 'c-header',  # treat sources as C header files
        '-P',              # disable line markers, i.e. '#line 309'
        '-I', options.include_path,
        '-o', f.name,
        options.template,
    ]
    build_utils.CheckOutput(gcc_cmd)
+
+
def main(args):
  """Parses options (after @FileArg expansion) and runs the gcc preprocess."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--include-path', help='Include path for gcc.')
  parser.add_option('--template', help='Path to template.')
  parser.add_option('--output', help='Path for generated file.')
  parser.add_option('--defines', help='Pre-defines macros', action='append')
  options = parser.parse_args(args)[0]

  DoGcc(options)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile, options.output, add_pydeps=False)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/gcc_preprocess.pydeps b/deps/v8/build/android/gyp/gcc_preprocess.pydeps
new file mode 100644
index 0000000000..64e776b633
--- /dev/null
+++ b/deps/v8/build/android/gyp/gcc_preprocess.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py
+../../gn_helpers.py
+gcc_preprocess.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/generate_android_wrapper.py b/deps/v8/build/android/gyp/generate_android_wrapper.py
new file mode 100755
index 0000000000..f8e1815324
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_android_wrapper.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+from util import build_utils
+
+sys.path.append(
+ os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', 'util')))
+
+import generate_wrapper
+
+_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')
+
+
def ExpandWrappedPathLists(args):
  """Expands @WrappedPathList(flag, gn_list) args into flag/@WrappedPath pairs.

  Arguments that do not match the macro pattern are passed through unchanged.
  """
  result = []
  for arg in args:
    matched = _WRAPPED_PATH_LIST_RE.match(arg)
    if not matched:
      result.append(arg)
      continue
    flag, gn_list = matched.group(1), matched.group(2)
    for path in build_utils.ParseGnList(gn_list):
      result += [flag, '@WrappedPath(%s)' % path]
  return result
+
+
def main(raw_args):
  # Expand @FileArg(...) references first, then @WrappedPathList(...) macros,
  # before handing the arguments to generate_wrapper's parser.
  parser = generate_wrapper.CreateArgumentParser()
  expanded_raw_args = build_utils.ExpandFileArgs(raw_args)
  expanded_raw_args = ExpandWrappedPathLists(expanded_raw_args)
  args = parser.parse_args(expanded_raw_args)
  return generate_wrapper.Wrap(args)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/generate_linker_version_script.py b/deps/v8/build/android/gyp/generate_linker_version_script.py
new file mode 100755
index 0000000000..34c72eb818
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_linker_version_script.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate linker version scripts for Chrome on Android shared libraries."""
+
+import argparse
+import os
+
+from util import build_utils
+
+_SCRIPT_HEADER = """\
+# AUTO-GENERATED FILE. DO NOT MODIFY.
+#
+# See: %s
+
+{
+ global:
+""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+_SCRIPT_FOOTER = """\
+ local:
+ *;
+};
+"""
+
+
def main():
  """Writes the linker version script described by the command-line flags."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--output',
      required=True,
      help='Path to output linker version script file.')
  parser.add_argument(
      '--export-java-symbols',
      action='store_true',
      help='Export Java_* JNI methods')
  parser.add_argument(
      '--export-symbol-whitelist-file',
      help='Path to input file containing whitelist of extra '
      'symbols to export. One symbol per line.')
  options = parser.parse_args()

  # JNI_OnLoad is always exported.
  # CrashpadHandlerMain() is the entry point to the Crashpad handler, required
  # for libcrashpad_handler_trampoline.so.
  exported = ['CrashpadHandlerMain', 'JNI_OnLoad']
  if options.export_java_symbols:
    exported.append('Java_*')

  if options.export_symbol_whitelist_file:
    with open(options.export_symbol_whitelist_file, 'rt') as whitelist:
      for raw_line in whitelist:
        stripped = raw_line.strip()
        # Skip blank lines and '#' comments.
        if stripped and not stripped.startswith('#'):
          exported.append(stripped)

  body = ''.join('  %s;\n' % name for name in exported)
  with build_utils.AtomicOutput(options.output) as f:
    f.write(_SCRIPT_HEADER + body + _SCRIPT_FOOTER)


if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/gyp/generate_linker_version_script.pydeps b/deps/v8/build/android/gyp/generate_linker_version_script.pydeps
new file mode 100644
index 0000000000..d1e3ad6181
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_linker_version_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../gn_helpers.py
+generate_linker_version_script.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/generate_resource_rewriter.py b/deps/v8/build/android/gyp/generate_resource_rewriter.py
new file mode 100755
index 0000000000..ba635a293d
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_resource_rewriter.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate ResourceRewriter.java which overwrites the given package's
+ resource id.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ '..',
+ '..',
+ '..',
+ 'third_party')))
+import jinja2
+
+
+RESOURCE_REWRITER_JAVA="ResourceRewriter.java"
+
+RESOURCE_REWRITER="""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+/**
+ * Helper class used to fix up resource ids.
+ */
+class ResourceRewriter {
+ /**
+ * Rewrite the R 'constants' for the WebView.
+ */
+ public static void rewriteRValues(final int packageId) {
+ {% for res_package in res_packages %}
+ {{ res_package }}.R.onResourcesLoaded(packageId);
+ {% endfor %}
+ }
+}
+"""
+
def ParseArgs(args):
  """Parses command line options.

  Returns:
    A Namespace from argparse.parse_args().
  """
  parser = argparse.ArgumentParser(prog='generate_resource_rewriter')
  # The two flags every invocation must supply.
  required_flags = (
      ('--package-name', 'The package name of ResourceRewriter.'),
      ('--dep-packages', 'A list of packages whose resource id will be'
                         'overwritten in ResourceRewriter.'),
  )
  for flag, help_text in required_flags:
    parser.add_argument(flag, required=True, help=help_text)
  parser.add_argument('--output-dir',
                      help='A output directory of generated'
                           ' ResourceRewriter.java')
  parser.add_argument('--srcjar',
                      help='The path of generated srcjar which has'
                           ' ResourceRewriter.java')
  return parser.parse_args(args)
+
+
def CreateResourceRewriter(package, res_packages, output_dir):
  """Renders ResourceRewriter.java into |output_dir|.

  Args:
    package: Java package name the generated class belongs to.
    res_packages: Packages whose R.onResourcesLoaded() calls are emitted.
    output_dir: Directory created (if needed) to hold the generated file.
  """
  build_utils.MakeDirectory(output_dir)
  java_path = os.path.join(output_dir, RESOURCE_REWRITER_JAVA)
  template = jinja2.Template(RESOURCE_REWRITER,
                             trim_blocks=True,
                             lstrip_blocks=True)
  output = template.render(package=package, res_packages=res_packages)
  with open(java_path, 'w') as f:
    f.write(output)
+
def CreateResourceRewriterSrcjar(package, res_packages, srcjar_path):
  """Generates ResourceRewriter.java in a temp dir and zips it as a srcjar."""
  with build_utils.TempDir() as temp_dir:
    # Lay the file out under its package directory so the zip paths match
    # the Java package structure.
    package_dir = os.path.join(temp_dir, *package.split('.'))
    CreateResourceRewriter(package, res_packages, package_dir)
    java_file = os.path.join(package_dir, RESOURCE_REWRITER_JAVA)
    build_utils.DoZip([java_file], srcjar_path, temp_dir)
+
+
def main():
  """Entry point: writes ResourceRewriter.java to --output-dir or --srcjar."""
  options = ParseArgs(build_utils.ExpandFileArgs(sys.argv[1:]))
  package = options.package_name
  dep_packages = build_utils.ParseGnList(options.dep_packages)

  if options.output_dir:
    CreateResourceRewriter(
        package, dep_packages,
        os.path.join(options.output_dir, *package.split('.')))
  else:
    CreateResourceRewriterSrcjar(package, dep_packages, options.srcjar)

  return 0

if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/generate_v14_compatible_resources.py b/deps/v8/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100755
index 0000000000..f9e8a3783a
--- /dev/null
+++ b/deps/v8/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,281 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons that we cannot just use API 17 attributes,
+so we are generating another set of resources by this script.
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+ JB-MR1. This is fixed on JB-MR2. b/8654490
+
+Therefore, this resource generation script can be removed when
+we drop the support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import codecs
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all the attributes that has "Start" or "End" in
+# its name should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+ 'drawableStart' : 'drawableLeft',
+ 'layout_alignStart' : 'layout_alignLeft',
+ 'layout_marginStart' : 'layout_marginLeft',
+ 'layout_alignParentStart' : 'layout_alignParentLeft',
+ 'layout_toStartOf' : 'layout_toLeftOf',
+ 'paddingEnd' : 'paddingRight',
+ 'drawableEnd' : 'drawableRight',
+ 'layout_alignEnd' : 'layout_alignRight',
+ 'layout_marginEnd' : 'layout_marginRight',
+ 'layout_alignParentEnd' : 'layout_alignParentRight',
+ 'layout_toEndOf' : 'layout_toRightOf'}
+
+# Rebind the map with every key and value prefixed by 'android:', since all
+# lookups below are against fully-qualified attribute names.
+# (iteritems here and throughout marks this file as Python 2.)
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+ in ATTRIBUTES_TO_MAP.iteritems())
+
+# Reverse mapping, used to reject deprecated Left/Right attributes.
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+ in ATTRIBUTES_TO_MAP.iteritems())
+
+
+def IterateXmlElements(node):
+ """minidom helper function that iterates all the element nodes.
+ Iteration order is pre-order depth-first."""
+ # Yields the node itself first (if it is an element), then recurses into
+ # every child; non-element nodes are never yielded but are still descended.
+ if node.nodeType == node.ELEMENT_NODE:
+ yield node
+ for child_node in node.childNodes:
+ for child_node_element in IterateXmlElements(child_node):
+ yield child_node_element
+
+
+def ParseAndReportErrors(filename):
+ # Parse filename as XML; on any failure print a traceback and exit the
+ # process with status 1. The broad except is deliberate: this is a
+ # build-time tool and any parse error should fail the build with context.
+ try:
+ return minidom.parse(filename)
+ except Exception: # pylint: disable=broad-except
+ import traceback
+ traceback.print_exc()
+ sys.stderr.write('Failed to parse XML file: %s\n' % filename)
+ sys.exit(1)
+
+
+def AssertNotDeprecatedAttribute(name, value, filename):
+ """Raises an exception if the given attribute is deprecated."""
+ # name must be the fully-qualified attribute name (e.g.
+ # 'android:paddingLeft') to match the keys of ATTRIBUTES_TO_MAP_REVERSED.
+ msg = None
+ if name in ATTRIBUTES_TO_MAP_REVERSED:
+ msg = '{0} should use {1} instead of {2}'.format(filename,
+ ATTRIBUTES_TO_MAP_REVERSED[name], name)
+ elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
+ msg = '{0} should use start/end instead of left/right for {1}'.format(
+ filename, name)
+
+ if msg:
+ msg += ('\nFor background, see: http://android-developers.blogspot.com/'
+ '2013/03/native-rtl-support-in-android-42.html\n'
+ 'If you have a legitimate need for this attribute, discuss with '
+ 'kkimlabs@chromium.org or newt@chromium.org')
+ raise Exception(msg)
+
+
+def WriteDomToFile(dom, filename):
+ """Write the given dom to filename."""
+ build_utils.MakeDirectory(os.path.dirname(filename))
+ with codecs.open(filename, 'w', 'utf-8') as f:
+ # writexml positional args: indent for the root, addindent per nesting
+ # level, newline string.
+ dom.writexml(f, '', ' ', '\n', encoding='utf-8')
+
+
+def HasStyleResource(dom):
+ """Return True if the dom is a style resource, False otherwise."""
+ # Take the root element from the pre-order iterator. (.next() is the
+ # Python 2 generator protocol; would be next(...) under Python 3.)
+ root_node = IterateXmlElements(dom).next()
+ return bool(root_node.nodeName == 'resources' and
+ list(root_node.getElementsByTagName('style')))
+
+
+def ErrorIfStyleResourceExistsInDir(input_dir):
+ """If a style resource is in input_dir, raises an exception."""
+ for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+ dom = ParseAndReportErrors(input_filename)
+ if HasStyleResource(dom):
+ # Allow style file in third_party to exist in non-v17 directories so long
+ # as they do not contain deprecated attributes.
+ # GenerateV14StyleResourceDom returns True iff it had to rewrite an
+ # attribute, i.e. the file actually used Start/End attributes.
+ if not 'third_party' in input_dir or (
+ GenerateV14StyleResourceDom(dom, input_filename)):
+ raise Exception('error: style file ' + input_filename +
+ ' should be under ' + input_dir +
+ '-v17 directory. Please refer to '
+ 'http://crbug.com/243952 for the details.')
+
+
+def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
+ """Convert layout resource to API 14 compatible layout resource.
+
+ Args:
+ dom: Parsed minidom object to be modified.
+ filename: Filename that the DOM was parsed from.
+ assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+ cause an exception to be thrown.
+
+ Returns:
+ True if dom is modified, False otherwise.
+ """
+ is_modified = False
+
+ # Iterate all the elements' attributes to find attributes to convert.
+ for element in IterateXmlElements(dom):
+ # list() snapshots the attribute items because entries are deleted while
+ # iterating.
+ for name, value in list(element.attributes.items()):
+ # Convert any API 17 Start/End attributes to Left/Right attributes.
+ # For example, from paddingStart="10dp" to paddingLeft="10dp"
+ # Note: gravity attributes are not necessary to convert because
+ # start/end values are backward-compatible. Explained at
+ # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
+ if name in ATTRIBUTES_TO_MAP:
+ element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
+ del element.attributes[name]
+ is_modified = True
+ elif assert_not_deprecated:
+ AssertNotDeprecatedAttribute(name, value, filename)
+
+ return is_modified
+
+
+def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
+ """Convert style resource to API 14 compatible style resource.
+
+ Args:
+ dom: Parsed minidom object to be modified.
+ filename: Filename that the DOM was parsed from.
+ assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+ cause an exception to be thrown.
+
+ Returns:
+ True if dom is modified, False otherwise.
+ """
+ is_modified = False
+
+ for style_element in dom.getElementsByTagName('style'):
+ for item_element in style_element.getElementsByTagName('item'):
+ # Assumes every <item> carries a 'name' attribute and has a text first
+ # child; a malformed item would raise KeyError/IndexError here.
+ name = item_element.attributes['name'].value
+ value = item_element.childNodes[0].nodeValue
+ if name in ATTRIBUTES_TO_MAP:
+ item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
+ is_modified = True
+ elif assert_not_deprecated:
+ AssertNotDeprecatedAttribute(name, value, filename)
+
+ return is_modified
+
+
+def GenerateV14LayoutResource(input_filename, output_v14_filename,
+ output_v17_filename):
+ """Convert API 17 layout resource to API 14 compatible layout resource.
+
+ It's mostly a simple replacement, s/Start/Left s/End/Right,
+ on the attribute names.
+ If the generated resource is identical to the original resource,
+ don't do anything. If not, write the generated resource to
+ output_v14_filename, and copy the original resource to output_v17_filename.
+ """
+ dom = ParseAndReportErrors(input_filename)
+ is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
+
+ if is_modified:
+ # Write the generated resource.
+ WriteDomToFile(dom, output_v14_filename)
+
+ # Copy the original resource. copy2 also preserves file metadata (mtime).
+ build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
+ shutil.copy2(input_filename, output_v17_filename)
+
+
+def GenerateV14StyleResource(input_filename, output_v14_filename):
+ """Convert API 17 style resources to API 14 compatible style resource.
+
+ Write the generated style resource to output_v14_filename.
+ It's mostly a simple replacement, s/Start/Left s/End/Right,
+ on the attribute names.
+ """
+ dom = ParseAndReportErrors(input_filename)
+ GenerateV14StyleResourceDom(dom, input_filename)
+
+ # Write the generated resource. Unlike the layout variant, the output is
+ # written even when nothing was modified.
+ WriteDomToFile(dom, output_v14_filename)
+
+
+def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
+ """Convert layout resources to API 14 compatible resources in input_dir."""
+ # Mirrors the relative path of each .xml file under both output roots.
+ for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+ rel_filename = os.path.relpath(input_filename, input_dir)
+ output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+ output_v17_filename = os.path.join(output_v17_dir, rel_filename)
+ GenerateV14LayoutResource(input_filename, output_v14_filename,
+ output_v17_filename)
+
+
+def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
+ """Convert style resources to API 14 compatible resources in input_dir."""
+ # Mirrors the relative path of each .xml file under the output root.
+ for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+ rel_filename = os.path.relpath(input_filename, input_dir)
+ output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+ GenerateV14StyleResource(input_filename, output_v14_filename)
+
+
+def GenerateV14Resources(res_dir, res_v14_dir):
+ # Walk each resource subdirectory of res_dir (e.g. 'layout-land-v17') and
+ # emit API-14-compatible copies under res_v14_dir.
+ for name in os.listdir(res_dir):
+ if not os.path.isdir(os.path.join(res_dir, name)):
+ continue
+
+ # 'layout-land-v17' -> type 'layout', qualifiers ['land', 'v17'].
+ dir_pieces = name.split('-')
+ resource_type = dir_pieces[0]
+ qualifiers = dir_pieces[1:]
+
+ # Locate the API-level qualifier (e.g. 'v17'), if any.
+ api_level_qualifier_index = -1
+ api_level_qualifier = ''
+ for index, qualifier in enumerate(qualifiers):
+ if re.match('v[0-9]+$', qualifier):
+ api_level_qualifier_index = index
+ api_level_qualifier = qualifier
+ break
+
+ # Android pre-v17 API doesn't support RTL. Skip.
+ if 'ldrtl' in qualifiers:
+ continue
+
+ input_dir = os.path.abspath(os.path.join(res_dir, name))
+
+ # We also need to copy the original v17 resource to *-v17 directory
+ # because the generated v14 resource will hide the original resource.
+ output_v14_dir = os.path.join(res_v14_dir, name)
+ output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
+
+ # We only convert layout resources under layout*/, xml*/,
+ # and style resources under values*/.
+ if resource_type in ('layout', 'xml'):
+ if not api_level_qualifier:
+ GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
+ output_v17_dir)
+ elif resource_type == 'values':
+ if api_level_qualifier == 'v17':
+ output_qualifiers = qualifiers[:]
+ del output_qualifiers[api_level_qualifier_index]
+ # Drop the 'v17' qualifier: values-foo-v17 -> values-foo.
+ output_v14_dir = os.path.join(res_v14_dir,
+ '-'.join([resource_type] +
+ output_qualifiers))
+ GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
+ elif not api_level_qualifier:
+ ErrorIfStyleResourceExistsInDir(input_dir) \ No newline at end of file
diff --git a/deps/v8/build/android/gyp/ijar.py b/deps/v8/build/android/gyp/ijar.py
new file mode 100755
index 0000000000..89108087ed
--- /dev/null
+++ b/deps/v8/build/android/gyp/ijar.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def main():
+ # The point of this wrapper is to use AtomicOutput so that output timestamps
+ # are not updated when outputs are unchanged.
+ # Usage: ijar.py <ijar-binary> <input.jar> <output.jar>; the 3-way unpack
+ # raises ValueError on any other argument count.
+ ijar_bin, in_jar, out_jar = sys.argv[1:]
+ with build_utils.AtomicOutput(out_jar) as f:
+ subprocess.check_call([ijar_bin, in_jar, f.name])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/gyp/ijar.pydeps b/deps/v8/build/android/gyp/ijar.pydeps
new file mode 100644
index 0000000000..ca10697c1f
--- /dev/null
+++ b/deps/v8/build/android/gyp/ijar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../gn_helpers.py
+ijar.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/jar.py b/deps/v8/build/android/gyp/jar.py
new file mode 100755
index 0000000000..7f2c9f0b21
--- /dev/null
+++ b/deps/v8/build/android/gyp/jar.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def Jar(class_files,
+ classes_dir,
+ jar_path,
+ provider_configurations=None,
+ additional_files=None):
+ # Build a .jar at jar_path containing:
+ #   class_files, stored relative to classes_dir;
+ #   additional_files: (host_path, path_within_jar) pairs;
+ #   provider_configurations: files placed under META-INF/services/.
+ files = [(os.path.relpath(f, classes_dir), f) for f in class_files]
+
+ if additional_files:
+ for filepath, jar_filepath in additional_files:
+ files.append((jar_filepath, filepath))
+
+ if provider_configurations:
+ for config in provider_configurations:
+ files.append(('META-INF/services/' + os.path.basename(config), config))
+
+ # Zeros out timestamps so that builds are hermetic.
+ with build_utils.AtomicOutput(jar_path) as f:
+ build_utils.DoZip(files, f)
+
+
+def JarDirectory(classes_dir,
+ jar_path,
+ predicate=None,
+ provider_configurations=None,
+ additional_files=None):
+ # Jar every file under classes_dir, optionally filtered by predicate
+ # (called with the path relative to classes_dir).
+ all_files = build_utils.FindInDirectory(classes_dir, '*')
+ if predicate:
+ all_files = [
+ f for f in all_files if predicate(os.path.relpath(f, classes_dir))]
+ # Sort for deterministic (hermetic) jar contents.
+ all_files.sort()
+
+ Jar(all_files,
+ classes_dir,
+ jar_path,
+ provider_configurations=provider_configurations,
+ additional_files=additional_files)
+
+
+def _CreateFilterPredicate(excluded_classes, included_classes):
+ # Returns predicate(path) -> bool combining glob include/exclude lists, or
+ # None when there is nothing to filter (so callers can skip filtering).
+ if not excluded_classes and not included_classes:
+ return None
+
+ def predicate(f):
+ # Exclude filters take precedence over include filters.
+ if build_utils.MatchesGlob(f, excluded_classes):
+ return False
+ if included_classes and not build_utils.MatchesGlob(f, included_classes):
+ return False
+ return True
+
+ return predicate
+
+
+# TODO(agrieve): Change components/cronet/android/BUILD.gn to use filter_zip.py
+# and delete main().
+def main():
+ # Command-line wrapper around JarDirectory; see the TODO above about
+ # migrating the last caller to filter_zip.py. Returns None, so the
+ # sys.exit(main()) below exits with status 0 on success.
+ parser = optparse.OptionParser()
+ parser.add_option('--classes-dir', help='Directory containing .class files.')
+ parser.add_option('--jar-path', help='Jar output path.')
+ parser.add_option('--excluded-classes',
+ help='GN list of .class file patterns to exclude from the jar.')
+ parser.add_option('--included-classes',
+ help='GN list of .class file patterns to include in the jar.')
+
+ args = build_utils.ExpandFileArgs(sys.argv[1:])
+ options, _ = parser.parse_args(args)
+
+ excluded_classes = []
+ if options.excluded_classes:
+ excluded_classes = build_utils.ParseGnList(options.excluded_classes)
+ included_classes = []
+ if options.included_classes:
+ included_classes = build_utils.ParseGnList(options.included_classes)
+
+ predicate = _CreateFilterPredicate(excluded_classes, included_classes)
+ JarDirectory(options.classes_dir, options.jar_path, predicate=predicate)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/java_cpp_enum.py b/deps/v8/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000000..bacc8e3d46
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,435 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import re
+import optparse
+import os
+from string import Template
+import sys
+import textwrap
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+# NOTE(review): 64-bit fixed types are absent from this list — presumably
+# because the generated Java constants are 32-bit ints; confirm before adding.
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
+ 'short', 'unsigned short',
+ 'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
+
+class EnumDefinition(object):
+ # One parsed C++ enum plus the metadata needed to emit a Java @IntDef.
+ # entries/comments are OrderedDicts keyed by entry name (normalized during
+ # Finalize), preserving declaration order.
+ def __init__(self, original_enum_name=None, class_name_override=None,
+ enum_package=None, entries=None, comments=None, fixed_type=None):
+ self.original_enum_name = original_enum_name
+ self.class_name_override = class_name_override
+ self.enum_package = enum_package
+ self.entries = collections.OrderedDict(entries or [])
+ self.comments = collections.OrderedDict(comments or [])
+ self.prefix_to_strip = None
+ self.fixed_type = fixed_type
+
+ def AppendEntry(self, key, value):
+ if key in self.entries:
+ raise Exception('Multiple definitions of key %s found.' % key)
+ self.entries[key] = value
+
+ def AppendEntryComment(self, key, value):
+ if key in self.comments:
+ raise Exception('Multiple definitions of key %s found.' % key)
+ self.comments[key] = value
+
+ @property
+ def class_name(self):
+ # Directive override wins; falls back to the C++ enum name.
+ return self.class_name_override or self.original_enum_name
+
+ def Finalize(self):
+ # Order matters: indices are resolved from the raw values before prefixes
+ # are stripped and names normalized.
+ self._Validate()
+ self._AssignEntryIndices()
+ self._StripPrefix()
+ self._NormalizeNames()
+
+ def _Validate(self):
+ assert self.class_name
+ assert self.enum_package
+ assert self.entries
+ if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
+ raise Exception('Fixed type %s for enum %s not whitelisted.' %
+ (self.fixed_type, self.class_name))
+
+ def _AssignEntryIndices(self):
+ # Enums, if given no value, are given the value of the previous enum + 1.
+ # A value that names another entry resolves to that entry's value; any
+ # other value must parse as an int. (Python 2: iteritems.)
+ if not all(self.entries.values()):
+ prev_enum_value = -1
+ for key, value in self.entries.iteritems():
+ if not value:
+ self.entries[key] = prev_enum_value + 1
+ elif value in self.entries:
+ self.entries[key] = self.entries[value]
+ else:
+ try:
+ self.entries[key] = int(value)
+ except ValueError:
+ raise Exception('Could not interpret integer from enum value "%s" '
+ 'for key %s.' % (value, key))
+ prev_enum_value = self.entries[key]
+
+
+ def _StripPrefix(self):
+ # Strip a common prefix from entry names: either an explicit
+ # PREFIX_TO_STRIP directive or one derived from the enum name
+ # (SHOUT_CASE_, the name itself, or 'k' + name).
+ prefix_to_strip = self.prefix_to_strip
+ if not prefix_to_strip:
+ shout_case = self.original_enum_name
+ shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper()
+ shout_case += '_'
+
+ prefixes = [shout_case, self.original_enum_name,
+ 'k' + self.original_enum_name]
+
+ # for/else: the else runs only when no candidate matched every key.
+ for prefix in prefixes:
+ if all([w.startswith(prefix) for w in self.entries.keys()]):
+ prefix_to_strip = prefix
+ break
+ else:
+ prefix_to_strip = ''
+
+ def StripEntries(entries):
+ ret = collections.OrderedDict()
+ for k, v in entries.iteritems():
+ stripped_key = k.replace(prefix_to_strip, '', 1)
+ # String values may reference other entry names; strip there too.
+ if isinstance(v, basestring):
+ stripped_value = v.replace(prefix_to_strip, '')
+ else:
+ stripped_value = v
+ ret[stripped_key] = stripped_value
+
+ return ret
+
+ self.entries = StripEntries(self.entries)
+ self.comments = StripEntries(self.comments)
+
+ def _NormalizeNames(self):
+ self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty)
+ self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty)
+
+
+def _TransformKeys(d, func):
+ """Normalize keys in |d| and update references to old keys in |d| values."""
+ normal_keys = {k: func(k) for k in d}
+ ret = collections.OrderedDict()
+ for k, v in d.iteritems():
+ # Need to transform values as well when the entry value was explicitly set
+ # (since it could contain references to other enum entry values).
+ # NOTE(review): plain substring replace — a key that is a substring of
+ # another key could over-replace inside values.
+ if isinstance(v, basestring):
+ for normal_key in normal_keys:
+ v = v.replace(normal_key, normal_keys[normal_key])
+ ret[normal_keys[k]] = v
+ return ret
+
+
+class DirectiveSet(object):
+ # Accumulates GENERATED_JAVA_* directives seen while parsing a header and
+ # applies them to an EnumDefinition.
+ class_name_override_key = 'CLASS_NAME_OVERRIDE'
+ enum_package_key = 'ENUM_PACKAGE'
+ prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+ known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+ def __init__(self):
+ self._directives = {}
+
+ def Update(self, key, value):
+ if key not in DirectiveSet.known_keys:
+ raise Exception("Unknown directive: " + key)
+ self._directives[key] = value
+
+ @property
+ def empty(self):
+ return len(self._directives) == 0
+
+ def UpdateDefinition(self, definition):
+ # A missing override defaults to '' (falsy) so EnumDefinition.class_name
+ # falls back to the original enum name; the other two default to None.
+ definition.class_name_override = self._directives.get(
+ DirectiveSet.class_name_override_key, '')
+ definition.enum_package = self._directives.get(
+ DirectiveSet.enum_package_key)
+ definition.prefix_to_strip = self._directives.get(
+ DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+ # Line-oriented state machine: consumes header lines one at a time and
+ # collects an EnumDefinition for each enum preceded by GENERATED_JAVA_*
+ # directives. States: inside a multi-line directive, inside an enum body,
+ # or outside both (see _ParseLine).
+ single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)')
+ multi_line_comment_start_re = re.compile(r'\s*/\*')
+ enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+ # NOTE(review): the trailing '\.*' matches literal dots; it looks like a
+ # typo for '\s*' — confirm against upstream before changing.
+ enum_end_re = re.compile(r'^\s*}\s*;\.*$')
+ generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$')
+ generator_directive_re = re.compile(
+ r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
+ multi_line_generator_directive_start_re = re.compile(
+ r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+ multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$')
+ multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$')
+
+ optional_class_or_struct_re = r'(class|struct)?'
+ enum_name_re = r'(\w+)'
+ optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+ enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+ optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
+ optional_fixed_type_re + '\s*{\s*')
+ enum_single_line_re = re.compile(
+ r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P<enum_entries>.*)}.*$')
+
+ def __init__(self, lines, path=''):
+ # path is only used for error messages.
+ self._lines = lines
+ self._path = path
+ self._enum_definitions = []
+ self._in_enum = False
+ self._current_definition = None
+ self._current_comments = []
+ self._generator_directives = DirectiveSet()
+ self._multi_line_generator_directive = None
+ self._current_enum_entry = ''
+
+ def _ApplyGeneratorDirectives(self):
+ self._generator_directives.UpdateDefinition(self._current_definition)
+ self._generator_directives = DirectiveSet()
+
+ def ParseDefinitions(self):
+ # Main entry point: feed every line through the state machine and return
+ # the accumulated definitions.
+ for line in self._lines:
+ self._ParseLine(line)
+ return self._enum_definitions
+
+ def _ParseLine(self, line):
+ # Dispatch on the current parser state.
+ if self._multi_line_generator_directive:
+ self._ParseMultiLineDirectiveLine(line)
+ elif not self._in_enum:
+ self._ParseRegularLine(line)
+ else:
+ self._ParseEnumLine(line)
+
+ def _ParseEnumLine(self, line):
+ if HeaderParser.multi_line_comment_start_re.match(line):
+ raise Exception('Multi-line comments in enums are not supported in ' +
+ self._path)
+
+ enum_comment = HeaderParser.single_line_comment_re.match(line)
+ if enum_comment:
+ comment = enum_comment.groups()[0]
+ if comment:
+ self._current_comments.append(comment)
+ elif HeaderParser.enum_end_re.match(line):
+ self._FinalizeCurrentEnumDefinition()
+ else:
+ # Entries may span several lines; a ',' marks the end of an entry.
+ self._AddToCurrentEnumEntry(line)
+ if ',' in line:
+ self._ParseCurrentEnumEntry()
+
+ def _ParseSingleLineEnum(self, line):
+ # line is the text between '{' and '}' of a one-line enum declaration.
+ for entry in line.split(','):
+ self._AddToCurrentEnumEntry(entry)
+ self._ParseCurrentEnumEntry()
+
+ self._FinalizeCurrentEnumDefinition()
+
+ def _ParseCurrentEnumEntry(self):
+ if not self._current_enum_entry:
+ return
+
+ enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry)
+ if not enum_entry:
+ raise Exception('Unexpected error while attempting to parse %s as enum '
+ 'entry.' % self._current_enum_entry)
+
+ # groups()[0] is the entry name, groups()[2] the optional '=' value.
+ enum_key = enum_entry.groups()[0]
+ enum_value = enum_entry.groups()[2]
+ self._current_definition.AppendEntry(enum_key, enum_value)
+ if self._current_comments:
+ self._current_definition.AppendEntryComment(
+ enum_key, ' '.join(self._current_comments))
+ self._current_comments = []
+ self._current_enum_entry = ''
+
+ def _AddToCurrentEnumEntry(self, line):
+ self._current_enum_entry += ' ' + line.strip()
+
+ def _FinalizeCurrentEnumDefinition(self):
+ # Flush any trailing entry (last entry may lack a comma), apply pending
+ # directives, and commit the definition.
+ if self._current_enum_entry:
+ self._ParseCurrentEnumEntry()
+ self._ApplyGeneratorDirectives()
+ self._current_definition.Finalize()
+ self._enum_definitions.append(self._current_definition)
+ self._current_definition = None
+ self._in_enum = False
+
+ def _ParseMultiLineDirectiveLine(self, line):
+ multi_line_directive_continuation = (
+ HeaderParser.multi_line_directive_continuation_re.match(line))
+ multi_line_directive_end = (
+ HeaderParser.multi_line_directive_end_re.match(line))
+
+ if multi_line_directive_continuation:
+ value_cont = multi_line_directive_continuation.groups()[0]
+ self._multi_line_generator_directive[1].append(value_cont)
+ elif multi_line_directive_end:
+ # Closing ')' line: join all accumulated fragments into one value.
+ directive_name = self._multi_line_generator_directive[0]
+ directive_value = "".join(self._multi_line_generator_directive[1])
+ directive_value += multi_line_directive_end.groups()[0]
+ self._multi_line_generator_directive = None
+ self._generator_directives.Update(directive_name, directive_value)
+ else:
+ raise Exception('Malformed multi-line directive declaration in ' +
+ self._path)
+
+ def _ParseRegularLine(self, line):
+ enum_start = HeaderParser.enum_start_re.match(line)
+ generator_directive_error = HeaderParser.generator_error_re.match(line)
+ generator_directive = HeaderParser.generator_directive_re.match(line)
+ multi_line_generator_directive_start = (
+ HeaderParser.multi_line_generator_directive_start_re.match(line))
+ single_line_enum = HeaderParser.enum_single_line_re.match(line)
+
+ if generator_directive_error:
+ raise Exception('Malformed directive declaration in ' + self._path +
+ '. Use () for multi-line directives. E.g.\n' +
+ '// GENERATED_JAVA_ENUM_PACKAGE: (\n' +
+ '// foo.package)')
+ elif generator_directive:
+ directive_name = generator_directive.groups()[0]
+ directive_value = generator_directive.groups()[1]
+ self._generator_directives.Update(directive_name, directive_value)
+ elif multi_line_generator_directive_start:
+ directive_name = multi_line_generator_directive_start.groups()[0]
+ directive_value = multi_line_generator_directive_start.groups()[1]
+ self._multi_line_generator_directive = (directive_name, [directive_value])
+ elif enum_start or single_line_enum:
+ # Enums with no pending directives are not meant for Java; skip them.
+ if self._generator_directives.empty:
+ return
+ # NOTE(review): if only enum_single_line_re matched (e.g. 'enum{' with
+ # no space), enum_start is None and .groups() below would raise —
+ # confirm such headers never occur in practice.
+ self._current_definition = EnumDefinition(
+ original_enum_name=enum_start.groups()[1],
+ fixed_type=enum_start.groups()[3])
+ self._in_enum = True
+ if single_line_enum:
+ self._ParseSingleLineEnum(single_line_enum.group('enum_entries'))
+
+
+def DoGenerate(source_paths):
+ # Generator: for each header in source_paths, yields
+ # (package-relative .java path, java source text) for every annotated enum.
+ # Raises if a header contains no annotated enums, which usually means the
+ # GENERATED_JAVA_ENUM_PACKAGE directive is missing.
+ for source_path in source_paths:
+ enum_definitions = DoParseHeaderFile(source_path)
+ if not enum_definitions:
+ raise Exception('No enums found in %s\n'
+ 'Did you forget prefixing enums with '
+ '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
+ source_path)
+ for enum_definition in enum_definitions:
+ package_path = enum_definition.enum_package.replace('.', os.path.sep)
+ file_name = enum_definition.class_name + '.java'
+ output_path = os.path.join(package_path, file_name)
+ output = GenerateOutput(source_path, enum_definition)
+ yield output_path, output
+
+
+def DoParseHeaderFile(path):
+ # Parse one C++ header and return its list of annotated EnumDefinitions.
+ with open(path) as f:
+ return HeaderParser(f.readlines(), path).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+ # Render a single Java @IntDef source file for enum_definition.
+ # source_path is only embedded in the generated header comment.
+ template = Template("""
+// Copyright ${YEAR} The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// ${SCRIPT_NAME}
+// From
+// ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+import android.support.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+${INT_DEF}
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+ enum_template = Template(' int ${NAME} = ${VALUE};')
+ enum_entries_string = []
+ enum_names = []
+ for enum_name, enum_value in enum_definition.entries.iteritems():
+ values = {
+ 'NAME': enum_name,
+ 'VALUE': enum_value,
+ }
+ # Entry comments become Javadoc blocks, wrapped at 100 columns.
+ enum_comments = enum_definition.comments.get(enum_name)
+ if enum_comments:
+ enum_comments_indent = ' * '
+ comments_line_wrapper = textwrap.TextWrapper(
+ initial_indent=enum_comments_indent,
+ subsequent_indent=enum_comments_indent,
+ width=100)
+ enum_entries_string.append(' /**')
+ enum_entries_string.append('\n'.join(
+ comments_line_wrapper.wrap(enum_comments)))
+ enum_entries_string.append(' */')
+ enum_entries_string.append(enum_template.substitute(values))
+ # NUM_ENTRIES is a count sentinel, not a real value: the constant is kept
+ # but excluded from the @IntDef value list.
+ if enum_name != "NUM_ENTRIES":
+ enum_names.append(enum_definition.class_name + '.' + enum_name)
+ enum_entries_string = '\n'.join(enum_entries_string)
+
+ enum_names_indent = ' ' * 4
+ wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent,
+ subsequent_indent = enum_names_indent,
+ width = 100)
+ enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names)))
+
+ values = {
+ 'CLASS_NAME': enum_definition.class_name,
+ 'ENUM_ENTRIES': enum_entries_string,
+ 'PACKAGE': enum_definition.enum_package,
+ 'INT_DEF': enum_names_string,
+ 'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+ 'SOURCE_PATH': source_path,
+ 'YEAR': str(date.today().year)
+ }
+ return template.substitute(values)
+
+
+def DoMain(argv):
+ # Command-line entry: parse flags, generate one .java per annotated enum,
+ # and pack them all into a single hermetic .srcjar.
+ usage = 'usage: %prog [options] [output_dir] input_file(s)...'
+ parser = optparse.OptionParser(usage=usage)
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--srcjar',
+ help='When specified, a .srcjar at the given path is '
+ 'created instead of individual .java files.')
+
+ options, args = parser.parse_args(argv)
+
+ if not args:
+ parser.error('Need to specify at least one input file')
+ input_paths = args
+
+ # NOTE(review): despite the usage/help text, --srcjar is effectively
+ # required — AtomicOutput is always handed options.srcjar here, and the
+ # [output_dir] positional is never consumed. Confirm intent.
+ with build_utils.AtomicOutput(options.srcjar) as f:
+ with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+ for output_path, data in DoGenerate(input_paths):
+ build_utils.AddToZipHermetic(srcjar, output_path, data=data)
+
+ if options.depfile:
+ build_utils.WriteDepfile(options.depfile, options.srcjar, add_pydeps=False)
+
+
+if __name__ == '__main__':
+ DoMain(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/java_cpp_enum.pydeps b/deps/v8/build/android/gyp/java_cpp_enum.pydeps
new file mode 100644
index 0000000000..d5869edddf
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_enum.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py
+../../gn_helpers.py
+java_cpp_enum.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/java_cpp_enum_tests.py b/deps/v8/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000000..5717047c7a
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_enum.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput
+from java_cpp_enum import HeaderParser
+from util import java_cpp_utils
+
+
+class TestPreprocess(unittest.TestCase):
+ def testOutput(self):
+ definition = EnumDefinition(original_enum_name='ClassName',
+ enum_package='some.package',
+ entries=[('E1', 1), ('E2', '2 << 2')],
+ comments=[('E2', 'This is a comment.'),
+ ('E1', 'This is a multiple line '
+ 'comment that is really long. '
+ 'This is a multiple line '
+ 'comment that is really '
+ 'really long.')])
+ output = GenerateOutput('path/to/file', definition)
+ expected = """
+// Copyright %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// %s
+// From
+// path/to/file
+
+package some.package;
+
+import android.support.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+ ClassName.E1, ClassName.E2
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ClassName {
+ /**
+ * %s
+ * really really long.
+ */
+ int E1 = 1;
+ /**
+ * This is a comment.
+ */
+ int E2 = 2 << 2;
+}
+"""
+ long_comment = ('This is a multiple line comment that is really long. '
+ 'This is a multiple line comment that is')
+ self.assertEqual(
+ expected % (date.today().year, java_cpp_utils.GetScriptName(),
+ long_comment), output)
+
+ def testParseSimpleEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ VALUE_ZERO,
+ VALUE_ONE,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumName', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+ ('VALUE_ONE', 1)]),
+ definition.entries)
+
+ def testParseBitShifts(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ VALUE_ZERO = 1 << 0,
+ VALUE_ONE = 1 << 1,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ ENUM_NAME_ZERO = 1 << 0,
+ ENUM_NAME_ONE = 1 << 1,
+ ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumName', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+ ('VALUE_ONE', '1 << 1')]),
+ definition.entries)
+
+ definition = definitions[1]
+ expected_entries = collections.OrderedDict([
+ ('ZERO', '1 << 0'),
+ ('ONE', '1 << 1'),
+ ('TWO', 'ZERO | ONE')])
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseMultilineEnumEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1 << 0,
+ VALUE_ONE =
+ SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey,
+ VALUE_TWO = 1 << 18,
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1 << 0'),
+ ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+ 'ControlKey'),
+ ('VALUE_TWO', '1 << 18')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseEnumEntryWithTrailingMultilineEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1,
+ VALUE_ONE =
+ SymbolKey | FnKey | AltGrKey | MetaKey |
+ AltKey | ControlKey | ShiftKey,
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1'),
+ ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+ 'ControlKey | ShiftKey')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseNoCommaAfterLastEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1,
+
+ // This is a multiline
+ //
+ // comment with an empty line.
+ VALUE_ONE = 2
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1'),
+ ('VALUE_ONE', '2')])
+ expected_comments = collections.OrderedDict([
+ ('VALUE_ONE', 'This is a multiline comment with an empty line.')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+ self.assertEqual(expected_comments, definition.comments)
+
+ def testParseClassNameOverride(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ FOO
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+ enum PrefixTest {
+ PREFIX_TEST_A,
+ PREFIX_TEST_B,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('OverrideName', definition.class_name)
+
+ definition = definitions[1]
+ self.assertEqual('OtherOverride', definition.class_name)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParsePreservesCommentsWhenPrefixStripping(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ ENUM_ONE_A = 1,
+ // Comment there
+ ENUM_ONE_B = A,
+ };
+
+ enum EnumIgnore {
+ C, D, E
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo {
+ P_A,
+ // This comment spans
+ // two lines.
+ P_B
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumOne', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', 'A')]),
+ definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+ definition.comments)
+ definition = definitions[1]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict(
+ [('B', 'This comment spans two lines.')]), definition.comments)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParseTwoEnums(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum AnEnum {
+ ENUM_ONE_A = 1,
+ ENUM_ONE_B = A,
+ };
+
+ enum EnumIgnore {
+ C, D, E
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ enum EnumTwo {
+ P_A,
+ P_B
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('AnEnum', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'),
+ ('ENUM_ONE_B', 'A')]),
+ definition.entries)
+ definition = definitions[1]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('P_A', 0),
+ ('P_B', 1)]),
+ definition.entries)
+
+ def testParseSingleLineEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo { P_A, P_B };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParseWithStrippingAndRelativeReferences(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo {
+ P_A = 1,
+ // P_A is old-don't use P_A.
+ P_B = P_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', 'A')]),
+ definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]),
+ definition.comments)
+
+ def testParseSingleLineAndRegularEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ ENUM_ONE_A = 1,
+ // Comment there
+ ENUM_ONE_B = A,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ enum EnumTwo { P_A, P_B };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ ENUM_NAME_FOO
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+ definition.comments)
+
+ self.assertEqual(3, len(definitions))
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries)
+
+ definition = definitions[2]
+ self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries)
+
+ def testParseWithCamelCaseNames(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumTest {
+ EnumTestA = 1,
+ // comment for EnumTestB.
+ EnumTestB = 2,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_PREFIX_TO_STRIP: Test
+ enum AnEnum {
+ TestHTTPOption,
+ TestHTTPSOption,
+ };
+
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('A', '1'), ('B', '2')]),
+ definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([('B', 'comment for B.')]),
+ definition.comments)
+
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]),
+ definition.entries)
+
+ def testParseWithKCamelCaseNames(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ kEnumOne = 1,
+ // comment for kEnumTwo.
+ kEnumTwo = 2,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ kEnumNameFoo,
+ kEnumNameBar
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ kEnumNameFoo,
+ kEnumBar,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Keys {
+ kSymbolKey = 1 << 0,
+ kAltKey = 1 << 1,
+ kUpKey = 1 << 2,
+ kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Mixed {
+ kTestVal,
+ kCodecMPEG2
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]),
+ definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]),
+ definition.comments)
+
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('FOO', 0), ('BAR', 1)]),
+ definition.entries)
+
+ definition = definitions[2]
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]),
+ definition.entries)
+
+ definition = definitions[3]
+ expected_entries = collections.OrderedDict([
+ ('SYMBOL_KEY', '1 << 0'),
+ ('ALT_KEY', '1 << 1'),
+ ('UP_KEY', '1 << 2'),
+ ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')])
+ self.assertEqual(expected_entries, definition.entries)
+
+ definition = definitions[4]
+ self.assertEqual(
+ collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]),
+ definition.entries)
+
+ def testParseThrowsOnUnknownDirective(self):
+ test_data = """
+ // GENERATED_JAVA_UNKNOWN: Value
+ enum EnumName {
+ VALUE_ONE,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseReturnsEmptyListWithoutDirectives(self):
+ test_data = """
+ enum EnumName {
+ VALUE_ONE,
+ };
+ """.split('\n')
+ self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+ def testParseEnumClass(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseEnumStruct(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum struct Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseFixedTypeEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Foo : int {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual('int', definition.fixed_type)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseFixedTypeEnumClass(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo: unsigned short {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual('unsigned short', definition.fixed_type)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseUnknownFixedTypeRaises(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo: foo_type {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseSimpleMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.namespace)
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+ self.assertEqual('Bar', definitions[0].class_name)
+
+ def testParseMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (te
+ // st.name
+ // space)
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+
+ def testParseMultiLineDirectiveWithOtherDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.namespace)
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+ // Ba
+ // r
+ // )
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+ self.assertEqual('Bar', definitions[0].class_name)
+
+ def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.name
+ // space
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.name
+ // space
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirectiveShort(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirectiveMissingBrackets(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE:
+ // test.namespace
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testEnumValueAssignmentNoneDefined(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('C', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2)]),
+ definition.entries)
+
+ def testEnumValueAssignmentAllDefined(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', '1')
+ definition.AppendEntry('B', '2')
+ definition.AppendEntry('C', '3')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', '2'),
+ ('C', '3')]),
+ definition.entries)
+
+ def testEnumValueAssignmentReferences(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'A')
+ definition.AppendEntry('C', None)
+ definition.AppendEntry('D', 'C')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 0),
+ ('C', 1),
+ ('D', 1)]),
+ definition.entries)
+
+ def testEnumValueAssignmentSet(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', '2')
+ definition.AppendEntry('C', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 2),
+ ('C', 3)]),
+ definition.entries)
+
+ def testEnumValueAssignmentSetReferences(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'A')
+ definition.AppendEntry('C', 'B')
+ definition.AppendEntry('D', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 0),
+ ('C', 0),
+ ('D', 1)]),
+ definition.entries)
+
+ def testEnumValueAssignmentRaises(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'foo')
+ definition.AppendEntry('C', None)
+ with self.assertRaises(Exception):
+ definition.Finalize()
+
+ def testExplicitPrefixStripping(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('P_A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('P_C', None)
+ definition.AppendEntry('P_LAST', 'P_C')
+ definition.prefix_to_strip = 'P_'
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2),
+ ('LAST', 2)]),
+ definition.entries)
+
+ def testImplicitPrefixStripping(self):
+ definition = EnumDefinition(original_enum_name='ClassName',
+ enum_package='p')
+ definition.AppendEntry('CLASS_NAME_A', None)
+ definition.AppendEntry('CLASS_NAME_B', None)
+ definition.AppendEntry('CLASS_NAME_C', None)
+ definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2),
+ ('LAST', 2)]),
+ definition.entries)
+
+ def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+ definition = EnumDefinition(original_enum_name='Name',
+ enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('NAME_LAST', None)
+ definition.Finalize()
+ self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())
+
+ def testGenerateThrowsOnEmptyInput(self):
+ with self.assertRaises(Exception):
+ original_do_parse = java_cpp_enum.DoParseHeaderFile
+ try:
+ java_cpp_enum.DoParseHeaderFile = lambda _: []
+ for _ in java_cpp_enum.DoGenerate(['file']):
+ pass
+ finally:
+ java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/java_cpp_strings.py b/deps/v8/build/android/gyp/java_cpp_strings.py
new file mode 100755
index 0000000000..acaaf223ef
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_strings.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+def _ToUpper(match):
+ return match.group(1).upper()
+
+
+def _GetClassName(source_path):
+ name = os.path.basename(os.path.abspath(source_path))
+ (name, _) = os.path.splitext(name)
+ name = re.sub(r'_([a-z])', _ToUpper, name)
+ name = re.sub(r'^(.)', _ToUpper, name)
+ return name
+
+
+class _String(object):
+
+ def __init__(self, name, value, comments):
+ self.name = java_cpp_utils.KCamelToShouty(name)
+ self.value = value
+ self.comments = '\n'.join(' ' + x for x in comments)
+
+ def Format(self):
+ return '%s\n public static final String %s = %s;' % (
+ self.comments, self.name, self.value)
+
+
+def ParseTemplateFile(lines):
+ package_re = re.compile(r'^package (.*);')
+ class_re = re.compile(r'.*class (.*) {')
+ package = ''
+ class_name = ''
+ for line in lines:
+ package_line = package_re.match(line)
+ if package_line:
+ package = package_line.groups()[0]
+ class_line = class_re.match(line)
+ if class_line:
+ class_name = class_line.groups()[0]
+ break
+ return package, class_name
+
+
+# TODO(crbug.com/937282): It should be possible to parse a file for more than
+# string constants. However, this currently only handles extracting string
+# constants from a file (and all string constants from that file). Work will
+# be needed if we want to annotate specific constants or non string constants
+# in the file to be parsed.
+class StringFileParser(object):
+ SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')
+ STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=\s*(?:(".*"))?')
+ VALUE_RE = re.compile(r'\s*("[^"]*")')
+
+ def __init__(self, lines, path=''):
+ self._lines = lines
+ self._path = path
+ self._in_string = False
+ self._in_comment = False
+ self._package = ''
+ self._current_comments = []
+ self._current_name = ''
+ self._current_value = ''
+ self._strings = []
+
+ def _Reset(self):
+ self._current_comments = []
+ self._current_name = ''
+ self._current_value = ''
+ self._in_string = False
+ self._in_comment = False
+
+ def _AppendString(self):
+ self._strings.append(
+ _String(self._current_name, self._current_value,
+ self._current_comments))
+ self._Reset()
+
+ def _ParseValue(self, line):
+ value_line = StringFileParser.VALUE_RE.match(line)
+ if value_line:
+ self._current_value = value_line.groups()[0]
+ self._AppendString()
+ else:
+ self._Reset()
+
+ def _ParseComment(self, line):
+ comment_line = StringFileParser.SINGLE_LINE_COMMENT_RE.match(line)
+ if comment_line:
+ self._current_comments.append(comment_line.groups()[0])
+ self._in_comment = True
+ self._in_string = True
+ return True
+ else:
+ self._in_comment = False
+ return False
+
+ def _ParseString(self, line):
+ string_line = StringFileParser.STRING_RE.match(line)
+ if string_line:
+ self._current_name = string_line.groups()[0]
+ if string_line.groups()[1]:
+ self._current_value = string_line.groups()[1]
+ self._AppendString()
+ return True
+ else:
+ self._in_string = False
+ return False
+
+ def _ParseLine(self, line):
+ if not self._in_string:
+ if not self._ParseString(line):
+ self._ParseComment(line)
+ return
+
+ if self._in_comment:
+ if self._ParseComment(line):
+ return
+ if not self._ParseString(line):
+ self._Reset()
+ return
+
+ if self._in_string:
+ self._ParseValue(line)
+
+ def Parse(self):
+ for line in self._lines:
+ self._ParseLine(line)
+ return self._strings
+
+
+def _GenerateOutput(template, source_path, template_path, strings):
+ description_template = """
+    // The following string constants were inserted by
+ // {SCRIPT_NAME}
+ // From
+ // {SOURCE_PATH}
+ // Into
+ // {TEMPLATE_PATH}
+
+"""
+ values = {
+ 'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+ 'SOURCE_PATH': source_path,
+ 'TEMPLATE_PATH': template_path,
+ }
+ description = description_template.format(**values)
+ native_strings = '\n\n'.join(x.Format() for x in strings)
+
+ values = {
+ 'NATIVE_STRINGS': description + native_strings,
+ }
+ return template.format(**values)
+
+
+def _ParseStringFile(path):
+ with open(path) as f:
+ return StringFileParser(f.readlines(), path).Parse()
+
+
+def _Generate(source_paths, template_path):
+ with open(template_path) as f:
+ lines = f.readlines()
+ template = ''.join(lines)
+ for source_path in source_paths:
+ strings = _ParseStringFile(source_path)
+ package, class_name = ParseTemplateFile(lines)
+ package_path = package.replace('.', os.path.sep)
+ file_name = class_name + '.java'
+ output_path = os.path.join(package_path, file_name)
+ output = _GenerateOutput(template, source_path, template_path, strings)
+ yield output, output_path
+
+
+def _Main(argv):
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument(
+ '--srcjar',
+ required=True,
+ help='When specified, a .srcjar at the given path is '
+ 'created instead of individual .java files.')
+
+ parser.add_argument(
+ '--template',
+ required=True,
+      help='Can be used to provide a context into which the '
+ 'new string constants will be inserted.')
+
+ parser.add_argument(
+ 'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
+ args = parser.parse_args(argv)
+
+ with build_utils.AtomicOutput(args.srcjar) as f:
+ with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+ for data, path in _Generate(args.inputs, args.template):
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+ _Main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/java_cpp_strings.pydeps b/deps/v8/build/android/gyp/java_cpp_strings.pydeps
new file mode 100644
index 0000000000..901b580e89
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_strings.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../gn_helpers.py
+java_cpp_strings.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/java_cpp_strings_tests.py b/deps/v8/build/android/gyp/java_cpp_strings_tests.py
new file mode 100755
index 0000000000..acf51e428e
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_cpp_strings_tests.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_strings.py.
+
+This test suite contains various tests for the C++ -> Java string generator.
+"""
+
+import unittest
+
+import java_cpp_strings
+
+
+class _TestStringsParser(unittest.TestCase):
+
+ def testParseComments(self):
+ test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const char kASwitch[] = "a-value";
+
+// Real comment that spans
+// multiple lines.
+const char kAnotherSwitch[] = "another-value";
+
+// Comment followed by nothing.
+""".split('\n')
+ strings = java_cpp_strings.StringFileParser(test_data).Parse()
+ self.assertEqual(2, len(strings))
+ self.assertEqual('A_SWITCH', strings[0].name)
+ self.assertEqual('"a-value"', strings[0].value)
+ self.assertEqual(1, len(strings[0].comments.split('\n')))
+ self.assertEqual('ANOTHER_SWITCH', strings[1].name)
+ self.assertEqual('"another-value"', strings[1].value)
+ self.assertEqual(2, len(strings[1].comments.split('\n')))
+
+ def testStringValues(self):
+ test_data = """
+// Single line string constants.
+const char kAString[] = "a-value";
+const char kNoComment[] = "no-comment";
+
+// Single line switch with a big space.
+const char kAStringWithSpace[] = "a-value";
+
+// Wrapped constant definition.
+const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
+ "a-string-with-a-very-long-name-that-will-have-to-wrap";
+
+// This is erroneous and should be ignored.
+const char kInvalidLineBreak[] =
+
+ "invalid-line-break";
+""".split('\n')
+ strings = java_cpp_strings.StringFileParser(test_data).Parse()
+ self.assertEqual(4, len(strings))
+ self.assertEqual('A_STRING', strings[0].name)
+ self.assertEqual('"a-value"', strings[0].value)
+ self.assertEqual('NO_COMMENT', strings[1].name)
+ self.assertEqual('"no-comment"', strings[1].value)
+ self.assertEqual('A_STRING_WITH_SPACE', strings[2].name)
+ self.assertEqual('"a-value"', strings[2].value)
+ self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+ strings[3].name)
+ self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
+ strings[3].value)
+
+ def testTemplateParsing(self):
+ test_data = """
+// Copyright {YEAR} The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// {SCRIPT_NAME}
+// From
+// {SOURCE_PATH}, and
+// {TEMPLATE_PATH}
+
+package my.java.package;
+
+public any sort of class MyClass {{
+
+{NATIVE_STRINGS}
+
+}}
+""".split('\n')
+ package, class_name = java_cpp_strings.ParseTemplateFile(test_data)
+ self.assertEqual('my.java.package', package)
+ self.assertEqual('MyClass', class_name)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/java_google_api_keys.py b/deps/v8/build/android/gyp/java_google_api_keys.py
new file mode 100755
index 0000000000..349821a8fc
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_google_api_keys.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a Java file with API keys.
+
+import argparse
+import os
+import string
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.append(
+ os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
+import google_api_keys
+
+
+# Java package and class name used for the generated GoogleAPIKeys.java.
+PACKAGE = 'org.chromium.chrome'
+CLASSNAME = 'GoogleAPIKeys'
+
+
+def GetScriptName():
+  """Returns this script's path relative to the source root.
+
+  Embedded in the generated file's "autogenerated by" header comment.
+  """
+  return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+
+def GenerateOutput(constant_definitions):
+  """Renders the GoogleAPIKeys.java source text.
+
+  Args:
+    constant_definitions: dict mapping Java constant name -> string value;
+        each entry becomes a 'public static final String' field.
+
+  Returns:
+    The complete Java source file contents as a string.
+  """
+  template = string.Template("""
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${CONSTANT_ENTRIES}
+}
+""")
+
+  constant_template = string.Template(
+      '  public static final String ${NAME} = "${VALUE}";')
+  constant_entries_list = []
+  # NOTE(review): iteritems() is Python-2-only; field order follows dict
+  # iteration order.
+  for constant_name, constant_value in constant_definitions.iteritems():
+    values = {
+        'NAME': constant_name,
+        'VALUE': constant_value,
+    }
+    constant_entries_list.append(constant_template.substitute(values))
+  constant_entries_string = '\n'.join(constant_entries_list)
+
+  values = {
+      'CLASS_NAME': CLASSNAME,
+      'CONSTANT_ENTRIES': constant_entries_string,
+      'PACKAGE': PACKAGE,
+      'SCRIPT_NAME': GetScriptName(),
+      'SOURCE_PATH': 'google_api_keys/google_api_keys.h',
+  }
+  return template.substitute(values)
+
+
+def _DoWriteJavaOutput(output_path, constant_definition):
+  # Writes the generated Java source to output_path, creating any missing
+  # parent directories first.
+  folder = os.path.dirname(output_path)
+  if folder and not os.path.exists(folder):
+    os.makedirs(folder)
+  with open(output_path, 'w') as out_file:
+    out_file.write(GenerateOutput(constant_definition))
+
+
+def _DoWriteJarOutput(output_path, constant_definition):
+  # Writes the generated Java source into a srcjar at output_path. The entry
+  # path inside the zip mirrors the Java package directory layout, and the
+  # entry is added via AddToZipHermetic (deterministic zip metadata).
+  folder = os.path.dirname(output_path)
+  if folder and not os.path.exists(folder):
+    os.makedirs(folder)
+  with zipfile.ZipFile(output_path, 'w') as srcjar:
+    path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
+    data = GenerateOutput(constant_definition)
+    build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+def _DoMain(argv):
+  """Parses arguments, collects all API key values, and writes outputs.
+
+  At least one of --out (plain .java file) or --srcjar must be given;
+  otherwise help is printed and the process exits with a nonzero status.
+  """
+  parser = argparse.ArgumentParser()
+  parser.add_argument("--out", help="Path for java output.")
+  parser.add_argument("--srcjar", help="Path for srcjar output.")
+  options = parser.parse_args(argv)
+  if not options.out and not options.srcjar:
+    parser.print_help()
+    sys.exit(-1)
+
+  # Constant name -> value, fetched from the checked-in google_api_keys
+  # module (which may read overrides from the environment — not visible here).
+  values = {}
+  values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey()
+  values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (google_api_keys.
+                                                GetAPIKeyPhysicalWebTest())
+  values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN')
+  values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN')
+  values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID(
+      'CLOUD_PRINT')
+  values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret(
+      'CLOUD_PRINT')
+  values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING')
+  values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret(
+      'REMOTING')
+  values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID(
+      'REMOTING_HOST')
+  values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys.
+                                                  GetClientSecret('REMOTING_HOST'))
+  values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (google_api_keys.
+      GetClientID('REMOTING_IDENTITY_API'))
+
+  if options.out:
+    _DoWriteJavaOutput(options.out, values)
+  if options.srcjar:
+    _DoWriteJarOutput(options.srcjar, values)
+
+
+# Script entry point: forward command-line args (minus program name).
+if __name__ == '__main__':
+  _DoMain(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/java_google_api_keys_tests.py b/deps/v8/build/android/gyp/java_google_api_keys_tests.py
new file mode 100755
index 0000000000..6529a5397e
--- /dev/null
+++ b/deps/v8/build/android/gyp/java_google_api_keys_tests.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_google_api_keys.py.
+
+This test suite contains various tests for the C++ -> Java Google API Keys
+generator.
+"""
+
+import unittest
+
+import java_google_api_keys
+
+
+class TestJavaGoogleAPIKeys(unittest.TestCase):
+  def testOutput(self):
+    # End-to-end check of GenerateOutput: two fake constants in, full Java
+    # source out. The %s placeholder is filled with the script's own relative
+    # path, matching the "autogenerated by" header GenerateOutput emits.
+    # NOTE(review): relies on dict iteration yielding E1 before E2.
+    definition = {'E1': 'abc', 'E2': 'defgh'}
+    output = java_google_api_keys.GenerateOutput(definition)
+    expected = """
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     google_api_keys/google_api_keys.h
+
+package org.chromium.chrome;
+
+public class GoogleAPIKeys {
+  public static final String E1 = "abc";
+  public static final String E2 = "defgh";
+}
+"""
+    self.assertEqual(expected % java_google_api_keys.GetScriptName(), output)
+
+
+# Allow running this test suite directly from the command line.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/deps/v8/build/android/gyp/javac.py b/deps/v8/build/android/gyp/javac.py
new file mode 100755
index 0000000000..7cbe74c4b1
--- /dev/null
+++ b/deps/v8/build/android/gyp/javac.py
@@ -0,0 +1,595 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import distutils.spawn
+import itertools
+import logging
+import multiprocessing
+import optparse
+import os
+import shutil
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import jar_info_utils
+
+import jar
+
+sys.path.insert(
+ 0,
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src'))
+import colorama
+
+
+# Error Prone bug patterns passed to the compiler as -Xep:<Name>:OFF in
+# main(); each entry documents the reason (or tracking bug) for disabling.
+ERRORPRONE_WARNINGS_TO_TURN_OFF = [
+    # TODO(crbug.com/834807): Follow steps in bug
+    'DoubleBraceInitialization',
+    # TODO(crbug.com/834790): Follow steps in bug.
+    'CatchAndPrintStackTrace',
+    # TODO(crbug.com/801210): Follow steps in bug.
+    'SynchronizeOnNonFinalField',
+    # TODO(crbug.com/802073): Follow steps in bug.
+    'TypeParameterUnusedInFormals',
+    # TODO(crbug.com/803484): Follow steps in bug.
+    'CatchFail',
+    # TODO(crbug.com/803485): Follow steps in bug.
+    'JUnitAmbiguousTestClass',
+    # Android platform default is always UTF-8.
+    # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
+    'DefaultCharset',
+    # Low priority since the alternatives still work.
+    'JdkObsolete',
+    # We don't use that many lambdas.
+    'FunctionalInterfaceClash',
+    # There are lots of times when we just want to post a task.
+    'FutureReturnValueIgnored',
+    # Nice to be explicit about operators, but not necessary.
+    'OperatorPrecedence',
+    # Just false positives in our code.
+    'ThreadJoinLoop',
+    # Low priority corner cases with String.split.
+    # Linking Guava and using Splitter was rejected
+    # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630.
+    'StringSplitter',
+    # Preferred to use another method since it propagates exceptions better.
+    'ClassNewInstance',
+    # Nice to have static inner classes but not necessary.
+    'ClassCanBeStatic',
+    # Explicit is better than implicit.
+    'FloatCast',
+    # Results in false positives.
+    'ThreadLocalUsage',
+    # Also just false positives.
+    'Finally',
+    # False positives for Chromium.
+    'FragmentNotInstantiable',
+    # Low priority to fix.
+    'HidingField',
+    # Low priority.
+    'IntLongMath',
+    # Low priority.
+    'BadComparable',
+    # Low priority.
+    'EqualsHashCode',
+    # Nice to fix but low priority.
+    'TypeParameterShadowing',
+    # Good to have immutable enums, also low priority.
+    'ImmutableEnumChecker',
+    # False positives for testing.
+    'InputStreamSlowMultibyteRead',
+    # Nice to have better primitives.
+    'BoxedPrimitiveConstructor',
+    # Not necessary for tests.
+    'OverrideThrowableToString',
+    # Nice to have better type safety.
+    'CollectionToArraySafeParameter',
+    # Makes logcat debugging more difficult, and does not provide obvious
+    # benefits in the Chromium codebase.
+    'ObjectToString',
+]
+
+# Error Prone bug patterns promoted from warning to error, passed as
+# -Xep:<Name>:ERROR in main().
+ERRORPRONE_WARNINGS_TO_ERROR = [
+    # Add warnings to this after fixing/suppressing all instances in our codebase.
+    'ArgumentSelectionDefectChecker',
+    'AssertionFailureIgnored',
+    'FloatingPointLiteralPrecision',
+    'JavaLangClash',
+    'MissingFail',
+    'MissingOverride',
+    'NarrowingCompoundAssignment',
+    'OrphanedFormatString',
+    'ParameterName',
+    'ParcelableCreator',
+    'ReferenceEquality',
+    'StaticGuardedByInstance',
+    'StaticQualifiedUsingExpression',
+    'UseCorrectAssertInTests',
+]
+
+
+def ProcessJavacOutput(output):
+  """Filters and colorizes javac's stderr output.
+
+  Drops the deprecation/unchecked/recompile "Note:" lines, then applies ANSI
+  colors (via colorama) to warning lines, error lines, and '^' caret markers.
+  Used as the stderr_filter for build_utils.CheckOutput in _OnStaleMd5.
+  """
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(
+      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(
+      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  # These warnings cannot be suppressed even for third party code. Deprecation
+  # warnings especially do not help since we must support older android version.
+  deprecated_re = re.compile(
+      r'(Note: .* uses? or overrides? a deprecated API.)$')
+  unchecked_re = re.compile(
+      r'(Note: .* uses? unchecked or unsafe operations.)$')
+  recompile_re = re.compile(r'(Note: Recompile with -Xlint:.* for details.)$')
+
+  # Each color spec is [group name to colorize, ANSI prefix to apply].
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    # Wraps the named group from |regex| in the ANSI codes from |color|.
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start]
+            + color[1] + line[start:end]
+            + colorama.Fore.RESET + colorama.Style.RESET_ALL
+            + line[end:])
+
+  def ApplyFilters(line):
+    # True for lines worth keeping (i.e. not an unsuppressable Note).
+    return not (deprecated_re.match(line)
+                or unchecked_re.match(line)
+                or recompile_re.match(line))
+
+  def ApplyColors(line):
+    # warning_re is tried before error_re since error_re also matches warnings.
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  return '\n'.join(map(ApplyColors, filter(ApplyFilters, output.split('\n'))))
+
+
+def _ExtractClassFiles(jar_path, dest_dir, java_files):
+  """Extracts all .class files not corresponding to |java_files|."""
+  # Two challenges exist here:
+  # 1. |java_files| have prefixes that are not represented in the the jar paths.
+  # 2. A single .java file results in multiple .class files when it contains
+  #    nested classes.
+  # Here's an example:
+  #   source path: ../../base/android/java/src/org/chromium/Foo.java
+  #   jar paths: org/chromium/Foo.class, org/chromium/Foo$Inner.class
+  # To extract only .class files not related to the given .java files, we strip
+  # off ".class" and "$*.class" and use a substring match against java_files.
+  def extract_predicate(path):
+    # True for .class entries whose originating .java file is NOT in
+    # |java_files| (substring match on the path suffix).
+    if not path.endswith('.class'):
+      return False
+    path_without_suffix = re.sub(r'(?:\$|\.)[^/]*class$', '', path)
+    partial_java_path = path_without_suffix + '.java'
+    return not any(p.endswith(partial_java_path) for p in java_files)
+
+  logging.info('Extracting class files from %s', jar_path)
+  build_utils.ExtractAll(jar_path, path=dest_dir, predicate=extract_predicate)
+  # Give every extracted .class file the jar's own timestamps/permissions.
+  for path in build_utils.FindInDirectory(dest_dir, '*.class'):
+    shutil.copystat(jar_path, path)
+
+
+def _ParsePackageAndClassNames(java_file):
+  """Extracts the package and top-level class names from a .java file.
+
+  Returns:
+    (package_name, class_names): package_name is '' when no package statement
+    is found; class_names lists unindented class/interface/enum declarations.
+  """
+  package_name = ''
+  class_names = []
+  with open(java_file) as f:
+    for l in f:
+      # Strip unindented comments.
+      # Considers a leading * as a continuation of a multi-line comment (our
+      # linter doesn't enforce a space before it like there should be).
+      l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l)
+
+      m = re.match(r'package\s+(.*?);', l)
+      if m and not package_name:
+        package_name = m.group(1)
+
+      # Not exactly a proper parser, but works for sources that Chrome uses.
+      # In order to not match nested classes, it just checks for lack of indent.
+      m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l)
+      if m:
+        class_names.append(m.group(1))
+  return package_name, class_names
+
+
+def _CheckPathMatchesClassName(java_file, package_name, class_name):
+  # Raises if java_file's path does not end with the directory layout implied
+  # by its package + class name (e.g. org/chromium/Foo.java).
+  parts = package_name.split('.') + [class_name + '.java']
+  expected_path_suffix = os.path.sep.join(parts)
+  if not java_file.endswith(expected_path_suffix):
+    raise Exception(('Java package+class name do not match its path.\n'
+                     'Actual path: %s\nExpected path: %s') %
+                    (java_file, expected_path_suffix))
+
+
+def _MoveGeneratedJavaFilesToGenDir(classes_dir, generated_java_dir):
+  # Move any Annotation Processor-generated .java files into $out/gen
+  # so that codesearch can find them.
+  # Returns the list of destination paths of the moved files.
+  javac_generated_sources = []
+  for src_path in build_utils.FindInDirectory(classes_dir, '*.java'):
+    dst_path = os.path.join(generated_java_dir,
+                            os.path.relpath(src_path, classes_dir))
+    build_utils.MakeDirectory(os.path.dirname(dst_path))
+    shutil.move(src_path, dst_path)
+    javac_generated_sources.append(dst_path)
+  return javac_generated_sources
+
+
+def _ProcessJavaFileForInfo(java_file):
+  # multiprocessing worker: parses one file and echoes the file path back so
+  # unordered results (imap_unordered) can be matched to their input.
+  package_name, class_names = _ParsePackageAndClassNames(java_file)
+  return java_file, package_name, class_names
+
+
+def _ProcessInfo(java_file, package_name, class_names, source, chromium_code):
+  """Yields the fully-qualified name of each class defined in |java_file|.
+
+  For chromium_code sources (excluding aidl srcjars), also asserts the file
+  declares exactly one class and that its path matches package + class name.
+  """
+  for class_name in class_names:
+    yield '{}.{}'.format(package_name, class_name)
+    # Skip aidl srcjars since they don't indent code correctly.
+    if '_aidl.srcjar' in source:
+      continue
+    assert not chromium_code or len(class_names) == 1, (
+        'Chromium java files must only have one class: {}'.format(source))
+    if chromium_code:
+      # This check is not necessary but nice to check this somewhere.
+      _CheckPathMatchesClassName(java_file, package_name, class_names[0])
+
+
+def _CreateInfoFile(java_files, jar_path, chromium_code, srcjar_files,
+                    classes_dir, generated_java_dir):
+  """Writes a .jar.info file.
+
+  This maps fully qualified names for classes to either the java file that they
+  are defined in or the path of the srcjar that they came from.
+  """
+  output_path = jar_path + '.info'
+  logging.info('Start creating info file: %s', output_path)
+  javac_generated_sources = _MoveGeneratedJavaFilesToGenDir(
+      classes_dir, generated_java_dir)
+  logging.info('Finished moving generated java files: %s', output_path)
+  # 2 processes saves ~0.9s, 3 processes saves ~1.2s, 4 processes saves ~1.2s.
+  pool = multiprocessing.Pool(processes=3)
+  results = pool.imap_unordered(
+      _ProcessJavaFileForInfo,
+      itertools.chain(java_files, javac_generated_sources),
+      chunksize=10)
+  pool.close()
+  # Map of fully-qualified class name -> defining .java file.
+  all_info_data = {}
+  for java_file, package_name, class_names in results:
+    # For extracted srcjar sources, report the srcjar-relative path instead of
+    # the on-disk extraction path.
+    source = srcjar_files.get(java_file, java_file)
+    for fully_qualified_name in _ProcessInfo(
+        java_file, package_name, class_names, source, chromium_code):
+      all_info_data[fully_qualified_name] = java_file
+  logging.info('Writing info file: %s', output_path)
+  with build_utils.AtomicOutput(output_path) as f:
+    jar_info_utils.WriteJarInfoFile(f, all_info_data, srcjar_files)
+  logging.info('Completed info file: %s', output_path)
+
+
+def _CreateJarFile(jar_path, provider_configurations, additional_jar_files,
+                   classes_dir):
+  """Jars the contents of classes_dir into jar_path (written atomically)."""
+  logging.info('Start creating jar file: %s', jar_path)
+  with build_utils.AtomicOutput(jar_path) as f:
+    jar.JarDirectory(
+        classes_dir,
+        f.name,
+        # Avoid putting generated java files into the jar since
+        # _MoveGeneratedJavaFilesToGenDir has not completed yet
+        predicate=lambda name: not name.endswith('.java'),
+        provider_configurations=provider_configurations,
+        additional_files=additional_jar_files)
+  logging.info('Completed jar file: %s', jar_path)
+
+
+def _OnStaleMd5(options, javac_cmd, java_files, classpath):
+  """Runs the actual compile and writes the jar and .jar.info outputs.
+
+  Invoked by CallAndWriteDepfileIfStale in main() only when inputs changed.
+  """
+  logging.info('Starting _OnStaleMd5')
+
+  # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
+  # rules run both in parallel, with Error Prone only used for checks.
+  save_outputs = not options.enable_errorprone
+
+  with build_utils.TempDir() as temp_dir:
+    srcjars = options.java_srcjars
+
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+
+    if save_outputs:
+      generated_java_dir = options.generated_dir
+    else:
+      generated_java_dir = os.path.join(temp_dir, 'gen')
+
+    # Clear leftovers from a previous run (second arg is ignore_errors).
+    shutil.rmtree(generated_java_dir, True)
+
+    # Map of extracted .java path -> "srcjar/relative/path" display name.
+    srcjar_files = {}
+    if srcjars:
+      logging.info('Extracting srcjars to %s', generated_java_dir)
+      build_utils.MakeDirectory(generated_java_dir)
+      jar_srcs = []
+      for srcjar in options.java_srcjars:
+        extracted_files = build_utils.ExtractAll(
+            srcjar, no_clobber=True, path=generated_java_dir, pattern='*.java')
+        for path in extracted_files:
+          # We want the path inside the srcjar so the viewer can have a tree
+          # structure.
+          srcjar_files[path] = '{}/{}'.format(
+              srcjar, os.path.relpath(path, generated_java_dir))
+        jar_srcs.extend(extracted_files)
+      logging.info('Done extracting srcjars')
+      java_files.extend(jar_srcs)
+
+    if java_files:
+      # Don't include the output directory in the initial set of args since it
+      # being in a temp dir makes it unstable (breaks md5 stamping).
+      cmd = javac_cmd + ['-d', classes_dir]
+
+      # Pass classpath and source paths as response files to avoid extremely
+      # long command lines that are tedius to debug.
+      if classpath:
+        cmd += ['-classpath', ':'.join(classpath)]
+
+      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+      with open(java_files_rsp_path, 'w') as f:
+        f.write(' '.join(java_files))
+      cmd += ['@' + java_files_rsp_path]
+
+      logging.debug('Build command %s', cmd)
+      build_utils.CheckOutput(
+          cmd,
+          print_stdout=options.chromium_code,
+          stderr_filter=ProcessJavacOutput)
+      logging.info('Finished build command')
+
+    if save_outputs:
+      # Creating the jar file takes the longest, start it first on a separate
+      # process to unblock the rest of the post-processing steps.
+      jar_file_worker = multiprocessing.Process(
+          target=_CreateJarFile,
+          args=(options.jar_path, options.provider_configurations,
+                options.additional_jar_files, classes_dir))
+      jar_file_worker.start()
+    else:
+      # Errorprone-only run: outputs are placeholders, just touch them.
+      jar_file_worker = None
+      build_utils.Touch(options.jar_path)
+
+    if save_outputs:
+      _CreateInfoFile(java_files, options.jar_path, options.chromium_code,
+                      srcjar_files, classes_dir, generated_java_dir)
+    else:
+      build_utils.Touch(options.jar_path + '.info')
+
+    if jar_file_worker:
+      jar_file_worker.join()
+    logging.info('Completed all steps in _OnStaleMd5')
+
+
+def _ParseOptions(argv):
+  """Parses command-line arguments.
+
+  Positional args are .java paths; an arg starting with '@' names a file
+  listing sources. GN-list style options are expanded via ParseGnList.
+
+  Returns:
+    (options, java_files) tuple.
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--generated-dir',
+      help='Subdirectory within target_gen_dir to place extracted srcjars and '
+      'annotation processor output for codesearch to find.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the classpath.')
+  parser.add_option(
+      '--java-version',
+      help='Java language version to use in -source and -target args to javac.')
+  parser.add_option(
+      '--full-classpath',
+      action='append',
+      help='Classpath to use when annotation processors are present.')
+  parser.add_option(
+      '--interface-classpath',
+      action='append',
+      help='Classpath to use when no annotation processors are present.')
+  parser.add_option(
+      '--processors',
+      action='append',
+      help='GN list of annotation processor main classes.')
+  parser.add_option(
+      '--processorpath',
+      action='append',
+      help='GN list of jars that comprise the classpath used for Annotation '
+      'Processors.')
+  parser.add_option(
+      '--processor-arg',
+      dest='processor_args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_option(
+      '--provider-configuration',
+      dest='provider_configurations',
+      action='append',
+      help='File to specify a service provider. Will be included '
+      'in the jar under META-INF/services.')
+  parser.add_option(
+      '--additional-jar-file',
+      dest='additional_jar_files',
+      action='append',
+      help='Additional files to package into jar. By default, only Java .class '
+      'files are packaged into the jar. Files should be specified in '
+      'format <filename>:<path to be placed in jar>.')
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+  parser.add_option(
+      '--errorprone-path', help='Use the Errorprone compiler at this path.')
+  parser.add_option(
+      '--enable-errorprone',
+      action='store_true',
+      help='Enable errorprone checks')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--javac-arg',
+      action='append',
+      default=[],
+      help='Additional arguments to pass to javac.')
+
+  options, args = parser.parse_args(argv)
+  build_utils.CheckOptions(options, parser, required=('jar_path',))
+
+  # Expand GN-list syntax into real Python lists.
+  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+  options.full_classpath = build_utils.ParseGnList(options.full_classpath)
+  options.interface_classpath = build_utils.ParseGnList(
+      options.interface_classpath)
+  options.processorpath = build_utils.ParseGnList(options.processorpath)
+  options.processors = build_utils.ParseGnList(options.processors)
+  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+
+  if options.java_version == '1.8' and options.bootclasspath:
+    # Android's boot jar doesn't contain all java 8 classes.
+    # See: https://github.com/evant/gradle-retrolambda/issues/23.
+    # Get the path of the jdk folder by searching for the 'jar' executable. We
+    # cannot search for the 'javac' executable because goma provides a custom
+    # version of 'javac'.
+    jar_path = os.path.realpath(distutils.spawn.find_executable('jar'))
+    jdk_dir = os.path.dirname(os.path.dirname(jar_path))
+    rt_jar = os.path.join(jdk_dir, 'jre', 'lib', 'rt.jar')
+    options.bootclasspath.append(rt_jar)
+
+  # Split "<src path>:<path in jar>" pairs into tuples.
+  additional_jar_files = []
+  for arg in options.additional_jar_files or []:
+    filepath, jar_filepath = arg.split(':')
+    additional_jar_files.append((filepath, jar_filepath))
+  options.additional_jar_files = additional_jar_files
+
+  java_files = []
+  for arg in args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
+    if arg.startswith('@'):
+      java_files.extend(build_utils.ReadSourcesList(arg[1:]))
+    else:
+      java_files.append(arg)
+
+  return options, java_files
+
+
+def main(argv):
+  """Builds the javac/errorprone command line and compiles when stale."""
+  logging.basicConfig(
+      level=logging.INFO if os.environ.get('_JAVAC_DEBUG') else logging.WARNING,
+      format='%(levelname).1s %(relativeCreated)6d %(message)s')
+  colorama.init()
+
+  argv = build_utils.ExpandFileArgs(argv)
+  options, java_files = _ParseOptions(argv)
+
+  # Until we add a version of javac via DEPS, use errorprone with all checks
+  # disabled rather than javac. This ensures builds are reproducible.
+  # https://crbug.com/693079
+  # As of Jan 2019, on a z920, compiling chrome_java times:
+  #  * With javac: 17 seconds
+  #  * With errorprone (checks disabled): 20 seconds
+  #  * With errorprone (checks enabled): 30 seconds
+  if options.errorprone_path:
+    javac_path = options.errorprone_path
+  else:
+    javac_path = distutils.spawn.find_executable('javac')
+
+  javac_cmd = [
+      javac_path,
+      '-g',
+      # Chromium only allows UTF8 source files.  Being explicit avoids
+      # javac pulling a default encoding from the user's environment.
+      '-encoding',
+      'UTF-8',
+      # Prevent compiler from compiling .java files not listed as inputs.
+      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
+      '-sourcepath',
+      ':',
+  ]
+
+  if options.enable_errorprone:
+    for warning in ERRORPRONE_WARNINGS_TO_TURN_OFF:
+      javac_cmd.append('-Xep:{}:OFF'.format(warning))
+    for warning in ERRORPRONE_WARNINGS_TO_ERROR:
+      javac_cmd.append('-Xep:{}:ERROR'.format(warning))
+  elif options.errorprone_path:
+    # Errorprone used as plain javac substitute: silence every check.
+    javac_cmd.append('-XepDisableAllChecks')
+
+  if options.java_version:
+    javac_cmd.extend([
+        '-source', options.java_version,
+        '-target', options.java_version,
+    ])
+
+  if options.chromium_code:
+    javac_cmd.extend(['-Werror'])
+  else:
+    # XDignore.symbol.file makes javac compile against rt.jar instead of
+    # ct.sym. This means that using a java internal package/class will not
+    # trigger a compile warning or error.
+    javac_cmd.extend(['-XDignore.symbol.file'])
+
+  if options.processors:
+    javac_cmd.extend(['-processor', ','.join(options.processors)])
+
+  if options.bootclasspath:
+    javac_cmd.extend(['-bootclasspath', ':'.join(options.bootclasspath)])
+
+  # Annotation processors crash when given interface jars.
+  active_classpath = (
+      options.full_classpath
+      if options.processors else options.interface_classpath)
+  classpath = []
+  if active_classpath:
+    classpath.extend(active_classpath)
+
+  if options.processorpath:
+    javac_cmd.extend(['-processorpath', ':'.join(options.processorpath)])
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_cmd.extend(['-A%s' % arg])
+
+  javac_cmd.extend(options.javac_arg)
+
+  classpath_inputs = (options.bootclasspath + options.interface_classpath +
+                      options.processorpath)
+
+  # GN already knows of java_files, so listing them just make things worse when
+  # they change.
+  depfile_deps = [javac_path] + classpath_inputs + options.java_srcjars
+  input_paths = depfile_deps + java_files
+
+  output_paths = [
+      options.jar_path,
+      options.jar_path + '.info',
+  ]
+
+  # List python deps in input_strings rather than input_paths since the contents
+  # of them does not change what gets written to the depsfile.
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(options, javac_cmd, java_files, classpath),
+      options,
+      depfile_deps=depfile_deps,
+      input_paths=input_paths,
+      input_strings=javac_cmd + classpath,
+      output_paths=output_paths,
+      add_pydeps=False)
+  logging.info('Script complete: %s', __file__)
+
+
+# Script entry point; exit status propagates main()'s return value.
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/javac.pydeps b/deps/v8/build/android/gyp/javac.pydeps
new file mode 100644
index 0000000000..a9d257b95f
--- /dev/null
+++ b/deps/v8/build/android/gyp/javac.pydeps
@@ -0,0 +1,15 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/javac.pydeps build/android/gyp/javac.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../gn_helpers.py
+jar.py
+javac.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/jinja_template.py b/deps/v8/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000000..4d5c403dfe
--- /dev/null
+++ b/deps/v8/build/android/gyp/jinja_template.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import argparse
+import os
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2 # pylint: disable=F0401
+
+
+class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  """FileSystemLoader that records the relative path of every loaded template.
+
+  The recorded set backs JinjaProcessor.GetLoadedTemplates, which main() uses
+  for the --check-includes verification.
+  """
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    # Record the template (including anything it {% include %}s, since those
+    # also come through this loader).
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+
+class JinjaProcessor(object):
+  """Allows easy rendering of jinja templates with input file tracking."""
+  def __init__(self, loader_base_dir, variables=None):
+    self.loader_base_dir = loader_base_dir
+    self.variables = variables or {}
+    self.loader = _RecordingFileSystemLoader(loader_base_dir)
+    self.env = jinja2.Environment(loader=self.loader)
+    # Fail loudly on undefined template variables instead of rendering ''.
+    self.env.undefined = jinja2.StrictUndefined
+    self.env.line_comment_prefix = '##'
+    self.env.trim_blocks = True
+    self.env.lstrip_blocks = True
+    self._template_cache = {}  # Map of path -> Template
+
+  def Render(self, input_filename, variables=None):
+    # Renders input_filename, compiling it at most once per processor.
+    # |variables| (when given) replaces the instance-level defaults.
+    input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
+    template = self._template_cache.get(input_rel_path)
+    if not template:
+      template = self.env.get_template(input_rel_path)
+      self._template_cache[input_rel_path] = template
+    return template.render(variables or self.variables)
+
+  def GetLoadedTemplates(self):
+    # All templates loaded so far, as paths relative to the cwd.
+    return list(self.loader.loaded_templates)
+
+
+def _ProcessFile(processor, input_filename, output_filename):
+  """Renders one template to output_filename, skipping no-op rewrites."""
+  output = processor.Render(input_filename)
+
+  # If |output| is same with the file content, we skip update and
+  # ninja's restat will avoid rebuilding things that depend on it.
+  if os.path.isfile(output_filename):
+    with codecs.open(output_filename, 'r', 'utf-8') as f:
+      if f.read() == output:
+        return
+
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
+  """Renders each input under a temp dir, then zips the results.
+
+  Each input must live under inputs_base_dir; its path relative to that dir
+  becomes its path inside outputs_zip. Also writes a resource info file via
+  resource_utils.CreateResourceInfoFile mapping zip paths to source files.
+  """
+  with build_utils.TempDir() as temp_dir:
+    files_to_zip = dict()
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      _ProcessFile(processor, input_filename, output_filename)
+      files_to_zip[relpath] = input_filename
+
+    resource_utils.CreateResourceInfoFile(files_to_zip, outputs_zip)
+    build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def _ParseVariables(variables_arg, error_func):
+  """Parses a GN-list of 'name=value' strings into a dict.
+
+  Calls error_func (e.g. parser.error) for entries without an '='.
+  """
+  variables = {}
+  for v in build_utils.ParseGnList(variables_arg):
+    if '=' not in v:
+      error_func('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+  return variables
+
+
+def main():
+  """Command-line entry: renders templates to --output or --outputs-zip."""
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--inputs', required=True,
+                      help='GN-list of template files to process.')
+  parser.add_argument('--includes', default='',
+                      help="GN-list of files that get {% include %}'ed.")
+  parser.add_argument('--output', help='The output file to generate. Valid '
+                      'only if there is a single input.')
+  parser.add_argument('--outputs-zip', help='A zip file for the processed '
+                      'templates. Required if there are multiple inputs.')
+  parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
+                      'of the inputs. Each output\'s path in the output zip '
+                      'will match the relative path from INPUTS_BASE_DIR to '
+                      'the input. Required if --output-zip is given.')
+  parser.add_argument('--loader-base-dir', help='Base path used by the '
+                      'template loader. Must be a common ancestor directory of '
+                      'the inputs. Defaults to DIR_SOURCE_ROOT.',
+                      default=host_paths.DIR_SOURCE_ROOT)
+  parser.add_argument('--variables', help='Variables to be made available in '
+                      'the template processing environment, as a GYP list '
+                      '(e.g. --variables "channel=beta mstone=39")', default='')
+  parser.add_argument('--check-includes', action='store_true',
+                      help='Enable inputs and includes checks.')
+  options = parser.parse_args()
+
+  inputs = build_utils.ParseGnList(options.inputs)
+  includes = build_utils.ParseGnList(options.includes)
+
+  # Exactly one output mode must be selected.
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --output-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --output-zip is used')
+
+  variables = _ParseVariables(options.variables, parser.error)
+  processor = JinjaProcessor(options.loader_base_dir, variables=variables)
+
+  if options.output:
+    _ProcessFile(processor, inputs[0], options.output)
+  else:
+    _ProcessFiles(processor, inputs, options.inputs_base_dir,
+                  options.outputs_zip)
+
+  if options.check_includes:
+    # Any template the loader touched that is neither a declared input nor a
+    # declared include is an undeclared dependency -> fail the build.
+    all_inputs = set(processor.GetLoadedTemplates())
+    all_inputs.difference_update(inputs)
+    all_inputs.difference_update(includes)
+    if all_inputs:
+      raise Exception('Found files not listed via --includes:\n' +
+                      '\n'.join(sorted(all_inputs)))
+
+
+# Script entry point.
+if __name__ == '__main__':
+  main()
diff --git a/deps/v8/build/android/gyp/jinja_template.pydeps b/deps/v8/build/android/gyp/jinja_template.pydeps
new file mode 100644
index 0000000000..a2a38176bf
--- /dev/null
+++ b/deps/v8/build/android/gyp/jinja_template.pydeps
@@ -0,0 +1,41 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+jinja_template.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/lint.py b/deps/v8/build/android/gyp/lint.py
new file mode 100755
index 0000000000..b2f90c7e9e
--- /dev/null
+++ b/deps/v8/build/android/gyp/lint.py
@@ -0,0 +1,399 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs Android's lint tool."""
+
+
+import argparse
+import os
+import re
+import sys
+import traceback
+from xml.dom import minidom
+
+from util import build_utils
+
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
+
+
+def _OnStaleMd5(lint_path, config_path, processed_config_path,
+                manifest_path, result_path, product_dir, sources, jar_path,
+                cache_dir, android_sdk_version, srcjars, resource_sources,
+                disable=None, classpath=None, can_fail_build=False,
+                include_unexpected=False, silent=False):
+  """Builds and runs the Android lint command, then reports its XML results.
+
+  Invoked (via main) only when inputs are stale. Assembles a lint invocation
+  inside a temporary directory, runs it from the top-level src dir, and
+  post-processes/prints the resulting XML issue report.
+
+  Args:
+    lint_path: Path to the lint executable.
+    config_path: Optional lint suppressions file; PRODUCT_DIR placeholders in
+      it are expanded into processed_config_path.
+    processed_config_path: Where the expanded config is written/read.
+    manifest_path: Optional AndroidManifest.xml (a default one is used if
+      empty).
+    result_path: Output path for lint's XML report.
+    product_dir: Build product dir, substituted for PRODUCT_DIR in configs.
+    sources: Java source files to lint.
+    jar_path: Optional jar with this target's .class files.
+    cache_dir: Directory used as java user.home so lint caches there.
+    srcjars: GN list (string) of srcjars to extract and lint.
+    resource_sources: Resource directories or resource zip archives.
+    disable: Optional list of lint check names to disable.
+    classpath: Optional list of dependency jars (passed via --libraries).
+    can_fail_build: If True, raise on lint errors/unparseable output.
+    include_unexpected: If True, also fail on lint's own internal crashes.
+    silent: If True, suppress all logging.
+  """
+  def _RebasePath(path):
+    """Returns relative path to top-level src dir.
+
+    Args:
+      path: A path relative to cwd.
+    """
+    ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT)
+    # If it's outside of src/, just use abspath.
+    if ret.startswith('..'):
+      ret = os.path.abspath(path)
+    return ret
+
+  def _ProcessConfigFile():
+    # Expand PRODUCT_DIR placeholders in the suppressions config, but only
+    # when the processed copy is older than the source config.
+    if not config_path or not processed_config_path:
+      return
+    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
+      return
+
+    with open(config_path, 'rb') as f:
+      content = f.read().replace(
+          'PRODUCT_DIR', _RebasePath(product_dir))
+
+    with open(processed_config_path, 'wb') as f:
+      f.write(content)
+
+  def _ProcessResultFile():
+    # Inverse of _ProcessConfigFile: make the result machine-independent by
+    # replacing the concrete product dir with the PRODUCT_DIR placeholder.
+    with open(result_path, 'rb') as f:
+      content = f.read().replace(
+          _RebasePath(product_dir), 'PRODUCT_DIR')
+
+    with open(result_path, 'wb') as f:
+      f.write(content)
+
+  def _ParseAndShowResultFile():
+    # Parses lint's XML report, prints each issue to stderr (unless silent),
+    # and returns the number of issues found.
+    dom = minidom.parse(result_path)
+    issues = dom.getElementsByTagName('issue')
+    if not silent:
+      print >> sys.stderr
+      for issue in issues:
+        issue_id = issue.attributes['id'].value
+        message = issue.attributes['message'].value
+        location_elem = issue.getElementsByTagName('location')[0]
+        path = location_elem.attributes['file'].value
+        line = location_elem.getAttribute('line')
+        if line:
+          error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
+        else:
+          # Issues in class files don't have a line number.
+          error = '%s %s: %s [warning]' % (path, message, issue_id)
+        print >> sys.stderr, error.encode('utf-8')
+        for attr in ['errorLine1', 'errorLine2']:
+          error_line = issue.getAttribute(attr)
+          if error_line:
+            print >> sys.stderr, error_line.encode('utf-8')
+    return len(issues)
+
+  with build_utils.TempDir() as temp_dir:
+    _ProcessConfigFile()
+
+    cmd = [
+        _RebasePath(lint_path), '-Werror', '--exitcode', '--showall',
+        '--xml', _RebasePath(result_path),
+    ]
+    if jar_path:
+      # --classpath is just for .class files for this one target.
+      cmd.extend(['--classpath', _RebasePath(jar_path)])
+    if processed_config_path:
+      cmd.extend(['--config', _RebasePath(processed_config_path)])
+
+    # Mutable cell so the nested helper can bump the counter (Python 2 has no
+    # "nonlocal").
+    tmp_dir_counter = [0]
+    def _NewTempSubdir(prefix, append_digit=True):
+      # Helper function to create a new sub directory based on the number of
+      # subdirs created earlier.
+      if append_digit:
+        tmp_dir_counter[0] += 1
+        prefix += str(tmp_dir_counter[0])
+      new_dir = os.path.join(temp_dir, prefix)
+      os.makedirs(new_dir)
+      return new_dir
+
+    resource_dirs = []
+    for resource_source in resource_sources:
+      if os.path.isdir(resource_source):
+        resource_dirs.append(resource_source)
+      else:
+        # This is a zip file with generated resources (e.g. strings from GRD).
+        # Extract it to a temporary folder.
+        resource_dir = _NewTempSubdir(resource_source, append_digit=False)
+        resource_dirs.append(resource_dir)
+        build_utils.ExtractAll(resource_source, path=resource_dir)
+
+    for resource_dir in resource_dirs:
+      cmd.extend(['--resources', _RebasePath(resource_dir)])
+
+    if classpath:
+      # --libraries is the classpath (excluding active target).
+      cp = ':'.join(_RebasePath(p) for p in classpath)
+      cmd.extend(['--libraries', cp])
+
+    # There may be multiple source files with the same basename (but in
+    # different directories). It is difficult to determine what part of the path
+    # corresponds to the java package, and so instead just link the source files
+    # into temporary directories (creating a new one whenever there is a name
+    # conflict).
+    def PathInDir(d, src):
+      # Mirrors |src|'s src-relative path under directory |d|, creating parent
+      # dirs as needed, and returns the resulting path.
+      subpath = os.path.join(d, _RebasePath(src))
+      subdir = os.path.dirname(subpath)
+      if not os.path.exists(subdir):
+        os.makedirs(subdir)
+      return subpath
+
+    src_dirs = []
+    for src in sources:
+      # Place each source in the first temp dir that doesn't already hold a
+      # file at the same relative path; open a new dir on conflict.
+      src_dir = None
+      for d in src_dirs:
+        if not os.path.exists(PathInDir(d, src)):
+          src_dir = d
+          break
+      if not src_dir:
+        src_dir = _NewTempSubdir('SRC_ROOT')
+        src_dirs.append(src_dir)
+        cmd.extend(['--sources', _RebasePath(src_dir)])
+      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+    if srcjars:
+      srcjar_paths = build_utils.ParseGnList(srcjars)
+      if srcjar_paths:
+        srcjar_dir = _NewTempSubdir('SRC_ROOT')
+        cmd.extend(['--sources', _RebasePath(srcjar_dir)])
+        for srcjar in srcjar_paths:
+          build_utils.ExtractAll(srcjar, path=srcjar_dir)
+
+    if disable:
+      cmd.extend(['--disable', ','.join(disable)])
+
+    project_dir = _NewTempSubdir('SRC_ROOT')
+    if android_sdk_version:
+      # Create dummy project.properties file in a temporary "project"
+      # directory. It is the only way to add the Android SDK to lint's
+      # classpath. A proper classpath is necessary for most source-level
+      # checks.
+      with open(os.path.join(project_dir, 'project.properties'), 'w') \
+          as propfile:
+        print >> propfile, 'target=android-{}'.format(android_sdk_version)
+
+    # Put the manifest in a temporary directory in order to avoid lint detecting
+    # sibling res/ and src/ directories (which should be passed explicitly if
+    # they are to be included).
+    if not manifest_path:
+      manifest_path = os.path.join(
+          build_utils.DIR_SOURCE_ROOT, 'build', 'android',
+          'AndroidManifest.xml')
+    os.symlink(os.path.abspath(manifest_path),
+               os.path.join(project_dir, 'AndroidManifest.xml'))
+    cmd.append(project_dir)
+
+    # Remove any stale report so a lint failure can be detected by absence.
+    if os.path.exists(result_path):
+      os.remove(result_path)
+
+    env = os.environ.copy()
+    stderr_filter = None
+    if cache_dir:
+      env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir)
+      # When _JAVA_OPTIONS is set, java prints to stderr:
+      # Picked up _JAVA_OPTIONS: ...
+      #
+      # We drop all lines that contain _JAVA_OPTIONS from the output
+      stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
+
+    def fail_func(returncode, stderr):
+      # Treat a nonzero exit as failure; optionally also treat lint's own
+      # internal crashes (reported on stderr) as failures.
+      if returncode != 0:
+        return True
+      if (include_unexpected and
+          'Unexpected failure during lint analysis' in stderr):
+        return True
+      return False
+
+    try:
+      build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT,
+                              env=env or None, stderr_filter=stderr_filter,
+                              fail_func=fail_func)
+    except build_utils.CalledProcessError:
+      # If no report was produced at all, there is a problem with the lint
+      # invocation itself, so re-raise.
+      if not os.path.exists(result_path):
+        raise
+
+      # Sometimes produces empty (almost) files:
+      if os.path.getsize(result_path) < 10:
+        if can_fail_build:
+          raise
+        elif not silent:
+          traceback.print_exc()
+        return
+
+      # There are actual lint issues
+      try:
+        num_issues = _ParseAndShowResultFile()
+      except Exception:  # pylint: disable=broad-except
+        if not silent:
+          print 'Lint created unparseable xml file...'
+          print 'File contents:'
+          with open(result_path) as f:
+            print f.read()
+        if can_fail_build:
+          traceback.print_exc()
+        if can_fail_build:
+          raise
+        else:
+          return
+
+      _ProcessResultFile()
+      if num_issues == 0 and include_unexpected:
+        msg = 'Please refer to output above for unexpected lint failures.\n'
+      else:
+        msg = ('\nLint found %d new issues.\n'
+               ' - For full explanation, please refer to %s\n'
+               ' - For more information about lint and how to fix lint issues,'
+               ' please refer to %s\n' %
+               (num_issues, _RebasePath(result_path), _LINT_MD_URL))
+      if not silent:
+        print >> sys.stderr, msg
+      if can_fail_build:
+        raise Exception('Lint failed.')
+
+
+def _FindInDirectories(directories, filename_filter):
+  """Returns all files under |directories| whose names match
+  |filename_filter| (via build_utils.FindInDirectory)."""
+  all_files = []
+  for directory in directories:
+    all_files.extend(build_utils.FindInDirectory(directory, filename_filter))
+  return all_files
+
+
+def main():
+  """Parses arguments and runs lint via CallAndWriteDepfileIfStale.
+
+  Gathers the lint inputs (sources, jars, resources, configs) so the md5
+  staleness check only re-runs lint when something relevant changed.
+  """
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_argument('--lint-path', required=True,
+                      help='Path to lint executable.')
+  parser.add_argument('--product-dir', required=True,
+                      help='Path to product dir.')
+  parser.add_argument('--result-path', required=True,
+                      help='Path to XML lint result file.')
+  parser.add_argument('--cache-dir', required=True,
+                      help='Path to the directory in which the android cache '
+                           'directory tree should be stored.')
+  parser.add_argument('--platform-xml-path', required=True,
+                      help='Path to api-platforms.xml')
+  parser.add_argument('--android-sdk-version',
+                      help='Version (API level) of the Android SDK used for '
+                           'building.')
+  parser.add_argument('--create-cache', action='store_true',
+                      help='Mark the lint cache file as an output rather than '
+                           'an input.')
+  parser.add_argument('--can-fail-build', action='store_true',
+                      help='If set, script will exit with nonzero exit status'
+                           ' if lint errors are present')
+  parser.add_argument('--include-unexpected-failures', action='store_true',
+                      help='If set, script will exit with nonzero exit status'
+                           ' if lint itself crashes with unexpected failures.')
+  parser.add_argument('--config-path',
+                      help='Path to lint suppressions file.')
+  parser.add_argument('--disable',
+                      help='List of checks to disable.')
+  parser.add_argument('--jar-path',
+                      help='Jar file containing class files.')
+  parser.add_argument('--java-sources-file',
+                      help='File containing a list of java files.')
+  parser.add_argument('--manifest-path',
+                      help='Path to AndroidManifest.xml')
+  parser.add_argument('--classpath', default=[], action='append',
+                      help='GYP-list of classpath .jar files')
+  parser.add_argument('--processed-config-path',
+                      help='Path to processed lint suppressions file.')
+  parser.add_argument('--resource-dir',
+                      help='Path to resource dir.')
+  parser.add_argument('--resource-sources', default=[], action='append',
+                      help='GYP-list of resource sources (directories with '
+                           'resources or archives created by resource-generating '
+                           'tasks.')
+  parser.add_argument('--silent', action='store_true',
+                      help='If set, script will not log anything.')
+  parser.add_argument('--src-dirs',
+                      help='Directories containing java files.')
+  parser.add_argument('--srcjars',
+                      help='GN list of included srcjars.')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+  # Sources may come either from directories to scan or from an explicit
+  # sources-list file (the two options are alternatives; --src-dirs wins).
+  sources = []
+  if args.src_dirs:
+    src_dirs = build_utils.ParseGnList(args.src_dirs)
+    sources = _FindInDirectories(src_dirs, '*.java')
+  elif args.java_sources_file:
+    sources.extend(build_utils.ReadSourcesList(args.java_sources_file))
+
+  # --config-path and --processed-config-path must be given together.
+  if args.config_path and not args.processed_config_path:
+    parser.error('--config-path specified without --processed-config-path')
+  elif args.processed_config_path and not args.config_path:
+    parser.error('--processed-config-path specified without --config-path')
+
+  # Everything that should trigger a lint re-run when its contents change.
+  input_paths = [
+      args.lint_path,
+      args.platform_xml_path,
+  ]
+  if args.config_path:
+    input_paths.append(args.config_path)
+  if args.jar_path:
+    input_paths.append(args.jar_path)
+  if args.manifest_path:
+    input_paths.append(args.manifest_path)
+  if sources:
+    input_paths.extend(sources)
+  classpath = []
+  for gyp_list in args.classpath:
+    classpath.extend(build_utils.ParseGnList(gyp_list))
+  input_paths.extend(classpath)
+
+  resource_sources = []
+  if args.resource_dir:
+    # Backward compatibility with GYP
+    resource_sources += [ args.resource_dir ]
+
+  for gyp_list in args.resource_sources:
+    resource_sources += build_utils.ParseGnList(gyp_list)
+
+  for resource_source in resource_sources:
+    if os.path.isdir(resource_source):
+      input_paths.extend(build_utils.FindInDirectory(resource_source, '*'))
+    else:
+      input_paths.append(resource_source)
+
+  # Non-path values that also affect the result and thus the staleness hash.
+  input_strings = [
+      args.can_fail_build,
+      args.include_unexpected_failures,
+      args.silent,
+  ]
+  if args.android_sdk_version:
+    input_strings.append(args.android_sdk_version)
+  if args.processed_config_path:
+    input_strings.append(args.processed_config_path)
+
+  disable = []
+  if args.disable:
+    disable = build_utils.ParseGnList(args.disable)
+    input_strings.extend(disable)
+
+  output_paths = [args.result_path, args.processed_config_path]
+
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(args.lint_path,
+                          args.config_path,
+                          args.processed_config_path,
+                          args.manifest_path, args.result_path,
+                          args.product_dir, sources,
+                          args.jar_path,
+                          args.cache_dir,
+                          args.android_sdk_version,
+                          args.srcjars,
+                          resource_sources,
+                          disable=disable,
+                          classpath=classpath,
+                          can_fail_build=args.can_fail_build,
+                          include_unexpected=args.include_unexpected_failures,
+                          silent=args.silent),
+      args,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      depfile_deps=classpath,
+      add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/gyp/lint.pydeps b/deps/v8/build/android/gyp/lint.pydeps
new file mode 100644
index 0000000000..a8616e4d37
--- /dev/null
+++ b/deps/v8/build/android/gyp/lint.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../gn_helpers.py
+lint.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/main_dex_list.py b/deps/v8/build/android/gyp/main_dex_list.py
new file mode 100755
index 0000000000..2435859099
--- /dev/null
+++ b/deps/v8/build/android/gyp/main_dex_list.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+from util import proguard_util
+
+
+def main(args):
+  """Generates a main-dex list for multidex builds.
+
+  Uses ProGuard to find classes matching the main-dex rules, then dx's
+  MainDexListBuilder to expand the set, via CallAndWriteDepfileIfStale so the
+  work only happens when inputs change.
+
+  Args:
+    args: Command-line argument strings (before file-arg expansion).
+
+  Returns:
+    0 on success (used as the process exit code).
+  """
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--shrinked-android-path', required=True,
+                      help='Path to shrinkedAndroid.jar')
+  parser.add_argument('--dx-path', required=True,
+                      help='Path to dx.jar')
+  parser.add_argument('--main-dex-rules-path', action='append', default=[],
+                      dest='main_dex_rules_paths',
+                      help='A file containing a list of proguard rules to use '
+                           'in determining the class to include in the '
+                           'main dex.')
+  parser.add_argument('--main-dex-list-path', required=True,
+                      help='The main dex list file to generate.')
+  parser.add_argument('--inputs',
+                      help='JARs for which a main dex list should be '
+                           'generated.')
+  parser.add_argument('--proguard-path', required=True,
+                      help='Path to the proguard executable.')
+  parser.add_argument('--negative-main-dex-globs',
+      help='GN-list of globs of .class names (e.g. org/chromium/foo/Bar.class) '
+           'that will fail the build if they match files in the main dex.')
+
+  parser.add_argument('paths', nargs='*', default=[],
+                      help='JARs for which a main dex list should be '
+                           'generated.')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(args))
+
+  # --inputs jars are both processed and recorded as depfile dependencies.
+  depfile_deps = []
+  if args.inputs:
+    args.inputs = build_utils.ParseGnList(args.inputs)
+    depfile_deps = args.inputs
+    args.paths.extend(args.inputs)
+
+  if args.negative_main_dex_globs:
+    args.negative_main_dex_globs = build_utils.ParseGnList(
+        args.negative_main_dex_globs)
+
+  # Step-1 command: ProGuard pass that keeps only rule-matched code.
+  proguard_cmd = [
+      'java', '-jar', args.proguard_path,
+      '-forceprocessing',
+      '-dontwarn', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+      '-libraryjars', args.shrinked_android_path,
+  ]
+  for m in args.main_dex_rules_paths:
+    proguard_cmd.extend(['-include', m])
+
+  # Step-2 command: dx's MainDexListBuilder expands the kept set.
+  main_dex_list_cmd = [
+      'java', '-cp', args.dx_path,
+      'com.android.multidex.MainDexListBuilder',
+      # This workaround significantly increases main dex size and doesn't
+      # seem to be needed by Chrome. See comment in the source:
+      # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java
+      '--disable-annotation-resolution-workaround',
+  ]
+
+  # Inputs/strings feeding the staleness check.
+  input_paths = list(args.paths)
+  input_paths += [
+      args.shrinked_android_path,
+      args.dx_path,
+  ]
+  input_paths += args.main_dex_rules_paths
+
+  input_strings = [
+      proguard_cmd,
+      main_dex_list_cmd,
+  ]
+  if args.negative_main_dex_globs:
+    input_strings += args.negative_main_dex_globs
+
+  output_paths = [
+      args.main_dex_list_path,
+  ]
+
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda: _OnStaleMd5(proguard_cmd, main_dex_list_cmd, args.paths,
+                          args.main_dex_list_path,
+                          args.negative_main_dex_globs),
+      args,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      depfile_deps=depfile_deps,
+      add_pydeps=False)
+
+  return 0
+
+
+def _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs):
+  """Fails the build if ProGuard kept classes matching the negative globs.
+
+  Args:
+    kept_classes: .class entry names kept by the ProGuard pass.
+    proguard_cmd: The ProGuard command list; extended with -whyareyoukeeping
+      and re-run to produce an actionable explanation for the first offender.
+    negative_main_dex_globs: Globs of class names that must NOT be in the
+      main dex.
+
+  Raises:
+    Exception: If any kept class matches a negative glob.
+  """
+  # Check if ProGuard kept any unwanted classes.
+  found_unwanted_classes = sorted(
+      p for p in kept_classes
+      if build_utils.MatchesGlob(p, negative_main_dex_globs))
+
+  if found_unwanted_classes:
+    # Convert the first entry name (e.g. org/foo/Bar.class) to a Java class
+    # name for the -whyareyoukeeping query.
+    first_class = found_unwanted_classes[0].replace(
+        '.class', '').replace('/', '.')
+    proguard_cmd += ['-whyareyoukeeping', 'class', first_class, '{}']
+    output = build_utils.CheckOutput(
+        proguard_cmd, print_stderr=False,
+        stdout_filter=proguard_util.ProguardOutputFilter())
+    raise Exception(
+        ('Found classes that should not be in the main dex:\n    {}\n\n'
+         'Here is the -whyareyoukeeping output for {}: \n{}').format(
+             '\n    '.join(found_unwanted_classes), first_class, output))
+
+
+def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path,
+                negative_main_dex_globs):
+  """Computes the main dex list and writes it to |main_dex_list_path|.
+
+  Two-step pipeline: a ProGuard pass finds rule-matched (@MainDex) code and
+  everything it reaches; MainDexListBuilder then adds classes directly
+  referenced by those. Known benign ProGuard failures ("output jar is empty",
+  "input doesn't contain any classes") result in an empty list rather than a
+  build failure.
+  """
+  paths_arg = ':'.join(paths)
+  main_dex_list = ''
+  try:
+    with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
+      # Step 1: Use ProGuard to find all @MainDex code, and all code reachable
+      # from @MainDex code (recursive).
+      proguard_cmd += [
+          '-injars', paths_arg,
+          '-outjars', temp_jar.name
+      ]
+      build_utils.CheckOutput(proguard_cmd, print_stderr=False)
+
+      # Record the classes kept by ProGuard. Not used by the build, but useful
+      # for debugging what classes are kept by ProGuard vs. MainDexListBuilder.
+      with zipfile.ZipFile(temp_jar.name) as z:
+        kept_classes = [p for p in z.namelist() if p.endswith('.class')]
+      with open(main_dex_list_path + '.partial', 'w') as f:
+        f.write('\n'.join(kept_classes) + '\n')
+
+      if negative_main_dex_globs:
+        # Perform assertions before MainDexListBuilder because:
+        # a) MainDexListBuilder is not recursive, so being included by it isn't
+        #    a huge deal.
+        # b) Errors are much more actionable.
+        _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs)
+
+      # Step 2: Expand inclusion list to all classes referenced by the .class
+      # files of kept classes (non-recursive).
+      main_dex_list_cmd += [
+          temp_jar.name, paths_arg
+      ]
+      main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
+
+  except build_utils.CalledProcessError as e:
+    # Nothing matched the rules; an empty main dex list is the correct result.
+    if 'output jar is empty' in e.output:
+      pass
+    elif "input doesn't contain any classes" in e.output:
+      pass
+    else:
+      raise
+
+  with open(main_dex_list_path, 'w') as main_dex_list_file:
+    main_dex_list_file.write(main_dex_list)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/main_dex_list.pydeps b/deps/v8/build/android/gyp/main_dex_list.pydeps
new file mode 100644
index 0000000000..8c482dfa52
--- /dev/null
+++ b/deps/v8/build/android/gyp/main_dex_list.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/main_dex_list.pydeps build/android/gyp/main_dex_list.py
+../../gn_helpers.py
+main_dex_list.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/proguard_util.py
diff --git a/deps/v8/build/android/gyp/merge_manifest.py b/deps/v8/build/android/gyp/merge_manifest.py
new file mode 100755
index 0000000000..0637d43492
--- /dev/null
+++ b/deps/v8/build/android/gyp/merge_manifest.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges dependency Android manifests into a root manifest."""
+
+import argparse
+import contextlib
+import os
+import shlex
+import sys
+import tempfile
+import xml.dom.minidom as minidom
+import xml.etree.ElementTree as ElementTree
+
+from util import build_utils
+from util import diff_utils
+
+# Tools library directory - relative to Android SDK root
+_SDK_TOOLS_LIB_DIR = os.path.join('tools', 'lib')
+
+_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger'
+_MANIFEST_MERGER_JARS = [
+ 'common{suffix}.jar',
+ 'manifest-merger{suffix}.jar',
+ 'sdk-common{suffix}.jar',
+ 'sdklib{suffix}.jar',
+]
+
+_TOOLS_NAMESPACE_PREFIX = 'tools'
+_TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+_ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+
+# Without registering namespaces ElementTree converts them to "ns0" and "ns1"
+ElementTree.register_namespace('tools', _TOOLS_NAMESPACE)
+ElementTree.register_namespace('android', _ANDROID_NAMESPACE)
+
+
+@contextlib.contextmanager
+def _ProcessManifest(manifest_path):
+  """Patches an Android manifest to always include the 'tools' namespace
+  declaration, as it is not propagated by the manifest merger from the SDK.
+
+  See https://issuetracker.google.com/issues/63411481
+
+  Yields:
+    (patched_path, package): path to a temporary patched copy of the manifest
+    (deleted when the context exits) and the manifest's package attribute.
+  """
+  doc = minidom.parse(manifest_path)
+  manifests = doc.getElementsByTagName('manifest')
+  # An Android manifest has exactly one root <manifest> element.
+  assert len(manifests) == 1
+  manifest = manifests[0]
+  package = manifest.getAttribute('package')
+
+  manifest.setAttribute('xmlns:%s' % _TOOLS_NAMESPACE_PREFIX, _TOOLS_NAMESPACE)
+
+  tmp_prefix = os.path.basename(manifest_path)
+  with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
+    doc.writexml(patched_manifest)
+    # Flush so the merger subprocess sees the full contents.
+    patched_manifest.flush()
+    yield patched_manifest.name, package
+
+
+def _BuildManifestMergerClasspath(build_vars):
+  """Returns the ':'-joined Java classpath of SDK manifest-merger jars,
+  located under <android_sdk_root>/tools/lib with the version suffix taken
+  from |build_vars|."""
+  return ':'.join([
+      os.path.join(
+          build_vars['android_sdk_root'], _SDK_TOOLS_LIB_DIR,
+          jar.format(suffix=build_vars['android_sdk_tools_version_suffix']))
+      for jar in _MANIFEST_MERGER_JARS
+  ])
+
+
+def _SortAndStripElementTree(tree, reverse_toplevel=False):
+  """Recursively drops whitespace-only text nodes and sorts each element's
+  children by their serialized form, in place.
+
+  Args:
+    tree: An ElementTree element whose subtree is normalized in place.
+    reverse_toplevel: If True, sort this level's children in reverse order.
+  """
+  for node in tree:
+    if node.text and node.text.isspace():
+      node.text = None
+    _SortAndStripElementTree(node)
+  tree[:] = sorted(tree, key=ElementTree.tostring, reverse=reverse_toplevel)
+
+
+def _NormalizeManifest(path):
+  """Returns a canonical text form of the manifest at |path| for diffing:
+  comments stripped, nodes sorted, re-indented, long lines split so each
+  attribute sits on its own line."""
+  with open(path) as f:
+    # This also strips comments and sorts node attributes alphabetically.
+    root = ElementTree.fromstring(f.read())
+
+  # Sort nodes alphabetically, recursively.
+  _SortAndStripElementTree(root, reverse_toplevel=True)
+
+  # Fix up whitespace/indentation.
+  dom = minidom.parseString(ElementTree.tostring(root))
+  lines = []
+  for l in dom.toprettyxml(indent='  ').splitlines():
+    if l.strip():
+      if len(l) > 100:
+        # Split long tags: tag name on one line, one attribute per line,
+        # aligned under the '<'. posix=False keeps attribute quoting intact.
+        indent = ' ' * l.find('<')
+        attributes = shlex.split(l, posix=False)
+        lines.append('{}{}'.format(indent, attributes[0]))
+        for attribute in attributes[1:]:
+          lines.append('{}    {}'.format(indent, attribute))
+      else:
+        lines.append(l)
+
+  return '\n'.join(lines)
+
+
+def main(argv):
+  """Merges dependency manifests into the root manifest via the SDK merger,
+  then optionally normalizes the result and diffs it against an expectations
+  file, and writes a depfile if requested."""
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+  parser.add_argument('--build-vars',
+                      help='Path to GN build vars file',
+                      required=True)
+  parser.add_argument('--root-manifest',
+                      help='Root manifest which to merge into',
+                      required=True)
+  parser.add_argument(
+      '--expected-manifest', help='Expected contents for the merged manifest.')
+  parser.add_argument('--normalized-output', help='Normalized merged manifest.')
+  parser.add_argument(
+      '--verify-expected-manifest',
+      action='store_true',
+      help='Fail if expected contents do not match merged manifest contents.')
+  parser.add_argument('--output', help='Output manifest path', required=True)
+  parser.add_argument('--extras',
+                      help='GN list of additional manifest to merge')
+  args = parser.parse_args(argv)
+
+  classpath = _BuildManifestMergerClasspath(
+      build_utils.ReadBuildVars(args.build_vars))
+
+  # AtomicOutput ensures the output manifest only appears once fully written.
+  with build_utils.AtomicOutput(args.output) as output:
+    cmd = [
+        'java',
+        '-cp',
+        classpath,
+        _MANIFEST_MERGER_MAIN_CLASS,
+        '--out',
+        output.name,
+    ]
+
+    extras = build_utils.ParseGnList(args.extras)
+    if extras:
+      cmd += ['--libs', ':'.join(extras)]
+
+    with _ProcessManifest(args.root_manifest) as tup:
+      root_manifest, package = tup
+      cmd += ['--main', root_manifest, '--property', 'PACKAGE=' + package]
+      build_utils.CheckOutput(cmd,
+        # https://issuetracker.google.com/issues/63514300:
+        # The merger doesn't set a nonzero exit code for failures.
+        fail_func=lambda returncode, stderr: returncode != 0 or
+          build_utils.IsTimeStale(output.name, [root_manifest] + extras))
+
+  if args.expected_manifest:
+    # Normalize the merged output and diff it against the checked-in
+    # expectations; only fail the build when explicitly asked to.
+    with build_utils.AtomicOutput(args.normalized_output) as normalized_output:
+      normalized_output.write(_NormalizeManifest(args.output))
+    msg = diff_utils.DiffFileContents(args.expected_manifest,
+                                      args.normalized_output)
+    if msg:
+      sys.stderr.write("""\
+AndroidManifest.xml expectations file needs updating. For details see:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md
+""")
+      sys.stderr.write(msg)
+      if args.verify_expected_manifest:
+        sys.exit(1)
+
+  if args.depfile:
+    inputs = extras + classpath.split(':')
+    build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs,
+                             add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/merge_manifest.pydeps b/deps/v8/build/android/gyp/merge_manifest.pydeps
new file mode 100644
index 0000000000..797cd5fbd6
--- /dev/null
+++ b/deps/v8/build/android/gyp/merge_manifest.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py
+../../gn_helpers.py
+merge_manifest.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
diff --git a/deps/v8/build/android/gyp/prepare_resources.py b/deps/v8/build/android/gyp/prepare_resources.py
new file mode 100755
index 0000000000..a463f29645
--- /dev/null
+++ b/deps/v8/build/android/gyp/prepare_resources.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resource directories to generate .resources.zip, R.txt and
+.srcjar files."""
+
+import argparse
+import collections
+import os
+import re
+import shutil
+import sys
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+from util import resource_utils
+
+_AAPT_IGNORE_PATTERN = ':'.join([
+ 'OWNERS', # Allow OWNERS files within res/
+ '*.py', # PRESUBMIT.py sometimes exists.
+ '*.pyc',
+ '*~', # Some editors create these as temp files.
+ '.*', # Never makes sense to include dot(files/dirs).
+ '*.d.stamp', # Ignore stamp files
+ ])
+
+def _ParseArgs(args):
+ """Parses command line options.
+
+ Returns:
+ An options object as from argparse.ArgumentParser.parse_args()
+ """
+ parser, input_opts, output_opts = resource_utils.ResourceArgsParser()
+
+ input_opts.add_argument(
+ '--aapt-path', required=True, help='Path to the Android aapt tool')
+
+ input_opts.add_argument('--resource-dirs',
+ default='[]',
+ help='A list of input directories containing resources '
+ 'for this target.')
+
+ input_opts.add_argument(
+ '--shared-resources',
+ action='store_true',
+ help='Make resources shareable by generating an onResourcesLoaded() '
+ 'method in the R.java source file.')
+
+ input_opts.add_argument('--custom-package',
+ help='Optional Java package for main R.java.')
+
+ input_opts.add_argument(
+ '--android-manifest',
+ help='Optional AndroidManifest.xml path. Only used to extract a package '
+ 'name for R.java if a --custom-package is not provided.')
+
+ output_opts.add_argument(
+ '--resource-zip-out',
+ help='Path to a zip archive containing all resources from '
+ '--resource-dirs, merged into a single directory tree. This will '
+ 'also include auto-generated v14-compatible resources unless '
+ '--v14-skip is used.')
+
+ output_opts.add_argument('--srcjar-out',
+ help='Path to .srcjar to contain the generated R.java.')
+
+ output_opts.add_argument('--r-text-out',
+ help='Path to store the generated R.txt file.')
+
+ input_opts.add_argument(
+ '--v14-skip',
+ action="store_true",
+ help='Do not generate nor verify v14 resources.')
+
+ input_opts.add_argument(
+ '--strip-drawables',
+ action="store_true",
+ help='Remove drawables from the resources.')
+
+ options = parser.parse_args(args)
+
+ resource_utils.HandleCommonOptions(options)
+
+ options.resource_dirs = build_utils.ParseGnList(options.resource_dirs)
+
+ return options
+
+
+def _GenerateGlobs(pattern):
+ # This function processes the aapt ignore assets pattern into a list of globs
+ # to be used to exclude files on the python side. It removes the '!', which is
+ # used by aapt to mean 'not chatty' so it does not output if the file is
+ # ignored (we don't output anyway, so it is not required). This function does
+ # not handle the <dir> and <file> prefixes used by aapt, which are assumed not
+ # to be included in the pattern string.
+ return pattern.replace('!', '').split(':')
+
+
+def _ZipResources(resource_dirs, zip_path, ignore_pattern):
+ # Python zipfile does not provide a way to replace a file (it just writes
+ # another file with the same name). So, first collect all the files to put
+ # in the zip (with proper overriding), and then zip them.
+ # ignore_pattern is a string of ':' delimited list of globs used to ignore
+ # files that should not be part of the final resource zip.
+ files_to_zip = dict()
+ files_to_zip_without_generated = dict()
+ globs = _GenerateGlobs(ignore_pattern)
+ for d in resource_dirs:
+ for root, _, files in os.walk(d):
+ for f in files:
+ archive_path = f
+ parent_dir = os.path.relpath(root, d)
+ if parent_dir != '.':
+ archive_path = os.path.join(parent_dir, f)
+ path = os.path.join(root, f)
+ if build_utils.MatchesGlob(archive_path, globs):
+ continue
+ # We want the original resource dirs in the .info file rather than the
+ # generated overridden path.
+ if not path.startswith('/tmp'):
+ files_to_zip_without_generated[archive_path] = path
+ files_to_zip[archive_path] = path
+ resource_utils.CreateResourceInfoFile(files_to_zip_without_generated,
+ zip_path)
+ build_utils.DoZip(files_to_zip.iteritems(), zip_path)
+
+
+def _GenerateRTxt(options, dep_subdirs, gen_dir):
+ """Generate R.txt file.
+
+ Args:
+ options: The command-line options tuple.
+ dep_subdirs: List of directories containing extracted dependency resources.
+ gen_dir: Location where the aapt-generated files will go. In particular
+ the output file is always generated as |{gen_dir}/R.txt|.
+ """
+ # NOTE: This uses aapt rather than aapt2 because 'aapt2 compile' does not
+ # support the --output-text-symbols option yet (https://crbug.com/820460).
+ package_command = [options.aapt_path,
+ 'package',
+ '-m',
+ '-M', resource_utils.EMPTY_ANDROID_MANIFEST_PATH,
+ '--no-crunch',
+ '--auto-add-overlay',
+ '--no-version-vectors',
+ ]
+ for j in options.include_resources:
+ package_command += ['-I', j]
+
+ ignore_pattern = _AAPT_IGNORE_PATTERN
+ if options.strip_drawables:
+ ignore_pattern += ':*drawable*'
+ package_command += [
+ '--output-text-symbols',
+ gen_dir,
+ '-J',
+ gen_dir, # Required for R.txt generation.
+ '--ignore-assets',
+ ignore_pattern
+ ]
+
+ # Adding all dependencies as sources is necessary for @type/foo references
+ # to symbols within dependencies to resolve. However, it has the side-effect
+ # that all Java symbols from dependencies are copied into the new R.java.
+ # E.g.: It enables an arguably incorrect usage of
+ # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be
+ # more correct. This is just how Android works.
+ for d in dep_subdirs:
+ package_command += ['-S', d]
+
+ for d in options.resource_dirs:
+ package_command += ['-S', d]
+
+ # Only creates an R.txt
+ build_utils.CheckOutput(
+ package_command, print_stdout=False, print_stderr=False)
+
+
+def _GenerateResourcesZip(output_resource_zip, input_resource_dirs, v14_skip,
+ strip_drawables, temp_dir):
+ """Generate a .resources.zip file from a list of input resource dirs.
+
+ Args:
+ output_resource_zip: Path to the output .resources.zip file.
+ input_resource_dirs: A list of input resource directories.
+ v14_skip: If False, then v14-compatible resources will also be
+ generated in |{temp_dir}/v14| and added to the final zip.
+ temp_dir: Path to temporary directory.
+ """
+ if not v14_skip:
+ # Generate v14-compatible resources in temp_dir.
+ v14_dir = os.path.join(temp_dir, 'v14')
+ build_utils.MakeDirectory(v14_dir)
+
+ for resource_dir in input_resource_dirs:
+ generate_v14_compatible_resources.GenerateV14Resources(
+ resource_dir,
+ v14_dir)
+
+ input_resource_dirs.append(v14_dir)
+
+ ignore_pattern = _AAPT_IGNORE_PATTERN
+ if strip_drawables:
+ ignore_pattern += ':*drawable*'
+ _ZipResources(input_resource_dirs, output_resource_zip, ignore_pattern)
+
+
+def _OnStaleMd5(options):
+ with resource_utils.BuildContext() as build:
+ if options.r_text_in:
+ r_txt_path = options.r_text_in
+ else:
+ # Extract dependencies to resolve @foo/type references into
+ # dependent packages.
+ dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
+ build.deps_dir)
+
+ _GenerateRTxt(options, dep_subdirs, build.gen_dir)
+ r_txt_path = build.r_txt_path
+
+ # 'aapt' doesn't generate any R.txt file if res/ was empty.
+ if not os.path.exists(r_txt_path):
+ build_utils.Touch(r_txt_path)
+
+ if options.r_text_out:
+ shutil.copyfile(r_txt_path, options.r_text_out)
+
+ if options.srcjar_out:
+ package = options.custom_package
+ if not package and options.android_manifest:
+ package = resource_utils.ExtractPackageFromManifest(
+ options.android_manifest)
+
+ # Don't create a .java file for the current resource target when no
+ # package name was provided (either by manifest or build rules).
+ if package:
+ # All resource IDs should be non-final here, but the
+ # onResourcesLoaded() method should only be generated if
+ # --shared-resources is used.
+ rjava_build_options = resource_utils.RJavaBuildOptions()
+ rjava_build_options.ExportAllResources()
+ rjava_build_options.ExportAllStyleables()
+ if options.shared_resources:
+ rjava_build_options.GenerateOnResourcesLoaded()
+
+ resource_utils.CreateRJavaFiles(
+ build.srcjar_dir, package, r_txt_path,
+ options.extra_res_packages,
+ options.extra_r_text_files,
+ rjava_build_options)
+
+ build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)
+
+ if options.resource_zip_out:
+ _GenerateResourcesZip(options.resource_zip_out, options.resource_dirs,
+ options.v14_skip, options.strip_drawables,
+ build.temp_dir)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ # Order of these must match order specified in GN so that the correct one
+ # appears first in the depfile.
+ possible_output_paths = [
+ options.resource_zip_out,
+ options.r_text_out,
+ options.srcjar_out,
+ ]
+ output_paths = [x for x in possible_output_paths if x]
+
+ # List python deps in input_strings rather than input_paths since their
+ # contents do not change what gets written to the depfile.
+ input_strings = options.extra_res_packages + [
+ options.custom_package,
+ options.shared_resources,
+ options.v14_skip,
+ options.strip_drawables,
+ ]
+
+ possible_input_paths = [
+ options.aapt_path,
+ options.android_manifest,
+ ]
+ possible_input_paths += options.include_resources
+ input_paths = [x for x in possible_input_paths if x]
+ input_paths.extend(options.dependencies_res_zips)
+ input_paths.extend(options.extra_r_text_files)
+
+ # Resource files aren't explicitly listed in GN. Listing them in the depfile
+ # ensures the target will be marked stale when resource files are removed.
+ depfile_deps = []
+ resource_names = []
+ for resource_dir in options.resource_dirs:
+ for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+ # Don't list the empty .keep file in depfile. Since it doesn't end up
+ # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
+ # if ever moved.
+ if not resource_file.endswith(os.path.join('empty', '.keep')):
+ input_paths.append(resource_file)
+ depfile_deps.append(resource_file)
+ resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+ # Resource filenames matter to the output, so add them to strings as well.
+ # This matters if a file is renamed but not changed (http://crbug.com/597126).
+ input_strings.extend(sorted(resource_names))
+
+ build_utils.CallAndWriteDepfileIfStale(
+ lambda: _OnStaleMd5(options),
+ options,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ depfile_deps=depfile_deps,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/prepare_resources.pydeps b/deps/v8/build/android/gyp/prepare_resources.pydeps
new file mode 100644
index 0000000000..0e9ccfbe5e
--- /dev/null
+++ b/deps/v8/build/android/gyp/prepare_resources.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+generate_v14_compatible_resources.py
+prepare_resources.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/deps/v8/build/android/gyp/proguard.py b/deps/v8/build/android/gyp/proguard.py
new file mode 100755
index 0000000000..bb86b2dca6
--- /dev/null
+++ b/deps/v8/build/android/gyp/proguard.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import cStringIO
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+from util import diff_utils
+from util import proguard_util
+
+_GENERATED_PROGUARD_HEADER = """
+################################################################################
+# Dynamically generated from build/android/gyp/proguard.py
+################################################################################
+"""
+
+# Example:
+# android.arch.core.internal.SafeIterableMap$Entry -> b:
+# 1:1:java.lang.Object getKey():353:353 -> getKey
+# 2:2:java.lang.Object getValue():359:359 -> getValue
+def _RemoveMethodMappings(orig_path, out_fd):
+ with open(orig_path) as in_fd:
+ for line in in_fd:
+ if line[:1] != ' ':
+ out_fd.write(line)
+ out_fd.flush()
+
+
+def _ParseOptions(args):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_option('--proguard-path',
+ help='Path to the proguard.jar to use.')
+ parser.add_option('--r8-path',
+ help='Path to the R8.jar to use.')
+ parser.add_option('--input-paths',
+ help='Paths to the .jar files proguard should run on.')
+ parser.add_option('--output-path', help='Path to the generated .jar file.')
+ parser.add_option('--proguard-configs', action='append',
+ help='Paths to proguard configuration files.')
+ parser.add_option('--proguard-config-exclusions',
+ default='',
+ help='GN list of paths to proguard configuration files '
+ 'included by --proguard-configs, but that should '
+ 'not actually be included.')
+ parser.add_option(
+ '--apply-mapping', help='Path to proguard mapping to apply.')
+ parser.add_option('--mapping-output',
+ help='Path for proguard to output mapping file to.')
+ parser.add_option(
+ '--output-config',
+ help='Path to write the merged proguard config file to.')
+ parser.add_option(
+ '--expected-configs-file',
+ help='Path to a file containing the expected merged proguard configs')
+ parser.add_option(
+ '--verify-expected-configs',
+ action='store_true',
+ help='Fail if the expected merged proguard configs differ from the '
+ 'generated merged proguard configs.')
+ parser.add_option('--classpath', action='append',
+ help='Classpath for proguard.')
+ parser.add_option('--main-dex-rules-path', action='append',
+ help='Paths to main dex rules for multidex'
+ '- only works with R8.')
+ parser.add_option('--min-api', default='',
+ help='Minimum Android API level compatibility.')
+ parser.add_option('--verbose', '-v', action='store_true',
+ help='Print all proguard output')
+ parser.add_option(
+ '--repackage-classes',
+ help='Unique package name given to an asynchronously proguarded module')
+
+ options, _ = parser.parse_args(args)
+
+ assert not options.main_dex_rules_path or options.r8_path, \
+ 'R8 must be enabled to pass main dex rules.'
+
+ classpath = []
+ for arg in options.classpath:
+ classpath += build_utils.ParseGnList(arg)
+ options.classpath = classpath
+
+ configs = []
+ for arg in options.proguard_configs:
+ configs += build_utils.ParseGnList(arg)
+ options.proguard_configs = configs
+ options.proguard_config_exclusions = (
+ build_utils.ParseGnList(options.proguard_config_exclusions))
+
+ options.input_paths = build_utils.ParseGnList(options.input_paths)
+
+ if not options.mapping_output:
+ options.mapping_output = options.output_path + '.mapping'
+
+ if options.apply_mapping:
+ options.apply_mapping = os.path.abspath(options.apply_mapping)
+
+
+ return options
+
+
+def _VerifyExpectedConfigs(expected_path, actual_path, fail_on_exit):
+ msg = diff_utils.DiffFileContents(expected_path, actual_path)
+ if not msg:
+ return
+
+ sys.stderr.write("""\
+Proguard flag expectations file needs updating. For details see:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md
+""")
+ sys.stderr.write(msg)
+ if fail_on_exit:
+ sys.exit(1)
+
+
+def _MoveTempDexFile(tmp_dex_dir, dex_path):
+ """Move the temp dex file out of |tmp_dex_dir|.
+
+ Args:
+ tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp().
+ The directory should have just a single file.
+ dex_path: Target path to move dex file to.
+
+ Raises:
+ Exception if there are multiple files in |tmp_dex_dir|.
+ """
+ tempfiles = os.listdir(tmp_dex_dir)
+ if len(tempfiles) > 1:
+ raise Exception('%d files created, expected 1' % len(tempfiles))
+
+ tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0])
+ shutil.move(tmp_dex_path, dex_path)
+
+
+def _CreateR8Command(options, map_output_path, output_dir, tmp_config_path,
+ libraries):
+ cmd = [
+ 'java', '-jar', options.r8_path,
+ '--no-desugaring',
+ '--no-data-resources',
+ '--output', output_dir,
+ '--pg-map-output', map_output_path,
+ ]
+
+ for lib in libraries:
+ cmd += ['--lib', lib]
+
+ for config_file in options.proguard_configs:
+ cmd += ['--pg-conf', config_file]
+
+ temp_config_string = ''
+ if options.apply_mapping or options.repackage_classes or options.min_api:
+ with open(tmp_config_path, 'w') as f:
+ if options.apply_mapping:
+ temp_config_string += '-applymapping \'%s\'\n' % (options.apply_mapping)
+ if options.repackage_classes:
+ temp_config_string += '-repackageclasses \'%s\'\n' % (
+ options.repackage_classes)
+ if options.min_api:
+ temp_config_string += (
+ '-assumevalues class android.os.Build$VERSION {\n' +
+ ' public static final int SDK_INT return ' + options.min_api +
+ '..9999;\n}\n')
+ f.write(temp_config_string)
+ cmd += ['--pg-conf', tmp_config_path]
+
+ if options.main_dex_rules_path:
+ for main_dex_rule in options.main_dex_rules_path:
+ cmd += ['--main-dex-rules', main_dex_rule]
+
+ cmd += options.input_paths
+ return cmd, temp_config_string
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseOptions(args)
+
+ libraries = []
+ for p in options.classpath:
+ # If a jar is part of input no need to include it as library jar.
+ if p not in libraries and p not in options.input_paths:
+ libraries.append(p)
+
+ # TODO(agrieve): Remove proguard usages.
+ if options.r8_path:
+ temp_config_string = ''
+ with build_utils.TempDir() as tmp_dir:
+ tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
+ tmp_proguard_config_path = os.path.join(tmp_dir, 'proguard_config.txt')
+ # If there is no output (no classes are kept), this prevents this script
+ # from failing.
+ build_utils.Touch(tmp_mapping_path)
+
+ f = cStringIO.StringIO()
+ proguard_util.WriteFlagsFile(
+ options.proguard_configs, f, exclude_generated=True)
+ merged_configs = f.getvalue()
+ # Fix up line endings (third_party configs can have windows endings)
+ merged_configs = merged_configs.replace('\r', '')
+ f.close()
+ print_stdout = '-whyareyoukeeping' in merged_configs
+
+ if options.output_path.endswith('.dex'):
+ with build_utils.TempDir() as tmp_dex_dir:
+ cmd, temp_config_string = _CreateR8Command(
+ options, tmp_mapping_path, tmp_dex_dir, tmp_proguard_config_path,
+ libraries)
+ build_utils.CheckOutput(cmd, print_stdout=print_stdout)
+ _MoveTempDexFile(tmp_dex_dir, options.output_path)
+ else:
+ cmd, temp_config_string = _CreateR8Command(
+ options, tmp_mapping_path, options.output_path,
+ tmp_proguard_config_path, libraries)
+ build_utils.CheckOutput(cmd, print_stdout=print_stdout)
+
+ # Copy output files to correct locations.
+ with build_utils.AtomicOutput(options.mapping_output) as mapping:
+ # Mapping files generated by R8 include comments that may break
+ # some of our tooling so remove those.
+ with open(tmp_mapping_path) as tmp:
+ mapping.writelines(l for l in tmp if not l.startswith('#'))
+
+ with build_utils.AtomicOutput(options.output_config) as f:
+ f.write(merged_configs)
+ if temp_config_string:
+ f.write(_GENERATED_PROGUARD_HEADER)
+ f.write(temp_config_string)
+
+ if options.expected_configs_file:
+ _VerifyExpectedConfigs(options.expected_configs_file,
+ options.output_config,
+ options.verify_expected_configs)
+
+ other_inputs = []
+ if options.apply_mapping:
+ other_inputs += options.apply_mapping
+
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.output_path,
+ inputs=options.proguard_configs + options.input_paths + libraries +
+ other_inputs,
+ add_pydeps=False)
+ else:
+ proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
+ proguard.injars(options.input_paths)
+ proguard.configs(options.proguard_configs)
+ proguard.config_exclusions(options.proguard_config_exclusions)
+ proguard.outjar(options.output_path)
+ proguard.mapping_output(options.mapping_output)
+ proguard.libraryjars(libraries)
+ proguard.verbose(options.verbose)
+ proguard.min_api(options.min_api)
+ # Do not consider the temp file as an input since its name is random.
+ input_paths = proguard.GetInputs()
+
+ with tempfile.NamedTemporaryFile() as f:
+ if options.apply_mapping:
+ input_paths.append(options.apply_mapping)
+ # Maintain only class name mappings in the .mapping file in order to
+ # work around what appears to be a ProGuard bug in -applymapping:
+ # method 'int close()' is not being kept as 'a', but remapped to 'c'
+ _RemoveMethodMappings(options.apply_mapping, f)
+ proguard.mapping(f.name)
+
+ input_strings = proguard.build()
+ if f.name in input_strings:
+ input_strings[input_strings.index(f.name)] = '$M'
+
+ build_utils.CallAndWriteDepfileIfStale(
+ proguard.CheckOutput,
+ options,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=proguard.GetOutputs(),
+ depfile_deps=proguard.GetDepfileDeps(),
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/proguard.pydeps b/deps/v8/build/android/gyp/proguard.pydeps
new file mode 100644
index 0000000000..fd870a0e4b
--- /dev/null
+++ b/deps/v8/build/android/gyp/proguard.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../gn_helpers.py
+proguard.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
+util/proguard_util.py
diff --git a/deps/v8/build/android/gyp/test/BUILD.gn b/deps/v8/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000000..2deac1d56f
--- /dev/null
+++ b/deps/v8/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,13 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+ java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+ deps = [
+ ":hello_world_java",
+ ]
+ java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+ main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000000..10860d8332
--- /dev/null
+++ b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+ public static void main(String[] args) {
+ if (args.length > 0) {
+ System.exit(Integer.parseInt(args[0]));
+ }
+ HelloWorldPrinter.print();
+ }
+}
+
diff --git a/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000000..b09673e21f
--- /dev/null
+++ b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+ public static void print() {
+ System.out.println("Hello, world!");
+ }
+}
+
diff --git a/deps/v8/build/android/gyp/util/__init__.py b/deps/v8/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/gyp/util/build_utils.py b/deps/v8/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000000..e4d7cc6128
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/build_utils.py
@@ -0,0 +1,650 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for GN action()s."""
+
+import collections
+import contextlib
+import filecmp
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+# Any new non-system import must be added to:
+# //build/config/android/internal_rules.gni
+
+from util import md5_check
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
+# Definition copied from pylib/constants/__init__.py to avoid adding
+# a dependency on pylib.
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir, os.pardir)))
+
+HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
+_HERMETIC_FILE_ATTR = (0o644 << 16)
+
+
+@contextlib.contextmanager
+def TempDir():
+ dirname = tempfile.mkdtemp()
+ try:
+ yield dirname
+ finally:
+ shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+ try:
+ os.makedirs(dir_path)
+ except OSError:
+ pass
+
+
+def DeleteDirectory(dir_path):
+ if os.path.exists(dir_path):
+ shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+ if fail_if_missing and not os.path.exists(path):
+ raise Exception(path + ' doesn\'t exist.')
+
+ MakeDirectory(os.path.dirname(path))
+ with open(path, 'a'):
+ os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter):
+ files = []
+ for root, _dirnames, filenames in os.walk(directory):
+ matched_files = fnmatch.filter(filenames, filename_filter)
+ files.extend((os.path.join(root, f) for f in matched_files))
+ return files
+
+
+def ReadBuildVars(path):
+ """Parses a build_vars.txt into a dict."""
+ with open(path) as f:
+ return dict(l.rstrip().split('=', 1) for l in f)
+
+
+def ParseGnList(value):
+ """Converts a "GN-list" command-line parameter into a list.
+
+ Conversions handled:
+ * None -> []
+ * '' -> []
+ * 'asdf' -> ['asdf']
+ * '["a", "b"]' -> ['a', 'b']
+ * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (flattened list)
+
+ The common use for this behavior is in the Android build where things can
+ take lists of @FileArg references that are expanded via ExpandFileArgs.
+ """
+ # Convert None to [].
+ if not value:
+ return []
+ # Convert a list of GN lists to a flattened list.
+ if isinstance(value, list):
+ ret = []
+ for arg in value:
+ ret.extend(ParseGnList(arg))
+ return ret
+ # Convert normal GN list.
+ if value.startswith('['):
+ return gn_helpers.GNValueParser(value).ParseList()
+ # Convert a single string value to a list.
+ return [value]
+
+
+def CheckOptions(options, parser, required=None):
+ if not required:
+ return
+ for option_name in required:
+ if getattr(options, option_name) is None:
+ parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+ old_dump = None
+ if os.path.exists(path):
+ with open(path, 'r') as oldfile:
+ old_dump = oldfile.read()
+
+ new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+ if not only_if_changed or old_dump != new_dump:
+ with open(path, 'w') as outfile:
+ outfile.write(new_dump)
+
+
+@contextlib.contextmanager
+def AtomicOutput(path, only_if_changed=True):
+ """Helper to prevent half-written outputs.
+
+ Args:
+ path: Path to the final output file, which will be written atomically.
+ only_if_changed: If True (the default), do not touch the filesystem
+ if the content has not changed.
+ Returns:
+ A Python context manager that yields a NamedTemporaryFile instance
+ that must be used by clients to write the data to. On exit, the
+ manager will try to replace the final output file with the
+ temporary one if necessary. The temporary file is always destroyed
+ on exit.
+ Example:
+ with build_utils.AtomicOutput(output_path) as tmp_file:
+ subprocess.check_call(['prog', '--output', tmp_file.name])
+ """
+ # Create in same directory to ensure same filesystem when moving.
+ with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
+ dir=os.path.dirname(path),
+ delete=False) as f:
+ try:
+ yield f
+
+ # file should be closed before comparison/move.
+ f.close()
+ if not (only_if_changed and os.path.exists(path) and
+ filecmp.cmp(f.name, path)):
+ shutil.move(f.name, path)
+ finally:
+ if os.path.exists(f.name):
+ os.unlink(f.name)
+
+
+class CalledProcessError(Exception):
+ """This exception is raised when the process run by CheckOutput
+ exits with a non-zero exit code."""
+
+ def __init__(self, cwd, args, output):
+ super(CalledProcessError, self).__init__()
+ self.cwd = cwd
+ self.args = args
+ self.output = output
+
+ def __str__(self):
+ # A user should be able to simply copy and paste the command that failed
+ # into their shell.
+ copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+ ' '.join(map(pipes.quote, self.args)))
+ return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+def FilterLines(output, filter_string):
+ """Output filter from build_utils.CheckOutput.
+
+ Args:
+ output: Executable output as from build_utils.CheckOutput.
+ filter_string: An RE string that will filter (remove) matching
+ lines from |output|.
+
+ Returns:
+ The filtered output, as a single string.
+ """
+ re_filter = re.compile(filter_string)
+ return '\n'.join(
+ line for line in output.splitlines() if not re_filter.search(line))
+
+
+# This can be used in most cases like subprocess.check_output(). The output,
+# particularly when the command fails, better highlights the command's failure.
+# If the command fails, raises a build_utils.CalledProcessError.
+def CheckOutput(args, cwd=None, env=None,
+ print_stdout=False, print_stderr=True,
+ stdout_filter=None,
+ stderr_filter=None,
+ fail_func=lambda returncode, stderr: returncode != 0):
+ if not cwd:
+ cwd = os.getcwd()
+
+ child = subprocess.Popen(args,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
+ stdout, stderr = child.communicate()
+
+ if stdout_filter is not None:
+ stdout = stdout_filter(stdout)
+
+ if stderr_filter is not None:
+ stderr = stderr_filter(stderr)
+
+ if fail_func(child.returncode, stderr):
+ raise CalledProcessError(cwd, args, stdout + stderr)
+
+ if print_stdout:
+ sys.stdout.write(stdout)
+ if print_stderr:
+ sys.stderr.write(stderr)
+
+ return stdout
+
+
+def GetModifiedTime(path):
+ # For a symlink, the modified time should be the greater of the link's
+ # modified time and the modified time of the target.
+ return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
def IsTimeStale(output, inputs):
  """Returns True if |output| is missing or older than any path in |inputs|."""
  if not os.path.exists(output):
    return True

  newest_allowed = GetModifiedTime(output)
  return any(GetModifiedTime(src) > newest_allowed for src in inputs)
+
+
+def _CheckZipPath(name):
+ if os.path.normpath(name) != name:
+ raise Exception('Non-canonical zip path: %s' % name)
+ if os.path.isabs(name):
+ raise Exception('Absolute zip path: %s' % name)
+
+
+def _IsSymlink(zip_file, name):
+ zi = zip_file.getinfo(name)
+
+ # The two high-order bytes of ZipInfo.external_attr represent
+ # UNIX permissions and file type bits.
+ return stat.S_ISLNK(zi.external_attr >> 16)
+
+
def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
               predicate=None):
  """Extracts entries from a zip archive into a directory.

  Args:
    zip_path: Path of the zip archive to read.
    path: Destination directory. Defaults to the current working directory;
      created when it does not already exist.
    no_clobber: When True, raise if an extracted file would overwrite an
      existing path.
    pattern: Optional fnmatch glob; non-matching entries are skipped.
    predicate: Optional callable taking the entry name; entries for which it
      returns a falsy value are skipped.

  Returns:
    The list of filesystem paths extracted (files and symlinks; directories
    are created but not listed).

  Raises:
    Exception: If |zip_path| is not a valid zip file, an entry has an unsafe
      path, or (with no_clobber) a destination already exists.
  """
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  if not zipfile.is_zipfile(zip_path):
    raise Exception('Invalid zip file: %s' % zip_path)

  extracted = []
  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      # Directory entries end with '/': materialize them but do not report.
      if name.endswith('/'):
        MakeDirectory(os.path.join(path, name))
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      if predicate and not predicate(name):
        continue
      _CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      if _IsSymlink(z, name):
        # Symlink entries store the link target as the entry's file data.
        dest = os.path.join(path, name)
        MakeDirectory(os.path.dirname(dest))
        os.symlink(z.read(name), dest)
        extracted.append(dest)
      else:
        z.extract(name, path)
        extracted.append(os.path.join(path, name))

  return extracted
+
+
def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
                     compress=None):
  """Adds a file to the given ZipFile with a hard-coded modified time.

  The fixed timestamp and fixed file attributes keep the archive contents
  deterministic across builds.

  Args:
    zip_file: ZipFile instance to add the file to.
    zip_path: Destination path within the zip file.
    src_path: Path of the source file. Mutually exclusive with |data|.
    data: File data as a string.
    compress: Whether to enable compression. Default is taken from ZipFile
        constructor.
  """
  assert (src_path is None) != (data is None), (
      '|src_path| and |data| are mutually exclusive.')
  _CheckZipPath(zip_path)
  zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
  zipinfo.external_attr = _HERMETIC_FILE_ATTR

  if src_path and os.path.islink(src_path):
    zipinfo.filename = zip_path
    zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
    # Symlinks are written with the link target as the entry data.
    zip_file.writestr(zipinfo, os.readlink(src_path))
    return

  # zipfile.write() does
  #   external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
  # but we want to use _HERMETIC_FILE_ATTR, so manually set
  # the few attr bits we care about (only the executable bits).
  if src_path:
    st = os.stat(src_path)
    for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
      if st.st_mode & mode:
        zipinfo.external_attr |= mode << 16

  if src_path:
    with open(src_path, 'rb') as f:
      data = f.read()

  # zipfile will deflate even when it makes the file bigger. To avoid
  # growing files, disable compression at an arbitrary cut off point.
  if len(data) < 16:
    compress = False

  # None converts to ZIP_STORED, when passed explicitly rather than the
  # default passed to the ZipFile constructor.
  compress_type = zip_file.compression
  if compress is not None:
    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
  zip_file.writestr(zipinfo, data, compress_type)
+
+
def DoZip(inputs, output, base_dir=None, compress_fn=None,
          zip_prefix_path=None):
  """Creates a zip file from a list of files.

  Args:
    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
    output: Path, fileobj, or ZipFile instance to add files to.
    base_dir: Prefix to strip from inputs.
    compress_fn: Applied to each input to determine whether or not to compress.
      By default, items will be |zipfile.ZIP_STORED|.
    zip_prefix_path: Path prepended to file path in zip file.
  """
  if base_dir is None:
    base_dir = '.'
  input_tuples = []
  for tup in inputs:
    # A bare path (Python 2 string) is shorthand for (zip_path, fs_path),
    # with the archive path derived by stripping |base_dir|.
    if isinstance(tup, basestring):
      tup = (os.path.relpath(tup, base_dir), tup)
    input_tuples.append(tup)

  # Sort by zip path to ensure stable zip ordering.
  input_tuples.sort(key=lambda tup: tup[0])

  out_zip = output
  if not isinstance(output, zipfile.ZipFile):
    out_zip = zipfile.ZipFile(output, 'w')

  try:
    for zip_path, fs_path in input_tuples:
      if zip_prefix_path:
        zip_path = os.path.join(zip_prefix_path, zip_path)
      compress = compress_fn(zip_path) if compress_fn else None
      AddToZipHermetic(out_zip, zip_path, src_path=fs_path, compress=compress)
  finally:
    # Close the ZipFile only if this function opened it.
    if output is not out_zip:
      out_zip.close()
+
+
def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
  """Creates a zip file containing every file under |base_dir|."""
  inputs = [os.path.join(root, file_name)
            for root, _, files in os.walk(base_dir)
            for file_name in files]

  with AtomicOutput(output) as f:
    DoZip(inputs, f, base_dir, compress_fn=compress_fn,
          zip_prefix_path=zip_prefix_path)
+
+
def MatchesGlob(path, filters):
  """Returns whether |path| matches at least one pattern in |filters|."""
  # Preserve the original short-circuit result (None / empty) when |filters|
  # is falsy.
  if not filters:
    return filters
  return any(fnmatch.fnmatch(path, pattern) for pattern in filters)
+
+
def MergeZips(output, input_zips, path_transform=None, compress=None):
  """Combines all files from |input_zips| into |output|.

  When the same (transformed) entry path occurs in several input zips, the
  first occurrence wins and later ones are silently skipped.

  Args:
    output: Path, fileobj, or ZipFile instance to add files to.
    input_zips: Iterable of paths to zip files to merge.
    path_transform: Called for each entry path. Returns a new path, or None to
      skip the file.
    compress: Overrides compression setting from origin zip entries.
  """
  path_transform = path_transform or (lambda p: p)
  added_names = set()

  out_zip = output
  if not isinstance(output, zipfile.ZipFile):
    out_zip = zipfile.ZipFile(output, 'w')

  try:
    for in_file in input_zips:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        # ijar creates zips with null CRCs.
        # NOTE(review): this pokes a private zipfile attribute to skip CRC
        # verification — confirm it still works with the zipfile in use.
        in_zip._expected_crc = None
        for info in in_zip.infolist():
          # Ignore directories.
          if info.filename[-1] == '/':
            continue
          dst_name = path_transform(info.filename)
          if not dst_name:
            continue
          already_added = dst_name in added_names
          if not already_added:
            if compress is not None:
              compress_entry = compress
            else:
              # Preserve the entry's original stored/deflated choice.
              compress_entry = info.compress_type != zipfile.ZIP_STORED
            AddToZipHermetic(
                out_zip,
                dst_name,
                data=in_zip.read(info),
                compress=compress_entry)
            added_names.add(dst_name)
  finally:
    # Close the ZipFile only if this function opened it.
    if output is not out_zip:
      out_zip.close()
+
+
def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph (crashes if cycles exist).

  Args:
    top: A list of the top level nodes
    deps_func: A function that takes a node and returns a list of its direct
      dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # Depth-first post-order walk. An OrderedDict records first-visit order so
  # ties are broken the same way the input listed them.
  visited = collections.OrderedDict()

  def _Visit(nodes):
    for node in nodes:
      if node not in visited:
        children = deps_func(node)
        _Visit(children)
        visited[node] = children

  _Visit(top)
  return list(visited)
+
+
def _ComputePythonDependencies():
  """Gets the paths of imported non-system python modules.

  A path is assumed to be a "system" import if it is outside of chromium's
  src/. The paths will be relative to the current directory.
  """
  _ForceLazyModulesToLoad()
  # NOTE(review): itervalues() is Python-2-only; a Python 3 port would need
  # sys.modules.values() here.
  module_paths = (m.__file__ for m in sys.modules.itervalues()
                  if m is not None and hasattr(m, '__file__'))
  abs_module_paths = map(os.path.abspath, module_paths)

  assert os.path.isabs(DIR_SOURCE_ROOT)
  non_system_module_paths = [
      p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)]
  def ConvertPycToPy(s):
    # Depfiles should reference the .py source, not its compiled .pyc.
    if s.endswith('.pyc'):
      return s[:-1]
    return s

  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
  return sorted(set(non_system_module_paths))
+
+
+def _ForceLazyModulesToLoad():
+ """Forces any lazily imported modules to fully load themselves.
+
+ Inspecting the modules' __file__ attribute causes lazily imported modules
+ (e.g. from email) to get fully imported and update sys.modules. Iterate
+ over the values until sys.modules stabilizes so that no modules are missed.
+ """
+ while True:
+ num_modules_before = len(sys.modules.keys())
+ for m in sys.modules.values():
+ if m is not None and hasattr(m, '__file__'):
+ _ = m.__file__
+ num_modules_after = len(sys.modules.keys())
+ if num_modules_before == num_modules_after:
+ break
+
+
def AddDepfileOption(parser):
  """Registers --depfile on either an optparse or an argparse parser."""
  # TODO(agrieve): Get rid of this once we've moved to argparse.
  register = (parser.add_option if hasattr(parser, 'add_option')
              else parser.add_argument)
  register('--depfile',
           help='Path to depfile (refer to `gn help depfile`)')
+
+
def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
  """Writes a ninja-style depfile recording |inputs| as deps of the output.

  Args:
    depfile_path: Path of the depfile to write (parent dirs are created).
    first_gn_output: The single output path recorded on the left-hand side.
    inputs: Optional list of input (dependency) paths.
    add_pydeps: When True, prepend the non-system Python modules currently
      imported by this process.
  """
  assert depfile_path != first_gn_output  # http://crbug.com/646165
  assert not isinstance(inputs, basestring)  # Easy mistake to make
  inputs = inputs or []
  if add_pydeps:
    inputs = _ComputePythonDependencies() + inputs
  MakeDirectory(os.path.dirname(depfile_path))
  # Ninja does not support multiple outputs in depfiles.
  with open(depfile_path, 'w') as depfile:
    # Spaces in paths are backslash-escaped per the depfile format.
    depfile.write(first_gn_output.replace(' ', '\\ '))
    depfile.write(': ')
    depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
    depfile.write('\n')
+
+
def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json.
  And then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).

  Args:
    args: Iterable of argument strings, possibly containing placeholders.

  Returns:
    A new list with every placeholder replaced by its looked-up value.
  """
  new_args = list(args)
  # Cache of parsed JSON files, keyed by path, so each file is read once.
  file_jsons = dict()
  # Raw string: '\(' in a non-raw literal is an invalid escape sequence
  # (DeprecationWarning on modern Python).
  r = re.compile(r'@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    lookup_path = match.group(1).split(':')
    file_path = lookup_path[0]
    if file_path not in file_jsons:
      with open(file_path) as f:
        file_jsons[file_path] = json.load(f)

    # Walk the key chain down to the requested value.
    expansion = file_jsons[file_path]
    for k in lookup_path[1:]:
      expansion = expansion[k]

    # This should match ParseGnList. The output is either a GN-formatted list
    # or a literal (with no quotes).
    if isinstance(expansion, list):
      new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
                     arg[match.end():])
    else:
      new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]

  return new_args
+
+
def ReadSourcesList(sources_list_file_name):
  """Reads a GN-written file containing list of file names and returns a list.

  Note that this function should not be used to parse response files.
  """
  with open(sources_list_file_name) as source_file:
    return [line.strip() for line in source_file.readlines()]
+
+
def CallAndWriteDepfileIfStale(function, options, record_path=None,
                               input_paths=None, input_strings=None,
                               output_paths=None, force=False,
                               pass_changes=False, depfile_deps=None,
                               add_pydeps=True):
  """Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.

  Depfiles are automatically added to output_paths when present in the
  |options| argument. They are then created after |function| is called.

  By default, only python dependencies are added to the depfile. If there are
  other input paths that are not captured by GN deps, then they should be
  listed in depfile_deps. It's important to write paths to the depfile that
  are already captured by GN deps since GN args can cause GN deps to change,
  and such changes are not immediately reflected in depfiles
  (http://crbug.com/589311).

  Args:
    function: Called (possibly with a Changes instance) when outputs are
      stale.
    options: Parsed options object; an optional |depfile| attribute enables
      depfile writing.
    record_path: Forwarded to md5_check.CallAndRecordIfStale().
    input_paths: Input files hashed for the staleness check.
    input_strings: Strings recorded verbatim for the staleness check.
    output_paths: Output files; at least one is required.
    force: Treat outputs as stale regardless of hashes.
    pass_changes: Whether to pass a Changes instance to |function|.
    depfile_deps: Extra dependency paths to record in the depfile.
    add_pydeps: Whether python module deps are recorded in the depfile.
  """
  if not output_paths:
    raise Exception('At least one output_path must be specified.')
  input_paths = list(input_paths or [])
  input_strings = list(input_strings or [])
  output_paths = list(output_paths or [])

  python_deps = None
  if hasattr(options, 'depfile') and options.depfile:
    # The python modules imported by this process are themselves inputs.
    python_deps = _ComputePythonDependencies()
    input_paths += python_deps
    output_paths += [options.depfile]

  def on_stale_md5(changes):
    # Runs only when the md5 check decides the outputs are stale.
    args = (changes,) if pass_changes else ()
    function(*args)
    if python_deps is not None:
      all_depfile_deps = list(python_deps) if add_pydeps else []
      if depfile_deps:
        all_depfile_deps.extend(depfile_deps)
      # add_pydeps=False: python deps were already computed above.
      WriteDepfile(options.depfile, output_paths[0], all_depfile_deps,
                   add_pydeps=False)

  md5_check.CallAndRecordIfStale(
      on_stale_md5,
      record_path=record_path,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=True)
diff --git a/deps/v8/build/android/gyp/util/build_utils_test.py b/deps/v8/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 0000000000..d462f0c676
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
# Toy dependency graph shared by the tests below: maps node -> direct deps.
# An OrderedDict keeps iteration order (and thus tie-breaking) stable.
_DEPS = collections.OrderedDict()
_DEPS['a'] = []
_DEPS['b'] = []
_DEPS['c'] = ['a']
_DEPS['d'] = ['a']
_DEPS['e'] = ['f']
_DEPS['f'] = ['a', 'd']
_DEPS['g'] = []
_DEPS['h'] = ['d', 'b', 'f']
_DEPS['i'] = ['f']
+
+
class BuildUtilsTest(unittest.TestCase):
  """Tests ordering guarantees of GetSortedTransitiveDependencies."""

  def testGetSortedTransitiveDependencies_all(self):
    # Every node as a root: output follows _DEPS insertion order for ties.
    TOP = _DEPS.keys()
    EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
    self.assertEqual(EXPECTED, actual)

  def testGetSortedTransitiveDependencies_leaves(self):
    # Leaf-most roots only: dependencies still precede their dependents.
    TOP = ['c', 'e', 'g', 'h', 'i']
    EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
    self.assertEqual(EXPECTED, actual)

  def testGetSortedTransitiveDependencies_leavesReverse(self):
    # Reversed root order changes tie-breaking but not dependency order.
    TOP = ['i', 'h', 'g', 'e', 'c']
    EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
    self.assertEqual(EXPECTED, actual)
+
+
# Allows running this test file directly: python build_utils_test.py
if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/gyp/util/diff_utils.py b/deps/v8/build/android/gyp/util/diff_utils.py
new file mode 100755
index 0000000000..b20dc27df2
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/diff_utils.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+import difflib
+from util import build_utils
+
+
def DiffFileContents(expected_path, actual_path):
  """Check file contents for equality and return the diff or None.

  Args:
    expected_path: Path of the checked-in (expected) file.
    actual_path: Path of the freshly generated (actual) file.

  Returns:
    None when the files are identical; otherwise a human-readable message
    containing a `patch`-able unified diff for updating the expected file.
  """
  with open(expected_path) as f_expected, open(actual_path) as f_actual:
    expected_lines = f_expected.readlines()
    actual_lines = f_actual.readlines()

  if expected_lines == actual_lines:
    return None

  expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)
  actual_path = os.path.relpath(actual_path, build_utils.DIR_SOURCE_ROOT)

  # NOTE(review): tofile reuses expected_path (not actual_path), apparently
  # so the emitted diff applies cleanly to the checked-in expected file —
  # confirm this is intentional before "fixing" it.
  diff = difflib.unified_diff(
      expected_lines,
      actual_lines,
      fromfile=os.path.join('before', expected_path),
      tofile=os.path.join('after', expected_path),
      n=0)

  # Space added before "patch" so that giant command is not put in bash history.
  return """\
Files Compared:
  * {}
  * {}

To update the file, run:
########### START ###########
 patch -p1 <<'END_DIFF'
{}
END_DIFF
############ END ############
""".format(expected_path, actual_path, ''.join(diff).rstrip())
diff --git a/deps/v8/build/android/gyp/util/jar_info_utils.py b/deps/v8/build/android/gyp/util/jar_info_utils.py
new file mode 100644
index 0000000000..677e4e4261
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/jar_info_utils.py
@@ -0,0 +1,51 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+# Utilities to read and write .jar.info files.
+#
+# A .jar.info file contains a simple mapping from fully-qualified Java class
+# names to the source file that actually defines it.
+#
# For APKs, the .jar.info maps the class names to the .jar file that
# contains its .class definition instead.
+
+
def ParseJarInfoFile(info_path):
  """Parse a given .jar.info file as a dictionary.

  Args:
    info_path: input .jar.info file path.
  Returns:
    A new dictionary mapping fully-qualified Java class names to file paths.
    Empty when |info_path| does not exist.
  """
  info_data = dict()
  if os.path.exists(info_path):
    with open(info_path, 'r') as info_file:
      for raw_line in info_file:
        stripped = raw_line.strip()
        if not stripped:
          continue
        # Only the first comma separates the class name from the path.
        class_name, file_path = stripped.split(',', 1)
        info_data[class_name] = file_path
  return info_data
+
+
def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
  """Generate a .jar.info file from a given dictionary.

  Args:
    output_obj: output file object.
    info_data: a mapping of fully qualified Java class names to filepaths.
    source_file_map: an optional mapping from java source file paths to the
      corresponding source .srcjar. This is because info_data may contain the
      path of Java source files that were extracted from an .srcjar into a
      temporary location.
  """
  # items() (rather than Python-2-only iteritems()) keeps this portable;
  # sorting makes the output deterministic.
  for fully_qualified_name, path in sorted(info_data.items()):
    if source_file_map and path in source_file_map:
      path = source_file_map[path]
      assert not path.startswith('/tmp'), (
          'Java file path should not be in temp dir: {}'.format(path))
    output_obj.write('{},{}\n'.format(fully_qualified_name, path))
diff --git a/deps/v8/build/android/gyp/util/java_cpp_utils.py b/deps/v8/build/android/gyp/util/java_cpp_utils.py
new file mode 100755
index 0000000000..0b9748657e
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/java_cpp_utils.py
@@ -0,0 +1,32 @@
#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+
def GetScriptName():
  """Returns the file name of the script currently being executed."""
  script_path = os.path.abspath(sys.argv[0])
  return os.path.basename(script_path)
+
+
def KCamelToShouty(s):
  """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE.

  kFooBar -> FOO_BAR
  FooBar -> FOO_BAR
  FooBAR9 -> FOO_BAR9
  FooBARBaz -> FOO_BAR_BAZ
  """
  # Anything that is not (k)CamelCase is returned untouched.
  if re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s) is None:
    return s
  without_k = re.sub(r'^k', '', s)
  # Insert _ between title-case words and whatever precedes them.
  shouty = re.sub(r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2', without_k)
  # Insert _ at lower-to-upper transitions.
  shouty = re.sub(r'([^A-Z_0-9])([A-Z])', r'\1_\2', shouty)
  return shouty.upper()
diff --git a/deps/v8/build/android/gyp/util/md5_check.py b/deps/v8/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000000..9a15ee6e75
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/md5_check.py
@@ -0,0 +1,420 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+
# When set (via the PRINT_BUILD_EXPLANATIONS environment variable) and a
# difference is detected, a diff of what changed is printed.
PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))

# An escape hatch (FORCE_REBUILD environment variable) that causes all
# targets to be rebuilt.
_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
def CallAndRecordIfStale(
    function, record_path=None, input_paths=None, input_strings=None,
    output_paths=None, force=False, pass_changes=False):
  """Calls function if outputs are stale.

  Outputs are considered stale if:
  - any output_paths are missing, or
  - the contents of any file within input_paths has changed, or
  - the contents of input_strings has changed.

  To debug which files are out-of-date, set the environment variable:
    PRINT_BUILD_EXPLANATIONS=1

  Args:
    function: The function to call.
    record_path: Path to record metadata.
      Defaults to output_paths[0] + '.md5.stamp'
    input_paths: List of paths to calculate an md5 sum on.
    input_strings: List of strings to record verbatim.
    output_paths: List of output paths.
    force: Whether to treat outputs as missing regardless of whether they
      actually are.
    pass_changes: Whether to pass a Changes instance to |function|.
  """
  assert record_path or output_paths
  input_paths = input_paths or []
  input_strings = input_strings or []
  output_paths = output_paths or []
  record_path = record_path or output_paths[0] + '.md5.stamp'

  assert record_path.endswith('.stamp'), (
      'record paths must end in \'.stamp\' so that they are easy to find '
      'and delete')

  # Per-entry tracking is only needed when change details will be consumed.
  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
  new_metadata.AddStrings(input_strings)

  for path in input_paths:
    if _IsZipFile(path):
      # Zips are tracked per-entry so Changes can report subpath diffs.
      entries = _ExtractZipEntries(path)
      new_metadata.AddZipFile(path, entries)
    else:
      new_metadata.AddFile(path, _Md5ForPath(path))

  old_metadata = None
  force = force or _FORCE_REBUILD
  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
  # When outputs are missing, don't bother gathering change information.
  if not missing_outputs and os.path.exists(record_path):
    with open(record_path, 'r') as jsonfile:
      try:
        old_metadata = _Metadata.FromFile(jsonfile)
      except:  # pylint: disable=bare-except
        pass  # Not yet using new file format.

  changes = Changes(old_metadata, new_metadata, force, missing_outputs)
  if not changes.HasChanges():
    return

  if PRINT_EXPLANATIONS:
    print('=' * 80)
    print('Target is stale: %s' % record_path)
    print(changes.DescribeDifference())
    print('=' * 80)

  args = (changes,) if pass_changes else ()
  function(*args)

  # Write the stamp only after |function| succeeds, so failed runs re-run.
  with open(record_path, 'w') as f:
    new_metadata.ToFile(f)
+
+
class Changes(object):
  """Provides an API for querying what changed between runs."""

  def __init__(self, old_metadata, new_metadata, force, missing_outputs):
    self.old_metadata = old_metadata
    self.new_metadata = new_metadata
    self.force = force
    self.missing_outputs = missing_outputs

  def _GetOldTag(self, path, subpath=None):
    # Short-circuits to None when there was no previous run to compare to.
    return self.old_metadata and self.old_metadata.GetTag(path, subpath)

  def HasChanges(self):
    """Returns whether any changes exist."""
    return (self.force or
            not self.old_metadata or
            self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5() or
            self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())

  def AddedOrModifiedOnly(self):
    """Returns whether the only changes were from added or modified (sub)files.

    No missing outputs, no removed paths/subpaths.
    """
    # Note: missing outputs are covered indirectly — when outputs were
    # missing, the caller never loaded old_metadata, so it is None here.
    if (self.force or
        not self.old_metadata or
        self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()):
      return False
    if any(self.IterRemovedPaths()):
      return False
    for path in self.IterModifiedPaths():
      if any(self.IterRemovedSubpaths(path)):
        return False
    return True

  def IterAllPaths(self):
    """Generator for paths."""
    return self.new_metadata.IterPaths()

  def IterAllSubpaths(self, path):
    """Generator for subpaths."""
    return self.new_metadata.IterSubpaths(path)

  def IterAddedPaths(self):
    """Generator for paths that were added."""
    for path in self.new_metadata.IterPaths():
      if self._GetOldTag(path) is None:
        yield path

  def IterAddedSubpaths(self, path):
    """Generator for paths that were added within the given zip file."""
    for subpath in self.new_metadata.IterSubpaths(path):
      if self._GetOldTag(path, subpath) is None:
        yield subpath

  def IterRemovedPaths(self):
    """Generator for paths that were removed."""
    if self.old_metadata:
      for path in self.old_metadata.IterPaths():
        if self.new_metadata.GetTag(path) is None:
          yield path

  def IterRemovedSubpaths(self, path):
    """Generator for paths that were removed within the given zip file."""
    if self.old_metadata:
      for subpath in self.old_metadata.IterSubpaths(path):
        if self.new_metadata.GetTag(path, subpath) is None:
          yield subpath

  def IterModifiedPaths(self):
    """Generator for paths whose contents have changed."""
    for path in self.new_metadata.IterPaths():
      old_tag = self._GetOldTag(path)
      new_tag = self.new_metadata.GetTag(path)
      if old_tag is not None and old_tag != new_tag:
        yield path

  def IterModifiedSubpaths(self, path):
    """Generator for paths within a zip file whose contents have changed."""
    for subpath in self.new_metadata.IterSubpaths(path):
      old_tag = self._GetOldTag(path, subpath)
      new_tag = self.new_metadata.GetTag(path, subpath)
      if old_tag is not None and old_tag != new_tag:
        yield subpath

  def IterChangedPaths(self):
    """Generator for all changed paths (added/removed/modified)."""
    return itertools.chain(self.IterRemovedPaths(),
                           self.IterModifiedPaths(),
                           self.IterAddedPaths())

  def IterChangedSubpaths(self, path):
    """Generator for paths within a zip that were added/removed/modified."""
    return itertools.chain(self.IterRemovedSubpaths(path),
                           self.IterModifiedSubpaths(path),
                           self.IterAddedSubpaths(path))

  def DescribeDifference(self):
    """Returns a human-readable description of what changed."""
    if self.force:
      return 'force=True'
    elif self.missing_outputs:
      return 'Outputs do not exist:\n  ' + '\n  '.join(self.missing_outputs)
    elif self.old_metadata is None:
      return 'Previous stamp file not found.'

    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
                            self.new_metadata.GetStrings())
      changed = [s for s in ndiff if not s.startswith(' ')]
      return 'Input strings changed:\n  ' + '\n  '.join(changed)

    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
      return "There's no difference."

    lines = []
    lines.extend('Added: ' + p for p in self.IterAddedPaths())
    lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
    for path in self.IterModifiedPaths():
      lines.append('Modified: ' + path)
      lines.extend('  -> Subpath added: ' + p
                   for p in self.IterAddedSubpaths(path))
      lines.extend('  -> Subpath removed: ' + p
                   for p in self.IterRemovedSubpaths(path))
      lines.extend('  -> Subpath modified: ' + p
                   for p in self.IterModifiedSubpaths(path))
    if lines:
      return 'Input files changed:\n  ' + '\n  '.join(lines)
    return 'I have no idea what changed (there is a bug).'
+
+
class _Metadata(object):
  """Data model for tracking change metadata.

  Args:
    track_entries: Enables per-file change tracking. Slower, but required for
      Changes functionality.
  """
  # Schema:
  # {
  #   "files-md5": "VALUE",
  #   "strings-md5": "VALUE",
  #   "input-files": [
  #     {
  #       "path": "path.jar",
  #       "tag": "{MD5 of entries}",
  #       "entries": [
  #         { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
  #       ]
  #     }, {
  #       "path": "path.txt",
  #       "tag": "{MD5}",
  #     }
  #   ],
  #   "input-strings": ["a", "b", ...],
  # }
  def __init__(self, track_entries=False):
    self._track_entries = track_entries
    # Aggregate md5s; computed lazily on first query (see FilesMd5()).
    self._files_md5 = None
    self._strings_md5 = None
    self._files = []
    self._strings = []
    # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
    self._file_map = None

  @classmethod
  def FromFile(cls, fileobj):
    """Returns a _Metadata initialized from a file object."""
    ret = cls()
    obj = json.load(fileobj)
    ret._files_md5 = obj['files-md5']
    ret._strings_md5 = obj['strings-md5']
    ret._files = obj.get('input-files', [])
    ret._strings = obj.get('input-strings', [])
    return ret

  def ToFile(self, fileobj):
    """Serializes metadata to the given file object."""
    obj = {
        'files-md5': self.FilesMd5(),
        'strings-md5': self.StringsMd5(),
    }
    # Per-entry details are only persisted when tracking was requested.
    if self._track_entries:
      obj['input-files'] = sorted(self._files, key=lambda e: e['path'])
      obj['input-strings'] = self._strings

    json.dump(obj, fileobj, indent=2)

  def _AssertNotQueried(self):
    # Adding inputs after an aggregate md5 was computed would silently give
    # inconsistent results, so forbid it.
    assert self._files_md5 is None
    assert self._strings_md5 is None
    assert self._file_map is None

  def AddStrings(self, values):
    """Records |values| (stringified) as verbatim string inputs."""
    self._AssertNotQueried()
    self._strings.extend(str(v) for v in values)

  def AddFile(self, path, tag):
    """Adds metadata for a non-zip file.

    Args:
      path: Path to the file.
      tag: A short string representative of the file contents.
    """
    self._AssertNotQueried()
    self._files.append({
        'path': path,
        'tag': tag,
    })

  def AddZipFile(self, path, entries):
    """Adds metadata for a zip file.

    Args:
      path: Path to the file.
      entries: List of (subpath, tag) tuples for entries within the zip.
    """
    self._AssertNotQueried()
    # The zip's own tag is derived from all of its entries' paths and tags.
    tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
                                            (e[1] for e in entries)))
    self._files.append({
        'path': path,
        'tag': tag,
        'entries': [{"path": e[0], "tag": e[1]} for e in entries],
    })

  def GetStrings(self):
    """Returns the list of input strings."""
    return self._strings

  def FilesMd5(self):
    """Lazily computes and returns the aggregate md5 of input files."""
    if self._files_md5 is None:
      # Omit paths from md5 since temporary files have random names.
      self._files_md5 = _ComputeInlineMd5(
          self.GetTag(p) for p in sorted(self.IterPaths()))
    return self._files_md5

  def StringsMd5(self):
    """Lazily computes and returns the aggregate md5 of input strings."""
    if self._strings_md5 is None:
      self._strings_md5 = _ComputeInlineMd5(self._strings)
    return self._strings_md5

  def _GetEntry(self, path, subpath=None):
    """Returns the JSON entry for the given path / subpath."""
    if self._file_map is None:
      # Build the lookup map once; _AssertNotQueried() freezes inputs after.
      self._file_map = {}
      for entry in self._files:
        self._file_map[(entry['path'], None)] = entry
        for subentry in entry.get('entries', ()):
          self._file_map[(entry['path'], subentry['path'])] = subentry
    return self._file_map.get((path, subpath))

  def GetTag(self, path, subpath=None):
    """Returns the tag for the given path / subpath."""
    ret = self._GetEntry(path, subpath)
    return ret and ret['tag']

  def IterPaths(self):
    """Returns a generator for all top-level paths."""
    return (e['path'] for e in self._files)

  def IterSubpaths(self, path):
    """Returns a generator for all subpaths in the given zip.

    If the given path is not a zip file or doesn't exist, returns an empty
    iterable.
    """
    outer_entry = self._GetEntry(path)
    if not outer_entry:
      return ()
    subentries = outer_entry.get('entries', [])
    return (entry['path'] for entry in subentries)
+
+
+def _UpdateMd5ForFile(md5, path, block_size=2**16):
+ with open(path, 'rb') as infile:
+ while True:
+ data = infile.read(block_size)
+ if not data:
+ break
+ md5.update(data)
+
+
def _UpdateMd5ForDirectory(md5, dir_path):
  """Recursively hashes the contents of every file under |dir_path|."""
  for root, _, file_names in os.walk(dir_path):
    for full_path in (os.path.join(root, name) for name in file_names):
      _UpdateMd5ForFile(md5, full_path)
+
+
def _Md5ForPath(path):
  """Returns the md5 hexdigest of a file, or of an entire directory tree."""
  digest = hashlib.md5()
  updater = _UpdateMd5ForDirectory if os.path.isdir(path) else (
      _UpdateMd5ForFile)
  updater(digest, path)
  return digest.hexdigest()
+
+
def _ComputeInlineMd5(iterable):
  """Computes the md5 of the concatenated parameters."""
  md5 = hashlib.md5()
  for item in iterable:
    # NOTE(review): str(item) is a byte string only on Python 2; on Python 3
    # md5.update() requires bytes, so this would need .encode() — changing
    # it would alter existing digests, invalidating all stamp files.
    md5.update(str(item))
  return md5.hexdigest()
+
+
+def _IsZipFile(path):
+ """Returns whether to treat the given file as a zip file."""
+ # ijar doesn't set the CRC32 field.
+ if path.endswith('.interface.jar'):
+ return False
+ return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar')
+
+
+def _ExtractZipEntries(path):
+ """Returns a list of (path, CRC32) of all files within |path|."""
+ entries = []
+ with zipfile.ZipFile(path) as zip_file:
+ for zip_info in zip_file.infolist():
+ # Skip directories and empty files.
+ if zip_info.CRC:
+ entries.append(
+ (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+ return entries
diff --git a/deps/v8/build/android/gyp/util/md5_check_test.py b/deps/v8/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000000..41e9d3c248
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import os
+import sys
+import tempfile
+import unittest
+import zipfile
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import md5_check
+
+
+def _WriteZipFile(path, entries):
+ with zipfile.ZipFile(path, 'w') as zip_file:
+ for subpath, data in entries:
+ zip_file.writestr(subpath, data)
+
+
+class TestMd5Check(unittest.TestCase):
+ def setUp(self):
+ self.called = False
+ self.changes = None
+
+ def testCallAndRecordIfStale(self):
+ input_strings = ['string1', 'string2']
+ input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
+ input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
+ file1_contents = b'input file 1'
+ input_file1.write(file1_contents)
+ input_file1.flush()
+ # Test out empty zip file to start.
+ _WriteZipFile(input_file2.name, [])
+ input_files = [input_file1.name, input_file2.name]
+
+ record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+ def CheckCallAndRecord(should_call, message, force=False,
+ outputs_specified=False, outputs_missing=False,
+ expected_changes=None, added_or_modified_only=None):
+ output_paths = None
+ if outputs_specified:
+ output_file1 = tempfile.NamedTemporaryFile()
+ if outputs_missing:
+ output_file1.close() # Gets deleted on close().
+ output_paths = [output_file1.name]
+
+ self.called = False
+ self.changes = None
+ if expected_changes or added_or_modified_only is not None:
+ def MarkCalled(changes):
+ self.called = True
+ self.changes = changes
+ else:
+ def MarkCalled():
+ self.called = True
+
+ md5_check.CallAndRecordIfStale(
+ MarkCalled,
+ record_path=record_path.name,
+ input_paths=input_files,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ force=force,
+ pass_changes=(expected_changes or added_or_modified_only) is not None)
+ self.assertEqual(should_call, self.called, message)
+ if expected_changes:
+ description = self.changes.DescribeDifference()
+ self.assertTrue(fnmatch.fnmatch(description, expected_changes),
+ 'Expected %s to match %s' % (
+ repr(description), repr(expected_changes)))
+ if should_call and added_or_modified_only is not None:
+ self.assertEqual(added_or_modified_only,
+ self.changes.AddedOrModifiedOnly())
+
+ CheckCallAndRecord(True, 'should call when record doesn\'t exist',
+ expected_changes='Previous stamp file not found.',
+ added_or_modified_only=False)
+ CheckCallAndRecord(False, 'should not call when nothing changed')
+ CheckCallAndRecord(False, 'should not call when nothing changed #2',
+ outputs_specified=True, outputs_missing=False)
+ CheckCallAndRecord(True, 'should call when output missing',
+ outputs_specified=True, outputs_missing=True,
+ expected_changes='Outputs do not exist:*',
+ added_or_modified_only=False)
+ CheckCallAndRecord(True, force=True, message='should call when forced',
+ expected_changes='force=True',
+ added_or_modified_only=False)
+
+ input_file1.write('some more input')
+ input_file1.flush()
+ CheckCallAndRecord(True, 'changed input file should trigger call',
+ expected_changes='*Modified: %s' % input_file1.name,
+ added_or_modified_only=True)
+
+ input_files = input_files[::-1]
+ CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+ input_files = input_files[:1]
+ CheckCallAndRecord(True, 'removing file should trigger call',
+ expected_changes='*Removed: %s' % input_file1.name,
+ added_or_modified_only=False)
+
+ input_files.append(input_file1.name)
+ CheckCallAndRecord(True, 'added input file should trigger call',
+ expected_changes='*Added: %s' % input_file1.name,
+ added_or_modified_only=True)
+
+ input_strings[0] = input_strings[0] + ' a bit longer'
+ CheckCallAndRecord(True, 'changed input string should trigger call',
+ expected_changes='*Input strings changed*',
+ added_or_modified_only=False)
+
+ input_strings = input_strings[::-1]
+ CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
+ expected_changes='*Input strings changed*')
+
+ input_strings = input_strings[:1]
+ CheckCallAndRecord(True, 'removing a string should trigger call')
+
+ input_strings.append('a brand new string')
+ CheckCallAndRecord(
+ True,
+ 'added input string should trigger call',
+ added_or_modified_only=False)
+
+ _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
+ CheckCallAndRecord(True, 'added subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath added: %s' % (
+ input_file2.name, 'path/1.txt'),
+ added_or_modified_only=True)
+ _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
+ CheckCallAndRecord(True, 'changed subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath modified: %s' % (
+ input_file2.name, 'path/1.txt'),
+ added_or_modified_only=True)
+ CheckCallAndRecord(False, 'should not call when nothing changed')
+
+ _WriteZipFile(input_file2.name, [])
+ CheckCallAndRecord(True, 'removed subpath should trigger call',
+ expected_changes='*Modified: %s*Subpath removed: %s' % (
+ input_file2.name, 'path/1.txt'),
+ added_or_modified_only=False)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/gyp/util/proguard_util.py b/deps/v8/build/android/gyp/util/proguard_util.py
new file mode 100644
index 0000000000..c0fba206dc
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/proguard_util.py
@@ -0,0 +1,236 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+from util import build_utils
+
+
+class ProguardOutputFilter(object):
+ """ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
+ as well as interesting stuff (notes, warnings, etc). If stdout is entirely
+ boring, this class suppresses the output.
+ """
+
+ IGNORE_RE = re.compile(
+ r'Pro.*version|Note:|Reading|Preparing|Printing|ProgramClass:|Searching|'
+ r'jar \[|\d+ class path entries checked')
+
+ def __init__(self):
+ self._last_line_ignored = False
+ self._ignore_next_line = False
+
+ def __call__(self, output):
+ ret = []
+ for line in output.splitlines(True):
+ if self._ignore_next_line:
+ self._ignore_next_line = False
+ continue
+
+ if '***BINARY RUN STATS***' in line:
+ self._last_line_ignored = True
+ self._ignore_next_line = True
+ elif not line.startswith(' '):
+ self._last_line_ignored = bool(self.IGNORE_RE.match(line))
+ elif 'You should check if you need to specify' in line:
+ self._last_line_ignored = True
+
+ if not self._last_line_ignored:
+ ret.append(line)
+ return ''.join(ret)
+
+
+class ProguardCmdBuilder(object):
+ def __init__(self, proguard_jar):
+ assert os.path.exists(proguard_jar)
+ self._proguard_jar_path = proguard_jar
+ self._mapping = None
+ self._libraries = None
+ self._injars = None
+ self._configs = None
+ self._config_exclusions = None
+ self._outjar = None
+ self._mapping_output = None
+ self._verbose = False
+ self._min_api = None
+ self._disabled_optimizations = []
+
+ def outjar(self, path):
+ assert self._outjar is None
+ self._outjar = path
+
+ def mapping_output(self, path):
+ assert self._mapping_output is None
+ self._mapping_output = path
+
+ def mapping(self, path):
+ assert self._mapping is None
+ assert os.path.exists(path), path
+ self._mapping = path
+
+ def libraryjars(self, paths):
+ assert self._libraries is None
+ for p in paths:
+ assert os.path.exists(p), p
+ self._libraries = paths
+
+ def injars(self, paths):
+ assert self._injars is None
+ for p in paths:
+ assert os.path.exists(p), p
+ self._injars = paths
+
+ def configs(self, paths):
+ assert self._configs is None
+ self._configs = paths
+ for p in self._configs:
+ assert os.path.exists(p), p
+
+ def config_exclusions(self, paths):
+ assert self._config_exclusions is None
+ self._config_exclusions = paths
+
+ def verbose(self, verbose):
+ self._verbose = verbose
+
+ def min_api(self, min_api):
+ assert self._min_api is None
+ self._min_api = min_api
+
+ def disable_optimizations(self, optimizations):
+ self._disabled_optimizations += optimizations
+
+ def build(self):
+ assert self._injars is not None
+ assert self._outjar is not None
+ assert self._configs is not None
+ cmd = [
+ 'java', '-jar', self._proguard_jar_path,
+ '-forceprocessing',
+ ]
+
+ if self._mapping:
+ cmd += ['-applymapping', self._mapping]
+
+ if self._libraries:
+ cmd += ['-libraryjars', ':'.join(self._libraries)]
+
+ if self._min_api:
+ cmd += [
+ '-assumevalues class android.os.Build$VERSION {' +
+ ' public static final int SDK_INT return ' + self._min_api +
+ '..9999; }'
+ ]
+
+ for optimization in self._disabled_optimizations:
+ cmd += [ '-optimizations', '!' + optimization ]
+
+ # Filter to just .class files to avoid warnings about multiple inputs having
+ # the same files in META_INF/.
+ cmd += [
+ '-injars',
+ ':'.join('{}(**.class)'.format(x) for x in self._injars)
+ ]
+
+ for config_file in self.GetConfigs():
+ cmd += ['-include', config_file]
+
+ # The output jar must be specified after inputs.
+ cmd += [
+ '-outjars', self._outjar,
+ '-printseeds', self._outjar + '.seeds',
+ '-printusage', self._outjar + '.usage',
+ '-printmapping', self._mapping_output,
+ ]
+
+ if self._verbose:
+ cmd.append('-verbose')
+
+ return cmd
+
+ def GetDepfileDeps(self):
+ # The list of inputs that the GN target does not directly know about.
+ inputs = self._configs + self._injars
+ if self._libraries:
+ inputs += self._libraries
+ return inputs
+
+ def GetConfigs(self):
+ ret = list(self._configs)
+ for path in self._config_exclusions:
+ ret.remove(path)
+ return ret
+
+ def GetInputs(self):
+ inputs = self.GetDepfileDeps()
+ inputs += [self._proguard_jar_path]
+ if self._mapping:
+ inputs.append(self._mapping)
+ return inputs
+
+ def GetOutputs(self):
+ return [
+ self._outjar,
+ self._outjar + '.flags',
+ self._mapping_output,
+ self._outjar + '.seeds',
+ self._outjar + '.usage',
+ ]
+
+ def _WriteFlagsFile(self, cmd, out):
+ # Quite useful for auditing proguard flags.
+ WriteFlagsFile(self._configs, out)
+ out.write('#' * 80 + '\n')
+ out.write('# Command-line\n')
+ out.write('#' * 80 + '\n')
+ out.write('# ' + ' '.join(cmd) + '\n')
+
+ def CheckOutput(self):
+ cmd = self.build()
+
+ # There are a couple scenarios (.mapping files and switching from no
+ # proguard -> proguard) where GN's copy() target is used on output
+ # paths. These create hardlinks, so we explicitly unlink here to avoid
+ # updating files with multiple links.
+ for path in self.GetOutputs():
+ if os.path.exists(path):
+ os.unlink(path)
+
+ with open(self._outjar + '.flags', 'w') as out:
+ self._WriteFlagsFile(cmd, out)
+
+ # Warning: and Error: are sent to stderr, but messages and Note: are sent
+ # to stdout.
+ stdout_filter = None
+ stderr_filter = None
+ if not self._verbose:
+ stdout_filter = ProguardOutputFilter()
+ stderr_filter = ProguardOutputFilter()
+ build_utils.CheckOutput(cmd, print_stdout=True,
+ print_stderr=True,
+ stdout_filter=stdout_filter,
+ stderr_filter=stderr_filter)
+
+ # Proguard will skip writing -printseeds / -printusage / -printmapping if
+ # the files would be empty, but ninja needs all outputs to exist.
+ open(self._outjar + '.seeds', 'a').close()
+ open(self._outjar + '.usage', 'a').close()
+ open(self._outjar + '.mapping', 'a').close()
+
+
+def WriteFlagsFile(configs, out, exclude_generated=False):
+ for config in sorted(configs):
+ if exclude_generated and config.endswith('.resources.proguard.txt'):
+ continue
+
+ out.write('#' * 80 + '\n')
+ out.write('# ' + config + '\n')
+ out.write('#' * 80 + '\n')
+ with open(config) as config_file:
+ contents = config_file.read().rstrip()
+ # Remove numbers from generated rule comments to make file more
+ # diff'able.
+ contents = re.sub(r' #generated:\d+', '', contents)
+ out.write(contents)
+ out.write('\n\n')
diff --git a/deps/v8/build/android/gyp/util/resource_utils.py b/deps/v8/build/android/gyp/util/resource_utils.py
new file mode 100644
index 0000000000..61a4f3c238
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,834 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
+EMPTY_ANDROID_MANIFEST_PATH = os.path.join(
+ _SOURCE_ROOT, 'build', 'android', 'AndroidManifest.xml')
+
+ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+
+# A variation of these maps also exists in:
+# //base/android/java/src/org/chromium/base/LocaleUtils.java
+# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+ 'es-419': 'es-rUS',
+ 'fil': 'tl',
+ 'he': 'iw',
+ 'id': 'in',
+ 'yi': 'ji',
+}
+_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
+ 'tl': 'fil',
+ 'iw': 'he',
+ 'in': 'id',
+ 'ji': 'yi',
+ 'no': 'nb', # 'no' is not a real language. http://crbug.com/920960
+}
+
+
+_xml_namespace_initialized = False
+
+
+def ToAndroidLocaleName(chromium_locale):
+  """Convert a Chromium locale name into a corresponding Android one."""
+ # First handle the special cases, these are needed to deal with Android
+ # releases *before* 5.0/Lollipop.
+ android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
+ if android_locale:
+ return android_locale
+
+ # Format of Chromium locale name is '<lang>' or '<lang>-<region>'
+ # where <lang> is a 2 or 3 letter language code (ISO 639-1 or 639-2)
+ # and region is a capitalized locale region name.
+ lang, _, region = chromium_locale.partition('-')
+ if not region:
+ return lang
+
+ # Translate newer language tags into obsolete ones. Only necessary if
+ # region is not None (e.g. 'he-IL' -> 'iw-rIL')
+ lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)
+
+ # Using '<lang>-r<region>' is now acceptable as a locale name for all
+ # versions of Android.
+ return '%s-r%s' % (lang, region)
+
+
+# ISO 639 language code + optional ("-r" + capitalized region code).
+# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
+# are supported.
+_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')
+
+# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
+# be prefixed with 'b+', and may include optional tags. e.g. 'b+en+US',
+# 'b+ja+Latn', 'b+ja+JP+Latn'
+_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+# Matches an all-uppercase region name.
+_RE_ALL_UPPERCASE = re.compile(r'^[A-Z]+$')
+
+
+def ToChromiumLocaleName(android_locale):
+ """Convert an Android locale name into a Chromium one."""
+ lang = None
+ region = None
+ m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
+ if m:
+ lang = m.group(1)
+ if m.group(2):
+ region = m.group(3)
+ else:
+ m = _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale)
+ if m:
+ lang = m.group(1)
+ if m.group(2):
+ tags = m.group(2).split('+')
+ # First all-uppercase tag is a region. This deals with cases where
+ # a special tag is placed before it (e.g. 'cmn+Hant-TW')
+ for tag in tags:
+ if _RE_ALL_UPPERCASE.match(tag):
+ region = tag
+ break
+
+ if not lang:
+ return None
+
+ # Special case for es-rUS -> es-419
+ if lang == 'es' and region == 'US':
+ return 'es-419'
+
+ lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
+ if not region:
+ return lang
+
+ return '%s-%s' % (lang, region)
+
+
+def IsAndroidLocaleQualifier(string):
+ """Returns true if |string| is a valid Android resource locale qualifier."""
+ return (_RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
+ or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string))
+
+
+def FindLocaleInStringResourceFilePath(file_path):
+ """Return Android locale name of a string resource file path.
+
+ Args:
+ file_path: A file path.
+ Returns:
+ If |file_path| is of the format '.../values-<locale>/<name>.xml', return
+ the value of <locale> (and Android locale qualifier). Otherwise return None.
+ """
+ if not file_path.endswith('.xml'):
+ return None
+ prefix = 'values-'
+ dir_name = os.path.basename(os.path.dirname(file_path))
+ if not dir_name.startswith(prefix):
+ return None
+ qualifier = dir_name[len(prefix):]
+ return qualifier if IsAndroidLocaleQualifier(qualifier) else None
+
+
+def ToAndroidLocaleList(locale_list):
+ """Convert a list of Chromium locales into the corresponding Android list."""
+ return sorted(ToAndroidLocaleName(locale) for locale in locale_list)
+
+# Represents a line from a R.txt file.
+_TextSymbolEntry = collections.namedtuple('RTextEntry',
+ ('java_type', 'resource_type', 'name', 'value'))
+
+
+def CreateResourceInfoFile(files_to_zip, zip_path):
+ """Given a mapping of archive paths to their source, write an info file.
+
+ The info file contains lines of '{archive_path},{source_path}' for ease of
+ parsing. Assumes that there is no comma in the file names.
+
+ Args:
+ files_to_zip: Dict mapping path in the zip archive to original source.
+ zip_path: Path where the zip file ends up, this is where the info file goes.
+ """
+ info_file_path = zip_path + '.info'
+ with open(info_file_path, 'w') as info_file:
+ for archive_path, source_path in files_to_zip.iteritems():
+ info_file.write('{},{}\n'.format(archive_path, source_path))
+
+
+def _ParseTextSymbolsFile(path, fix_package_ids=False):
+ """Given an R.txt file, returns a list of _TextSymbolEntry.
+
+ Args:
+ path: Input file path.
+ fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file
+ will be fixed to 0x7f.
+ Returns:
+ A list of _TextSymbolEntry instances.
+ Raises:
+ Exception: An unexpected line was detected in the input.
+ """
+ ret = []
+ with open(path) as f:
+ for line in f:
+ m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+ if not m:
+ raise Exception('Unexpected line in R.txt: %s' % line)
+ java_type, resource_type, name, value = m.groups()
+ if fix_package_ids:
+ value = _FixPackageIds(value)
+ ret.append(_TextSymbolEntry(java_type, resource_type, name, value))
+ return ret
+
+
+def _FixPackageIds(resource_value):
+ # Resource IDs for resources belonging to regular APKs have their first byte
+ # as 0x7f (package id). However with webview, since it is not a regular apk
+ # but used as a shared library, aapt is passed the --shared-resources flag
+ # which changes some of the package ids to 0x00 and 0x02. This function
+ # normalises these (0x00 and 0x02) package ids to 0x7f, which the generated
+ # code in R.java changes to the correct package id at runtime.
+ # resource_value is a string with either, a single value '0x12345678', or an
+ # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }'
+ return re.sub(r'0x(?:00|02)', r'0x7f', resource_value)
+
+
+def _GetRTxtResourceNames(r_txt_path):
+ """Parse an R.txt file and extract the set of resource names from it."""
+ return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)}
+
+
+def GetRTxtStringResourceNames(r_txt_path):
+  """Parse an R.txt file and return the list of its string resource names."""
+ return sorted({
+ entry.name
+ for entry in _ParseTextSymbolsFile(r_txt_path)
+ if entry.resource_type == 'string'
+ })
+
+
+def GenerateStringResourcesWhitelist(module_r_txt_path, whitelist_r_txt_path):
+ """Generate a whitelist of string resource IDs.
+
+ Args:
+ module_r_txt_path: Input base module R.txt path.
+ whitelist_r_txt_path: Input whitelist R.txt path.
+ Returns:
+ A dictionary mapping numerical resource IDs to the corresponding
+ string resource names. The ID values are taken from string resources in
+ |module_r_txt_path| that are also listed by name in |whitelist_r_txt_path|.
+ """
+ whitelisted_names = {
+ entry.name
+ for entry in _ParseTextSymbolsFile(whitelist_r_txt_path)
+ if entry.resource_type == 'string'
+ }
+ return {
+ int(entry.value, 0): entry.name
+ for entry in _ParseTextSymbolsFile(module_r_txt_path)
+ if entry.resource_type == 'string' and entry.name in whitelisted_names
+ }
+
+
+class RJavaBuildOptions:
+ """A class used to model the various ways to build an R.java file.
+
+ This is used to control which resource ID variables will be final or
+ non-final, and whether an onResourcesLoaded() method will be generated
+ to adjust the non-final ones, when the corresponding library is loaded
+ at runtime.
+
+ Note that by default, all resources are final, and there is no
+ method generated, which corresponds to calling ExportNoResources().
+ """
+ def __init__(self):
+ self.has_constant_ids = True
+ self.resources_whitelist = None
+ self.has_on_resources_loaded = False
+ self.export_const_styleable = False
+
+ def ExportNoResources(self):
+ """Make all resource IDs final, and don't generate a method."""
+ self.has_constant_ids = True
+ self.resources_whitelist = None
+ self.has_on_resources_loaded = False
+ self.export_const_styleable = False
+
+ def ExportAllResources(self):
+ """Make all resource IDs non-final in the R.java file."""
+ self.has_constant_ids = False
+ self.resources_whitelist = None
+
+ def ExportSomeResources(self, r_txt_file_path):
+ """Only select specific resource IDs to be non-final.
+
+ Args:
+ r_txt_file_path: The path to an R.txt file. All resources named
+      in it will be non-final in the generated R.java file, all others
+ will be final.
+ """
+ self.has_constant_ids = True
+ self.resources_whitelist = _GetRTxtResourceNames(r_txt_file_path)
+
+ def ExportAllStyleables(self):
+    """Make all styleable constants non-final, even non-resource ones.
+
+ Resources that are styleable but not of int[] type are not actually
+ resource IDs but constants. By default they are always final. Call this
+ method to make them non-final anyway in the final R.java file.
+ """
+ self.export_const_styleable = True
+
+ def GenerateOnResourcesLoaded(self):
+ """Generate an onResourcesLoaded() method.
+
+ This Java method will be called at runtime by the framework when
+ the corresponding library (which includes the R.java source file)
+ will be loaded at runtime. This corresponds to the --shared-resources
+ or --app-as-shared-lib flags of 'aapt package'.
+ """
+ self.has_on_resources_loaded = True
+
+ def _IsResourceFinal(self, entry):
+ """Determines whether a resource should be final or not.
+
+ Args:
+ entry: A _TextSymbolEntry instance.
+ Returns:
+ True iff the corresponding entry should be final.
+ """
+ if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
+ # A styleable constant may be exported as non-final after all.
+ return not self.export_const_styleable
+ elif not self.has_constant_ids:
+ # Every resource is non-final
+ return False
+ elif not self.resources_whitelist:
+      # No whitelist means all IDs are final.
+ return True
+ else:
+      # Otherwise, only those in the whitelist are non-final.
+ return entry.name not in self.resources_whitelist
+
+
+def CreateRJavaFiles(srcjar_dir, package, main_r_txt_file, extra_res_packages,
+ extra_r_txt_files, rjava_build_options):
+ """Create all R.java files for a set of packages and R.txt files.
+
+ Args:
+ srcjar_dir: The top-level output directory for the generated files.
+ package: Top-level package name.
+ main_r_txt_file: The main R.txt file containing the valid values
+ of _all_ resource IDs.
+ extra_res_packages: A list of extra package names.
+ extra_r_txt_files: A list of extra R.txt files. One per item in
+ |extra_res_packages|. Note that all resource IDs in them will be ignored,
+      and replaced by the values extracted from |main_r_txt_file|.
+ rjava_build_options: An RJavaBuildOptions instance that controls how
+ exactly the R.java file is generated.
+ Raises:
+ Exception if a package name appears several times in |extra_res_packages|
+ """
+ assert len(extra_res_packages) == len(extra_r_txt_files), \
+ 'Need one R.txt file per package'
+
+ packages = list(extra_res_packages)
+ r_txt_files = list(extra_r_txt_files)
+
+ if package and package not in packages:
+ # Sometimes, an apk target and a resources target share the same
+ # AndroidManifest.xml and thus |package| will already be in |packages|.
+ packages.append(package)
+ r_txt_files.append(main_r_txt_file)
+
+ # Map of (resource_type, name) -> Entry.
+ # Contains the correct values for resources.
+ all_resources = {}
+ for entry in _ParseTextSymbolsFile(main_r_txt_file, fix_package_ids=True):
+ all_resources[(entry.resource_type, entry.name)] = entry
+
+ # Map of package_name->resource_type->entry
+ resources_by_package = (
+ collections.defaultdict(lambda: collections.defaultdict(list)))
+ # Build the R.java files using each package's R.txt file, but replacing
+ # each entry's placeholder value with correct values from all_resources.
+ for package, r_txt_file in zip(packages, r_txt_files):
+ if package in resources_by_package:
+ raise Exception(('Package name "%s" appeared twice. All '
+ 'android_resources() targets must use unique package '
+ 'names, or no package name at all.') % package)
+ resources_by_type = resources_by_package[package]
+ # The sub-R.txt files have the wrong values at this point. Read them to
+ # figure out which entries belong to them, but use the values from the
+ # main R.txt file.
+ for entry in _ParseTextSymbolsFile(r_txt_file):
+ entry = all_resources.get((entry.resource_type, entry.name))
+ # For most cases missing entry here is an error. It means that some
+ # library claims to have or depend on a resource that isn't included into
+ # the APK. There is one notable exception: Google Play Services (GMS).
+ # GMS is shipped as a bunch of AARs. One of them - basement - contains
+ # R.txt with ids of all resources, but most of the resources are in the
+ # other AARs. However, all other AARs reference their resources via
+ # basement's R.java so the latter must contain all ids that are in its
+ # R.txt. Most targets depend on only a subset of GMS AARs so some
+ # resources are missing, which is okay because the code that references
+ # them is missing too. We can't get an id for a resource that isn't here
+ # so the only solution is to skip the resource entry entirely.
+ #
+ # We can verify that all entries referenced in the code were generated
+ # correctly by running Proguard on the APK: it will report missing
+ # fields.
+ if entry:
+ resources_by_type[entry.resource_type].append(entry)
+
+ for package, resources_by_type in resources_by_package.iteritems():
+ _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type,
+ rjava_build_options)
+
+
+def _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type,
+ rjava_build_options):
+ """Generates an R.java source file."""
+ package_r_java_dir = os.path.join(srcjar_dir, *package.split('.'))
+ build_utils.MakeDirectory(package_r_java_dir)
+ package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+ java_file_contents = _RenderRJavaSource(package, resources_by_type,
+ rjava_build_options)
+ with open(package_r_java_path, 'w') as f:
+ f.write(java_file_contents)
+
+
+# Resource IDs inside resource arrays are sorted. Application resource IDs start
+# with 0x7f but system resource IDs start with 0x01 thus system resource ids are
+# always at the start of the array. This function finds the index of the first
+# non system resource id to be used for package ID rewriting (we should not
+# rewrite system resource ids).
+def _GetNonSystemIndex(entry):
+ """Get the index of the first application resource ID within a resource
+ array."""
+ res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
+ for i, res_id in enumerate(res_ids):
+ if res_id.startswith('0x7f'):
+ return i
+ return len(res_ids)
+
+
+def _RenderRJavaSource(package, resources_by_type, rjava_build_options):
+  """Render an R.java source file. See _CreateRJavaSourceFile for args info."""
+ final_resources_by_type = collections.defaultdict(list)
+ non_final_resources_by_type = collections.defaultdict(list)
+ for res_type, resources in resources_by_type.iteritems():
+ for entry in resources:
+      # Entries in styleable that are not int[] are not actually resource ids
+ # but constants.
+ if rjava_build_options._IsResourceFinal(entry):
+ final_resources_by_type[res_type].append(entry)
+ else:
+ non_final_resources_by_type[res_type].append(entry)
+
+ # Keep these assignments all on one line to make diffing against regular
+ # aapt-generated files easier.
+ create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;')
+ create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^='
+ ' packageIdTransform;')
+ for_loop_condition = ('int i = {{ startIndex(e) }}; i < '
+ '{{ e.resource_type }}.{{ e.name }}.length; ++i')
+
+ # Here we diverge from what aapt does. Because we have so many
+ # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+ # Java imposes. For this reason we split onResourcesLoaded into different
+ # methods for each resource type.
+ template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+ private static boolean sResourcesDidLoad;
+ {% for resource_type in resource_types %}
+ public static final class {{ resource_type }} {
+ {% for e in final_resources[resource_type] %}
+ public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ {% endfor %}
+ {% for e in non_final_resources[resource_type] %}
+ {% if e.value != '0' %}
+ public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+ {% else %}
+ public static {{ e.java_type }} {{ e.name }};
+ {% endif %}
+ {% endfor %}
+ }
+ {% endfor %}
+ {% if has_on_resources_loaded %}
+ public static void onResourcesLoaded(int packageId) {
+ assert !sResourcesDidLoad;
+ sResourcesDidLoad = true;
+ int packageIdTransform = (packageId ^ 0x7f) << 24;
+ {% for resource_type in resource_types %}
+ onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
+ {% for e in non_final_resources[resource_type] %}
+ {% if e.java_type == 'int[]' %}
+ for(""" + for_loop_condition + """) {
+ """ + create_id_arr + """
+ }
+ {% endif %}
+ {% endfor %}
+ {% endfor %}
+ }
+ {% for res_type in resource_types %}
+ private static void onResourcesLoaded{{ res_type|title }} (
+ int packageIdTransform) {
+ {% for e in non_final_resources[res_type] %}
+ {% if res_type != 'styleable' and e.java_type != 'int[]' %}
+ """ + create_id + """
+ {% endif %}
+ {% endfor %}
+ }
+ {% endfor %}
+ {% endif %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+ return template.render(
+ package=package,
+ resource_types=sorted(resources_by_type),
+ has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
+ final_resources=final_resources_by_type,
+ non_final_resources=non_final_resources_by_type,
+ startIndex=_GetNonSystemIndex)
+
+
+def ExtractPackageFromManifest(manifest_path):
+ """Extract package name from Android manifest file."""
+ return ParseAndroidManifest(manifest_path)[1].get('package')
+
+
+def ExtractBinaryManifestValues(aapt2_path, apk_path):
+ """Returns (version_code, version_name, package_name) for the given apk."""
+ output = subprocess.check_output([
+ aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml'
+ ])
+ version_code = re.search(r'versionCode.*?=(\d*)', output).group(1)
+ version_name = re.search(r'versionName.*?="(.*?)"', output).group(1)
+ package_name = re.search(r'package.*?="(.*?)"', output).group(1)
+ return version_code, version_name, package_name
+
+
def ExtractArscPackage(aapt2_path, apk_path):
  """Returns (package_name, package_id) of resources.arsc from apk_path.

  Args:
    aapt2_path: Path to the aapt2 tool.
    apk_path: Path to the .apk whose resource table should be inspected.
  Raises:
    Exception: if aapt2 never prints a 'Package' line.
  """
  aapt2_proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
  for output_line in aapt2_proc.stdout:
    # Looking for a line of the form:
    # Package name=org.chromium.webview_shell id=7f
    if not output_line.startswith('Package'):
      continue
    # The rest of the dump is not needed; terminate aapt2 early.
    aapt2_proc.kill()
    tokens = output_line.split()
    # tokens[1] is 'name=<package>', tokens[2] is 'id=<hex>'.
    arsc_package_name = tokens[1].split('=')[1]
    arsc_package_id = tokens[2][3:]
    return arsc_package_name, int(arsc_package_id, 16)

  # aapt2 currently crashes when dumping webview resources, but not until after
  # it prints the "Package" line (b/130553900).
  sys.stderr.write(aapt2_proc.stderr.read())
  raise Exception('Failed to find arsc package name')
+
+
def ExtractDeps(dep_zips, deps_dir):
  """Extract a list of resource dependency zip files.

  Args:
    dep_zips: A list of zip file paths, each one will be extracted to
      a subdirectory of |deps_dir| named after the zip file's path, with
      path separators replaced by underscores (e.g.
      '/some/path/foo.zip' -> '{deps_dir}/_some_path_foo.zip/').
    deps_dir: Top-level extraction directory.
  Returns:
    The list of all sub-directory paths. Each entry is joined under
    |deps_dir| (i.e. these are full paths, not relative to |deps_dir|).
  Raises:
    Exception: If a sub-directory already exists with the same name before
      extraction.
  """
  dep_subdirs = []
  for z in dep_zips:
    # Flatten the zip's full path into a single directory name so that two
    # zips with the same basename in different directories cannot collide.
    subdirname = z.replace(os.path.sep, '_')
    subdir = os.path.join(deps_dir, subdirname)
    if os.path.exists(subdir):
      raise Exception('Resource zip name conflict: ' + subdirname)
    build_utils.ExtractAll(z, path=subdir)
    dep_subdirs.append(subdir)
  return dep_subdirs
+
+
class _ResourceBuildContext(object):
  """A temporary directory for packaging and compiling Android resources.

  Args:
    temp_dir: Optional root build directory path. If None, a temporary
      directory will be created, and removed in Close().
  """
  def __init__(self, temp_dir=None):
    """Initializes the context."""
    # The top-level temporary directory.
    if temp_dir:
      # Caller-provided directory: the caller owns it, so Close() must not
      # delete it.
      self.temp_dir = temp_dir
      self.remove_on_exit = False
    else:
      self.temp_dir = tempfile.mkdtemp()
      self.remove_on_exit = True

    # A location to store resources extracted from dependency zip files.
    self.deps_dir = os.path.join(self.temp_dir, 'deps')
    os.mkdir(self.deps_dir)
    # A location to place aapt-generated files.
    self.gen_dir = os.path.join(self.temp_dir, 'gen')
    os.mkdir(self.gen_dir)
    # Location of the generated R.txt file.
    self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
    # A location to place generated R.java files.
    self.srcjar_dir = os.path.join(self.temp_dir, 'java')
    os.mkdir(self.srcjar_dir)

  def Close(self):
    """Close the context and destroy all temporary files.

    Only removes the directory tree when it was created by this instance;
    a caller-provided |temp_dir| is left untouched.
    """
    if self.remove_on_exit:
      shutil.rmtree(self.temp_dir)
+
+
@contextlib.contextmanager
def BuildContext(temp_dir=None):
  """Generator for a _ResourceBuildContext instance.

  Args:
    temp_dir: Optional root build directory path, forwarded to
      _ResourceBuildContext.
  Yields:
    A _ResourceBuildContext whose Close() is always called on exit.
  """
  # Construct before entering the try block: if the constructor raises there
  # is no context to Close() yet, and the previous placement left 'context'
  # unbound, so the finally clause raised NameError and masked the original
  # exception.
  context = _ResourceBuildContext(temp_dir)
  try:
    yield context
  finally:
    context.Close()
+
+
def ResourceArgsParser():
  """Create an argparse.ArgumentParser instance with common argument groups.

  Returns:
    A tuple of (parser, in_group, out_group) corresponding to the parser
    instance, and the input and output argument groups for it, respectively.
  """
  arg_parser = argparse.ArgumentParser(description=__doc__)

  in_group = arg_parser.add_argument_group('Input options')
  out_group = arg_parser.add_argument_group('Output options')

  # Every resource script can emit a ninja depfile.
  build_utils.AddDepfileOption(out_group)

  in_group.add_argument(
      '--include-resources',
      required=True,
      action="append",
      help='Paths to arsc resource files used to link '
      'against. Can be specified multiple times.')

  in_group.add_argument(
      '--dependencies-res-zips',
      required=True,
      help='Resources zip archives from dependents. Required to '
      'resolve @type/foo references into dependent '
      'libraries.')

  in_group.add_argument(
      '--r-text-in',
      help='Path to pre-existing R.txt. Its resource IDs override those found '
      'in the aapt-generated R.txt when generating R.java.')

  in_group.add_argument(
      '--extra-res-packages',
      help='Additional package names to generate R.java files for.')

  in_group.add_argument(
      '--extra-r-text-files',
      help='For each additional package, the R.txt file should contain a '
      'list of resources to be included in the R.java file in the format '
      'generated by aapt.')

  return (arg_parser, in_group, out_group)
+
+
def HandleCommonOptions(options):
  """Handle common command-line options after parsing.

  Args:
    options: the result of parse_args() on the parser returned by
      ResourceArgsParser(). This function updates a few common fields:
      GN lists are expanded in place, and the optional 'extra' fields
      default to empty lists.
  """
  # Each --include-resources value is itself a GN list; expand and flatten
  # them into a single list of paths.
  flattened_includes = []
  for gn_list in options.include_resources:
    flattened_includes.extend(build_utils.ParseGnList(gn_list))
  options.include_resources = flattened_includes

  options.dependencies_res_zips = (
      build_utils.ParseGnList(options.dependencies_res_zips))

  # Don't use [] as default value since some script explicitly pass "".
  if options.extra_res_packages:
    options.extra_res_packages = (
        build_utils.ParseGnList(options.extra_res_packages))
  else:
    options.extra_res_packages = []

  if options.extra_r_text_files:
    options.extra_r_text_files = (
        build_utils.ParseGnList(options.extra_r_text_files))
  else:
    options.extra_r_text_files = []
+
+
def ParseAndroidResourceStringsFromXml(xml_data):
  """Parse an Android xml resource file and extract strings from it.

  Args:
    xml_data: XML file data.
  Returns:
    A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
    encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
    corresponding to namespaces declared in the <resources> element.
  Raises:
    Exception: if no <resources> start tag is found, or a <string> element
      is not closed.
  """
  # NOTE: This uses regular expression matching because parsing with something
  # like ElementTree makes it tedious to properly parse some of the structured
  # text found in string resources, e.g.:
  #     <string msgid="3300176832234831527" \
  #         name="abc_shareactionprovider_share_with_application">\
  #             "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
  #     </string>
  result = {}

  # Find <resources> start tag and extract namespaces from it.
  m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
  if not m:
    raise Exception('<resources> start tag expected: ' + xml_data)
  input_data = xml_data[m.end():]
  resource_attrs = m.group(1)
  # Raw string so that \s and \w are regex escapes, not (invalid) string
  # escape sequences.
  re_namespace = re.compile(r'\s*(xmlns:(\w+)="([^"]+)")')
  namespaces = {}
  while resource_attrs:
    m = re_namespace.match(resource_attrs)
    if not m:
      break
    namespaces[m.group(2)] = m.group(3)
    resource_attrs = resource_attrs[m.end(1):]

  # Find each string element now.
  re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>')
  re_string_element_end = re.compile('</string>')
  while input_data:
    m = re_string_element_start.search(input_data)
    if not m:
      break
    name = m.group(2)
    input_data = input_data[m.end():]
    m2 = re_string_element_end.search(input_data)
    if not m2:
      raise Exception('Expected closing string tag: ' + input_data)
    text = input_data[:m2.start()]
    input_data = input_data[m2.end():]
    # Strip one pair of enclosing double-quotes, if present.
    if len(text) and text[0] == '"' and text[-1] == '"':
      text = text[1:-1]
    result[name] = text

  return result, namespaces
+
+
def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
  """Generate an XML text corresponding to an Android resource strings map.

  Args:
    names_to_utf8_text: A dictionary mapping resource names to localized
      text (encoded as UTF-8).
    namespaces: A map of namespace prefix to URL.
  Returns:
    New non-Unicode string containing an XML data structure describing the
    input as an Android resource .xml file.
  """
  result = '<?xml version="1.0" encoding="utf-8"?>\n'
  result += '<resources'
  if namespaces:
    # items() (rather than the Python-2-only iteritems()) works on both
    # Python 2 and 3; entries are sorted for deterministic output.
    for prefix, url in sorted(namespaces.items()):
      result += ' xmlns:%s="%s"' % (prefix, url)
  result += '>\n'
  if not names_to_utf8_text:
    result += '<!-- this file intentionally empty -->\n'
  else:
    for name, utf8_text in sorted(names_to_utf8_text.items()):
      result += '<string name="%s">"%s"</string>\n' % (name, utf8_text)
  result += '</resources>\n'
  return result
+
+
def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
  """Remove unwanted localized strings from an Android resource .xml file.

  This function takes a |string_predicate| callable object that will
  receive a resource string name, and should return True iff the
  corresponding <string> element should be kept in the file.

  Args:
    xml_file_path: Android resource strings xml file path.
    string_predicate: A predicate function which will receive the string name
      and shall return True iff the corresponding <string> element should be
      kept in the file.
  """
  with open(xml_file_path) as f:
    xml_data = f.read()
  strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)

  string_deletion = False
  # Iterate over a snapshot of the keys: deleting entries while iterating
  # the dict directly is a RuntimeError on Python 3.
  for name in list(strings_map.keys()):
    if not string_predicate(name):
      del strings_map[name]
      string_deletion = True

  # Only rewrite the file when something was actually removed.
  if string_deletion:
    new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
    # NOTE(review): writes a str to a binary-mode file; fine on Python 2,
    # would need an explicit encode on Python 3 — confirm intended runtime.
    with open(xml_file_path, 'wb') as f:
      f.write(new_xml_data)
+
+
def _RegisterElementTreeNamespaces():
  """Idempotently registers the 'android' and 'tools' XML namespaces."""
  global _xml_namespace_initialized
  # Registration only needs to happen once per process.
  if _xml_namespace_initialized:
    return
  _xml_namespace_initialized = True
  ElementTree.register_namespace('android', ANDROID_NAMESPACE)
  ElementTree.register_namespace('tools', TOOLS_NAMESPACE)
+
+
def ParseAndroidManifest(path):
  """Parses an AndroidManifest.xml using ElementTree.

  Registers required namespaces & creates application node if missing.

  Returns tuple of:
    doc: Root xml document.
    manifest_node: the <manifest> node.
    app_node: the <application> node.
  """
  _RegisterElementTreeNamespaces()
  doc = ElementTree.parse(path)
  root = doc.getroot()
  # ElementTree.find does not work if the required tag is the root.
  manifest_node = root if root.tag == 'manifest' else doc.find('manifest')

  app_node = doc.find('application')
  if app_node is None:
    # Ensure callers can always rely on an <application> element existing.
    app_node = ElementTree.SubElement(manifest_node, 'application')

  return doc, manifest_node, app_node
diff --git a/deps/v8/build/android/gyp/util/resource_utils_test.py b/deps/v8/build/android/gyp/util/resource_utils_test.py
new file mode 100755
index 0000000000..dc1094aca0
--- /dev/null
+++ b/deps/v8/build/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python
+# coding: utf-8
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils # pylint: disable=relative-import
+
+# pylint: disable=line-too-long
+
# Sample Android strings.xml content used as parser input. It includes a
# namespaced <android:g> placeholder to exercise structured-text handling.
_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
<resources xmlns:android="http://schemas.android.com/apk/res/android">
<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
</resources>
'''

# Expected result of filtering _TEST_XML_INPUT_1 through
# _TEST_RESOURCES_WHITELIST_1.
_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
<resources xmlns:android="http://schemas.android.com/apk/res/android">
<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
</resources>
'''

# pylint: enable=line-too-long

# Expected serialization of an empty strings map with no namespaces.
_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- this file intentionally empty -->
</resources>
'''

# Parsed form of _TEST_XML_INPUT_1: string name -> UTF-8 value.
_TEST_RESOURCES_MAP_1 = {
    'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
    'opening_file_error': 'Valit. faili avamine ebaõnnestus',
    'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
    'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>',
}

# Namespaces declared on the <resources> element of _TEST_XML_INPUT_1.
_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}

# String names that should survive filtering in the whitelist test.
_TEST_RESOURCES_WHITELIST_1 = ['low_memory_error', 'structured_text']

# Extracted from one generated Chromium R.txt file, with string resource
# names shuffled randomly.
_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
int anim abc_fade_out 0x7f050001
int anim abc_grow_fade_in_from_bottom 0x7f050002
int array DefaultCookiesSettingEntries 0x7f120002
int array DefaultCookiesSettingValues 0x7f120003
int array DefaultGeolocationSettingEntries 0x7f120004
int attr actionBarDivider 0x7f0100e7
int attr actionBarStyle 0x7f0100e2
int string AllowedDomainsForAppsDesc 0x7f0c0105
int string AlternateErrorPagesEnabledDesc 0x7f0c0107
int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
int string AllowedDomainsForAppsTitle 0x7f0c0104
int string AlternateErrorPagesEnabledTitle 0x7f0c0106
int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
int styleable SnackbarLayout_android_maxWidth 0
int styleable SnackbarLayout_elevation 2
'''

# Test whitelist R.txt file. Note that AlternateErrorPagesEnabledTitle is
# listed as an 'anim' and should thus be skipped. Similarly the string
# 'ThisStringDoesNotAppear' should not be in the final result.
_TEST_WHITELIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee
int string AllowedDomainsForAppsDesc 0x7f0c0105
int string AlternateErrorPagesEnabledDesc 0x7f0c0107
int string ThisStringDoesNotAppear 0x7f0fffff
'''

# Expected whitelist result: resource id (from _TEST_R_TXT) -> name.
_TEST_R_TEXT_RESOURCES_IDS = {
    0x7f0c0105: 'AllowedDomainsForAppsDesc',
    0x7f0c0107: 'AlternateErrorPagesEnabledDesc',
}

# Names of string resources in _TEST_R_TXT, should be sorted!
_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([
    'AllowedDomainsForAppsDesc',
    'AllowedDomainsForAppsTitle',
    'AlternateErrorPagesEnabledDesc',
    'AlternateErrorPagesEnabledTitle',
    'AuthAndroidNegotiateAccountTypeDesc',
])
+
+
def _CreateTestFile(tmp_dir, file_name, file_data):
  """Write |file_data| to a new file |file_name| under |tmp_dir|.

  Returns:
    The full path of the created file.
  """
  test_file_path = os.path.join(tmp_dir, file_name)
  with open(test_file_path, 'wt') as out_file:
    out_file.write(file_data)
  return test_file_path
+
+
+
class ResourceUtilsTest(unittest.TestCase):
  """Unit tests for helper functions exported by resource_utils.py."""

  def test_GetRTxtStringResourceNames(self):
    # Only 'string' entries of the R.txt should be returned, sorted by name.
    with build_utils.TempDir() as tmp_dir:
      tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
      self.assertListEqual(
          resource_utils.GetRTxtStringResourceNames(tmp_file),
          _TEST_R_TXT_STRING_RESOURCE_NAMES)

  def test_GenerateStringResourcesWhitelist(self):
    # The whitelist should keep only 'string' entries present in both files,
    # keyed by the id found in the module R.txt.
    with build_utils.TempDir() as tmp_dir:
      tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
      tmp_whitelist_rtxt_file = _CreateTestFile(tmp_dir, "test_whitelist_R.txt",
                                                _TEST_WHITELIST_R_TXT)
      self.assertDictEqual(
          resource_utils.GenerateStringResourcesWhitelist(
              tmp_module_rtxt_file, tmp_whitelist_rtxt_file),
          _TEST_R_TEXT_RESOURCES_IDS)

  def test_IsAndroidLocaleQualifier(self):
    good_locales = [
        'en',
        'en-rUS',
        'fil',
        'fil-rPH',
        'iw',
        'iw-rIL',
        'b+en',
        'b+en+US',
        'b+ja+Latn',
        'b+ja+JP+Latn',
        'b+cmn+Hant-TW',
    ]
    bad_locales = [
        'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+'
    ]
    for locale in good_locales:
      self.assertTrue(
          resource_utils.IsAndroidLocaleQualifier(locale),
          msg="'%s' should be a good locale!" % locale)

    for locale in bad_locales:
      self.assertFalse(
          resource_utils.IsAndroidLocaleQualifier(locale),
          msg="'%s' should be a bad locale!" % locale)

  def test_ToAndroidLocaleName(self):
    _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
        'en': 'en',
        'en-US': 'en-rUS',
        'en-FOO': 'en-rFOO',
        'fil': 'tl',
        'tl': 'tl',
        'he': 'iw',
        'he-IL': 'iw-rIL',
        'id': 'in',
        'id-BAR': 'in-rBAR',
        'nb': 'nb',
        'yi': 'ji'
    }
    # NOTE(review): iteritems() is Python 2 only; items() would be portable.
    for chromium_locale, android_locale in \
        _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.iteritems():
      result = resource_utils.ToAndroidLocaleName(chromium_locale)
      self.assertEqual(result, android_locale)

  def test_ToChromiumLocaleName(self):
    _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
        'foo': 'foo',
        'foo-rBAR': 'foo-BAR',
        'b+foo': 'foo',
        'b+foo+BAR': 'foo-BAR',
        'b+foo+BAR+Whatever': 'foo-BAR',
        'b+foo+Whatever+BAR': 'foo-BAR',
        'b+foo+Whatever': 'foo',
        'en': 'en',
        'en-rUS': 'en-US',
        'en-US': None,
        'en-FOO': None,
        'en-rFOO': 'en-FOO',
        'es-rES': 'es-ES',
        'es-rUS': 'es-419',
        'tl': 'fil',
        'fil': 'fil',
        'iw': 'he',
        'iw-rIL': 'he-IL',
        'in': 'id',
        'in-rBAR': 'id-BAR',
        'id-rBAR': 'id-BAR',
        'nb': 'nb',
        'no': 'nb',  # http://crbug.com/920960
    }
    # NOTE(review): iteritems() is Python 2 only; items() would be portable.
    for android_locale, chromium_locale in \
        _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.iteritems():
      result = resource_utils.ToChromiumLocaleName(android_locale)
      self.assertEqual(result, chromium_locale)

  def test_FindLocaleInStringResourceFilePath(self):
    self.assertEqual(
        None,
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values/whatever.xml'))
    self.assertEqual(
        'foo',
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo/whatever.xml'))
    self.assertEqual(
        'foo-rBAR',
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo-rBAR/whatever.xml'))
    self.assertEqual(
        None,
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo/ignore-subdirs/whatever.xml'))

  def test_ParseAndroidResourceStringsFromXml(self):
    ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
        _TEST_XML_INPUT_1)
    self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1)
    self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)

  def test_GenerateAndroidResourceStringsXml(self):
    # First, an empty strings map, with no namespaces
    result = resource_utils.GenerateAndroidResourceStringsXml({})
    self.assertEqual(result, _TEST_XML_OUTPUT_EMPTY)

    result = resource_utils.GenerateAndroidResourceStringsXml(
        _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
    self.assertEqual(result, _TEST_XML_INPUT_1)

  @staticmethod
  def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
    """Writes a values-<locale>/strings.xml file and returns its path."""
    values_dir = os.path.join(output_dir, 'values-' + locale)
    build_utils.MakeDirectory(values_dir)
    file_path = os.path.join(values_dir, 'strings.xml')
    with open(file_path, 'w') as f:
      file_data = resource_utils.GenerateAndroidResourceStringsXml(
          string_map, namespaces)
      f.write(file_data)
    return file_path

  def _CheckTestResourceFile(self, file_path, expected_data):
    """Asserts that the content of |file_path| equals |expected_data|."""
    with open(file_path) as f:
      file_data = f.read()
    self.assertEqual(file_data, expected_data)

  def test_FilterAndroidResourceStringsXml(self):
    with build_utils.TempDir() as tmp_path:
      test_file = self._CreateTestResourceFile(
          tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
      resource_utils.FilterAndroidResourceStringsXml(
          test_file, lambda x: x in _TEST_RESOURCES_WHITELIST_1)
      self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
# Allow running this file directly as a test suite.
if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/gyp/write_build_config.py b/deps/v8/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000000..68dfac4bb8
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_build_config.py
@@ -0,0 +1,1643 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+ --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+ 1. inputs/deps of the action ensure that the files are available the first
+ time the action runs.
+ 2. Either (a) or (b)
+ a. inputs/deps ensure that the action runs whenever one of the files changes
+ b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $OUTPUT_CHROMIUM_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document tries to explain the format of `.build_config` generated during
+the Android build of Chromium. For a higher-level explanation of these files,
+please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies. The latter has the
+following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+ The following types are known by the internal GN build rules and the
+ build scripts altogether:
+
+ * [java_binary](#target_java_binary)
+ * [java_annotation_processor](#target_java_annotation_processor)
+ * [junit_binary](#target_junit_binary)
+ * [java_library](#target_java_library)
+ * [android_assets](#target_android_assets)
+ * [android_resources](#target_android_resources)
+ * [android_apk](#target_android_apk)
+ * [android_app_bundle_module](#target_android_app_bundle_module)
+ * [android_app_bundle](#target_android_app_bundle)
+ * [dist_jar](#target_dist_jar)
+ * [dist_aar](#target_dist_aar)
+ * [resource_rewriter](#target_resource_rewriter)
+ * [group](#target_group)
+
+ See later sections for more details of some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+ NOTE: Because the `.build_config` of a given target is always generated
+ after the `.build_config` of its dependencies, the `write_build_config.py`
+ script can use chains of `deps_configs` to compute transitive dependencies
+ for each target when needed.
+
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+ NOTE: For `android_resources` targets,
+ this is the package name for the corresponding R class. For `android_apk`
+ targets, this is the corresponding package name. This does *not* appear for
+ other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+# Top-level `resources` dictionary:
+
+This dictionary only appears for a few target types that can contain or
+relate to Android resources (e.g. `android_resources` or `android_apk`):
+
+* `resources['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies for the current target.
+
+* `resources['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies for the current target. Computed automatically by
+`write_build_config.py`.
+
+* `resources['extra_r_text_files']`:
+Always empty for `android_resources` types. Otherwise, the list of
+`deps_info['r_text']` entries for all `android_resources` dependencies for
+the current target. Computed automatically.
+
+
+# `.build_config` target types description:
+
+## <a name="target_group">Target type `group`</a>:
+
+This type corresponds to a simple target that is only used to group
+dependencies. It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## <a name="target_android_resources">Target type `android_resources`</a>:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+* `deps_info['resource_dirs']`:
+List of paths to the source directories containing the resources for this
+target. This key is optional, because some targets can refer to prebuilt
+`.aar` archives.
+
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (and also no `R.txt`, `R.java` or `R.class`).
+
+ If `deps_info['resource_dirs']` is missing, this must point to a prebuilt
+ `.aar` archive containing resources. Otherwise, this will point to a
+ zip archive generated at build time, wrapping the content of
+ `deps_info['resource_dirs']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['r_text']`:
+Provide the path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text']`. This is *always*
+generated from the content of `deps_info['r_text']` by the
+`build/android/gyp/process_resources.py` script.
+
+
+## <a name="target_android_assets">Target type `android_assets`</a>:
+
+This type corresponds to targets used to group Android assets, i.e. liberal
+files that will be placed under `//assets/` within the final APK.
+
+These use an `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final //assets/ sub-directory. When this happens, this contains a list of
+all renamed output file paths
+
+ NOTE: When not empty, the first items of `assets['sources']` must match
+ every item in this list. Extra sources correspond to non-renamed sources.
+
+ NOTE: This comes from the `asset_renaming_destinations` parameter for the
+ `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional. Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special `.build_config.java` source file, listing all supported Locales in
+the current build.
+
+
+## <a name="target_java_library">Target type `java_library`</a>:
+
+This type is used to describe target that wrap Java bytecode, either created
+by compiling sources, or providing them with a prebuilt jar.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools.
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running the `ijar`
+tool on `deps_info['jar_path']`
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from `deps_info['jar_path']`
+unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will point to a build-generated file.
+
+* `deps_info['java_sources_file']`:
+Path to a single `.sources` file listing all the Java sources that were used
+to generate the library (simple text format, one `.jar` path per line).
+
+* `deps_info['owned_resource_dirs']`:
+List of all resource directories belonging to all resource dependencies for
+this target.
+
+* `deps_info['owned_resource_zips']`:
+List of all resource zip files belonging to all resource dependencies for this
+target.
+
+* `deps_info['owned_resource_srcjars']`:
+List of all .srcjar files belonging to all resource dependencies for this
+target.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Appears also on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when using the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+* `deps_info['extra_classpath_interface_jars']`:
+The interface jars corresponding to extra_classpath_jars.
+
+## <a name="target_java_binary">Target type `java_binary`</a>:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['java_runtime_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['jar_path']` entries for the target and all its
+dependencies.
+
+
+## <a name="target_junit_binary">Target type `junit_binary`</a>:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except the name.
+
+
+## <a name="target_java_annotation_processor">Target type \
+`java_annotation_processor`</a>:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except the name, except that it requires a `deps_info['main_class']` entry.
+
+
+## <a name="target_android_apk">Target type `android_apk`</a>:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned, APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned, incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['path']`:
+Path to the final classes.dex file (or classes.zip in case of multi-dex)
+for this APK.
+
+* `deps_info['final_dex']['dependency_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all library
+dependencies for this APK.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']` as a string holding a Java source
+fragment, e.g. `"{\"chrome\"}"`, without any `lib` prefix, and `.so`
+suffix (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['uncompress_shared_libraries']`
+A boolean indicating whether native libraries are stored uncompressed in the
+APK.
+
+* `native['extra_shared_libraries']`
+A list of native libraries to store within the APK, in addition to those from
+`native['libraries']`. These correspond to things like the Chromium linker
+or instrumentation libraries.
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source-path>:<destination-path>`, where `<source-path>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source-path>:<destination-path>` too.
+
+* `compressed_locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+compressed in the `//assets/` directory. E.g. `"{\"am\","\ar\",\"en-US\"}"`.
+Note that the files will be stored with the `.pak` extension (e.g.
+`//assets/en-US.pak`).
+
+* `uncompressed_locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed in the `//assets/stored-locales/` directory. These are used for
+the System WebView feature only. Note that the files will be stored with the
+`.pak` extension (e.g. `//assets/stored-locales/en-US.pak`).
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `jni['all_source']`
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java source files. This is used for JNI registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_classpath_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+* `deps_info['proguard_under_test_mapping']`:
+Applicable to apks with proguard enabled that have an apk_under_test. This is
+the path to the apk_under_test's output proguard .mapping file.
+
+## <a name="target_android_app_bundle_module">Target type \
+`android_app_bundle_module`</a>:
+
+Corresponds to an Android app bundle module. Very similar to an APK and
+inherits the same fields, except that this does not generate an installable
+file (see `android_app_bundle`), and for the following omitted fields:
+
+* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and
+ `deps_info['incremental_install_json_path']` are omitted.
+
+* top-level `dist_jar` is omitted as well.
+
+In addition to `android_apk` targets though come these new fields:
+
+* `deps_info['proto_resources_path']`:
+The path of a zip archive containing the APK's resources compiled to the
+protocol buffer format (instead of regular binary xml + resources.arsc).
+
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the bundle
+module.
+
+* `deps_info['base_whitelist_rtxt_path']`:
+Optional path to an R.txt file used as a whitelist for base string resources.
+This means that any string resource listed in this file *and* in
+`deps_info['module_rtxt_path']` will end up in the base split APK of any
+`android_app_bundle` target that uses this target as its base module.
+
+This ensures that such localized strings are available to all bundle installs,
+even when language based splits are enabled (e.g. required for WebView strings
+inside the Monochrome bundle).
+
+
+## <a name="target_android_app_bundle">Target type `android_app_bundle`</a>
+
+This target type corresponds to an Android app bundle, and is built from one
+or more `android_app_bundle_module` targets listed as dependencies.
+
+
+## <a name="target_dist_aar">Target type `dist_aar`</a>:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+This always has the following entries:
+
+ * `deps_info['supports_android']` (always True).
+ * `deps_info['requires_android']` (always True).
+ * `deps_info['proguard_configs']` (optional).
+
+
+## <a name="target_dist_jar">Target type `dist_jar`</a>:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and used to create a `.jar` file that can be later
+redistributed.
+
+This always has the following entries:
+
+ * `deps_info['proguard_enabled']` (False by default).
+ * `deps_info['proguard_configs']` (optional).
+ * `deps_info['supports_android']` (True by default).
+ * `deps_info['requires_android']` (False by default).
+
+
+
+## <a name="target_resource_rewriter">Target type `resource_rewriter`</a>:
+
+The ResourceRewriter Java class is in charge of rewriting resource IDs at
+runtime, for the benefit of the System WebView feature. This is a special
+target type for it.
+
+Its `.build_config` only keeps a list of dependencies in its
+`deps_info['deps_configs']` key.
+
+## <a name="dict_javac">The `deps_info['javac']` dictionary</a>:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['resource_packages']`
+For `java_library` targets, this is the list of package names for all resource
+dependencies for the current target. Order must match the one from
+`javac['srcjars']`. For other target types, this key does not exist.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These are also always used to know when a target needs to be
+rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor` when
+invoking `javac`.
+
+## <a name="android_app_bundle">Target type `android_app_bundle`</a>:
+
+This type corresponds to an Android app bundle (`.aab` file).
+
+--------------- END_MARKDOWN ---------------------------------------------------
+TODO(estevenson): Add docs for static library synchronized proguarding.
+"""
+
+import collections
+import itertools
+import json
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+
+# Types that should never be used as a dependency of another build config.
+_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
+ 'junit_binary', 'resource_rewriter', 'android_app_bundle')
+# Types that should not allow code deps to pass through.
+_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')
+
+
+def _ExtractMarkdownDocumentation(input_text):
+ """Extract Markdown documentation from a list of input strings lines.
+
+ This generates a list of strings extracted from |input_text|, by looking
+ for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+ in_markdown = False
+ result = []
+ for line in input_text.splitlines():
+ if in_markdown:
+ if '-- END_MARKDOWN --' in line:
+ in_markdown = False
+ else:
+ result.append(line)
+ else:
+ if '-- BEGIN_MARKDOWN --' in line:
+ in_markdown = True
+
+ return result
+
+class AndroidManifest(object):
+ def __init__(self, path):
+ self.path = path
+ dom = xml.dom.minidom.parse(path)
+ manifests = dom.getElementsByTagName('manifest')
+ assert len(manifests) == 1
+ self.manifest = manifests[0]
+
+ def GetInstrumentationElements(self):
+ instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+ if len(instrumentation_els) == 0:
+ return None
+ return instrumentation_els
+
+ def CheckInstrumentationElements(self, expected_package):
+ instrs = self.GetInstrumentationElements()
+ if not instrs:
+ raise Exception('No <instrumentation> elements found in %s' % self.path)
+ for instr in instrs:
+ instrumented_package = instr.getAttributeNS(
+ 'http://schemas.android.com/apk/res/android', 'targetPackage')
+ if instrumented_package != expected_package:
+ raise Exception(
+ 'Wrong instrumented package. Expected %s, got %s'
+ % (expected_package, instrumented_package))
+
+ def GetPackageName(self):
+ return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+ if not path in dep_config_cache:
+ with open(path) as jsonfile:
+ dep_config_cache[path] = json.load(jsonfile)['deps_info']
+ return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+ return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+ def GetDeps(path):
+ return GetDepConfig(path)['deps_configs']
+ return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+class Deps(object):
+ def __init__(self, direct_deps_config_paths):
+ self.all_deps_config_paths = GetAllDepsConfigsInOrder(
+ direct_deps_config_paths)
+ self.direct_deps_configs = [
+ GetDepConfig(p) for p in direct_deps_config_paths]
+ self.all_deps_configs = [
+ GetDepConfig(p) for p in self.all_deps_config_paths]
+ self.direct_deps_config_paths = direct_deps_config_paths
+
+ def All(self, wanted_type=None):
+ if type is None:
+ return self.all_deps_configs
+ return DepsOfType(wanted_type, self.all_deps_configs)
+
+ def Direct(self, wanted_type=None):
+ if wanted_type is None:
+ return self.direct_deps_configs
+ return DepsOfType(wanted_type, self.direct_deps_configs)
+
+ def AllConfigPaths(self):
+ return self.all_deps_config_paths
+
+ def RemoveNonDirectDep(self, path):
+ if path in self.direct_deps_config_paths:
+ raise Exception('Cannot remove direct dep.')
+ self.all_deps_config_paths.remove(path)
+ self.all_deps_configs.remove(GetDepConfig(path))
+
+ def GradlePrebuiltJarPaths(self):
+ ret = []
+
+ def helper(cur):
+ for config in cur.Direct('java_library'):
+ if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
+ if config['jar_path'] not in ret:
+ ret.append(config['jar_path'])
+
+ helper(self)
+ return ret
+
+ def GradleLibraryProjectDeps(self):
+ ret = []
+
+ def helper(cur):
+ for config in cur.Direct('java_library'):
+ if config['is_prebuilt']:
+ pass
+ elif config['gradle_treat_as_prebuilt']:
+ helper(Deps(config['deps_configs']))
+ elif config not in ret:
+ ret.append(config)
+
+ helper(self)
+ return ret
+
+
+def _MergeAssets(all_assets):
+ """Merges all assets from the given deps.
+
+ Returns:
+ A tuple of: (compressed, uncompressed, locale_paks)
+ |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is
+ the path of the asset to add, and zipPath is the location within the zip
+ (excluding assets/ prefix).
+ |locale_paks| is a set of all zipPaths that have been marked as
+ treat_as_locale_paks=true.
+ """
+ compressed = {}
+ uncompressed = {}
+ locale_paks = set()
+ for asset_dep in all_assets:
+ entry = asset_dep['assets']
+ disable_compression = entry.get('disable_compression')
+ treat_as_locale_paks = entry.get('treat_as_locale_paks')
+ dest_map = uncompressed if disable_compression else compressed
+ other_map = compressed if disable_compression else uncompressed
+ outputs = entry.get('outputs', [])
+ for src, dest in itertools.izip_longest(entry['sources'], outputs):
+ if not dest:
+ dest = os.path.basename(src)
+ # Merge so that each path shows up in only one of the lists, and that
+ # deps of the same target override previous ones.
+ other_map.pop(dest, 0)
+ dest_map[dest] = src
+ if treat_as_locale_paks:
+ locale_paks.add(dest)
+
+ def create_list(asset_map):
+ ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()]
+ # Sort to ensure deterministic ordering.
+ ret.sort()
+ return ret
+
+ return create_list(compressed), create_list(uncompressed), locale_paks
+
+
+def _ResolveGroups(configs):
+ """Returns a list of configs with all groups inlined."""
+ ret = list(configs)
+ while True:
+ groups = DepsOfType('group', ret)
+ if not groups:
+ return ret
+ for config in groups:
+ index = ret.index(config)
+ expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
+ ret[index:index + 1] = expanded_configs
+
+
+def _DepsFromPaths(dep_paths, target_type, filter_root_targets=True):
+ """Resolves all groups and trims dependency branches that we never want.
+
+ E.g. When a resource or asset depends on an apk target, the intent is to
+ include the .apk as a resource/asset, not to have the apk's classpath added.
+ """
+ configs = [GetDepConfig(p) for p in dep_paths]
+ groups = DepsOfType('group', configs)
+ configs = _ResolveGroups(configs)
+ configs += groups
+ # Don't allow root targets to be considered as a dep.
+ if filter_root_targets:
+ configs = [c for c in configs if c['type'] not in _ROOT_TYPES]
+
+ # Don't allow java libraries to cross through assets/resources.
+ if target_type in _RESOURCE_TYPES:
+ configs = [c for c in configs if c['type'] in _RESOURCE_TYPES]
+
+ return Deps([c['path'] for c in configs])
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
+ ret = []
+ with open(runtime_deps_file) as f:
+ for line in f:
+ line = line.rstrip()
+ if not line.endswith('.so'):
+ continue
+ # Only unstripped .so files are listed in runtime deps.
+ # Convert to the stripped .so by going up one directory.
+ ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
+ ret.reverse()
+ return ret
+
+
+def _CreateJavaLibrariesList(library_paths):
+ """Returns a java literal array with the "base" library names:
+ e.g. libfoo.so -> foo
+ """
+ return ('{%s}' % ','.join(['"%s"' % s[3:-3] for s in library_paths]))
+
+
+def _CreateJavaLocaleListFromAssets(assets, locale_paks):
+ """Returns a java literal array from a list of locale assets.
+
+ Args:
+ assets: A list of all APK asset paths in the form 'src:dst'
+ locale_paks: A list of asset paths that correponds to the locale pak
+ files of interest. Each |assets| entry will have its 'dst' part matched
+ against it to determine if they are part of the result.
+ Returns:
+ A string that is a Java source literal array listing the locale names
+ of the corresponding asset files, without directory or .pak suffix.
+ E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }'
+ """
+ assets_paths = [a.split(':')[1] for a in assets]
+ locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks]
+ return '{%s}' % ','.join(['"%s"' % l for l in sorted(locales)])
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_option('--build-config', help='Path to build_config output.')
+ parser.add_option(
+ '--type',
+ help='Type of this target (e.g. android_library).')
+ parser.add_option(
+ '--deps-configs',
+ help='GN-list of dependent build_config files.')
+ parser.add_option(
+ '--annotation-processor-configs',
+ help='GN-list of build_config files for annotation processors.')
+ parser.add_option(
+ '--classpath-deps-configs',
+ help='GN-list of build_config files for libraries to include as '
+ 'build-time-only classpath.')
+
+ # android_resources options
+ parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+ parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+ parser.add_option('--r-text', help='Path to target\'s R.txt file.')
+ parser.add_option('--package-name',
+ help='Java package name for these resources.')
+ parser.add_option('--android-manifest', help='Path to android manifest.')
+ parser.add_option('--resource-dirs', action='append', default=[],
+ help='GYP-list of resource dirs')
+
+ # android_assets options
+ parser.add_option('--asset-sources', help='List of asset sources.')
+ parser.add_option('--asset-renaming-sources',
+ help='List of asset sources with custom destinations.')
+ parser.add_option('--asset-renaming-destinations',
+ help='List of asset custom destinations.')
+ parser.add_option('--disable-asset-compression', action='store_true',
+ help='Whether to disable asset compression.')
+ parser.add_option('--treat-as-locale-paks', action='store_true',
+ help='Consider the assets as locale paks in BuildConfig.java')
+
+ # java library options
+ parser.add_option('--jar-path', help='Path to target\'s jar output.')
+ parser.add_option('--unprocessed-jar-path',
+ help='Path to the .jar to use for javac classpath purposes.')
+ parser.add_option('--interface-jar-path',
+ help='Path to the .interface.jar to use for javac classpath purposes.')
+ parser.add_option('--is-prebuilt', action='store_true',
+ help='Whether the jar was compiled or pre-compiled.')
+ parser.add_option('--java-sources-file', help='Path to .sources file')
+ parser.add_option('--bundled-srcjars',
+ help='GYP-list of .srcjars that have been included in this java_library.')
+ parser.add_option('--supports-android', action='store_true',
+ help='Whether this library supports running on the Android platform.')
+ parser.add_option('--requires-android', action='store_true',
+ help='Whether this library requires running on the Android platform.')
+ parser.add_option('--bypass-platform-checks', action='store_true',
+ help='Bypass checks for support/require Android platform.')
+ parser.add_option('--extra-classpath-jars',
+ help='GYP-list of .jar files to include on the classpath when compiling, '
+ 'but not to include in the final binary.')
+ parser.add_option('--gradle-treat-as-prebuilt', action='store_true',
+ help='Whether this library should be treated as a prebuilt library by '
+ 'generate_gradle.py.')
+ parser.add_option('--main-class',
+ help='Main class for java_binary or java_annotation_processor targets.')
+ parser.add_option('--java-resources-jar-path',
+ help='Path to JAR that contains java resources. Everything '
+ 'from this JAR except meta-inf/ content and .class files '
+ 'will be added to the final APK.')
+
+ # android library options
+ parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+ # native library options
+ parser.add_option('--shared-libraries-runtime-deps',
+ help='Path to file containing runtime deps for shared '
+ 'libraries.')
+ parser.add_option('--native-libs',
+ action='append',
+ help='GN-list of native libraries for primary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_option('--secondary-abi-shared-libraries-runtime-deps',
+ help='Path to file containing runtime deps for secondary '
+ 'abi shared libraries.')
+ parser.add_option('--secondary-native-libs',
+ action='append',
+ help='GN-list of native libraries for secondary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_option(
+ '--native-lib-placeholders',
+ action='append',
+ help='GN-list of native library placeholders to add.',
+ default=[])
+ parser.add_option(
+ '--secondary-native-lib-placeholders',
+ action='append',
+ help='GN-list of native library placeholders to add '
+ 'for the secondary android-abi.',
+ default=[])
+ parser.add_option('--uncompress-shared-libraries', default=False,
+ action='store_true',
+ help='Whether to store native libraries uncompressed')
+ # apk options
+ parser.add_option('--apk-path', help='Path to the target\'s apk output.')
+ parser.add_option('--incremental-apk-path',
+ help="Path to the target's incremental apk output.")
+ parser.add_option('--incremental-install-json-path',
+ help="Path to the target's generated incremental install "
+ "json.")
+ parser.add_option(
+ '--static-library-dependent-configs',
+ help='GN list of .build_configs of targets that use this target as a '
+ 'static library.')
+
+ parser.add_option('--tested-apk-config',
+ help='Path to the build config of the tested apk (for an instrumentation '
+ 'test apk).')
+ parser.add_option('--proguard-enabled', action='store_true',
+ help='Whether proguard is enabled for this apk or bundle module.')
+ parser.add_option('--proguard-configs',
+ help='GN-list of proguard flag files to use in final apk.')
+ parser.add_option('--proguard-mapping-path',
+ help='Path to jar created by ProGuard step')
+ parser.add_option('--fail',
+ help='GN-list of error message lines to fail with.')
+
+ parser.add_option('--final-dex-path',
+ help='Path to final input classes.dex (or classes.zip) to '
+ 'use in final apk.')
+ parser.add_option('--apk-proto-resources',
+ help='Path to resources compiled in protocol buffer format '
+ ' for this apk.')
+ parser.add_option(
+ '--module-rtxt-path',
+ help='Path to R.txt file for resources in a bundle module.')
+ parser.add_option(
+ '--base-whitelist-rtxt-path',
+ help='Path to R.txt file for the base resources whitelist.')
+
+ parser.add_option('--generate-markdown-format-doc', action='store_true',
+ help='Dump the Markdown .build_config format documentation '
+ 'then exit immediately.')
+
+ parser.add_option(
+ '--base-module-build-config',
+ help='Path to the base module\'s build config '
+ 'if this is a feature module.')
+
+ options, args = parser.parse_args(argv)
+
+ if args:
+ parser.error('No positional arguments should be given.')
+
+ if options.generate_markdown_format_doc:
+ doc_lines = _ExtractMarkdownDocumentation(__doc__)
+ for line in doc_lines:
+ print(line)
+ return 0
+
+ if options.fail:
+ parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
+
+ jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path']
+ required_options_map = {
+ 'android_apk': ['build_config', 'dex_path', 'final_dex_path'] + \
+ jar_path_options,
+ 'android_app_bundle_module': ['build_config', 'dex_path',
+ 'final_dex_path'] + jar_path_options,
+ 'android_assets': ['build_config'],
+ 'android_resources': ['build_config', 'resources_zip'],
+ 'dist_aar': ['build_config'],
+ 'dist_jar': ['build_config'],
+ 'group': ['build_config'],
+ 'java_annotation_processor': ['build_config', 'main_class'],
+ 'java_binary': ['build_config'],
+ 'java_library': ['build_config'] + jar_path_options,
+ 'junit_binary': ['build_config'],
+ 'resource_rewriter': ['build_config'],
+ 'system_java_library': ['build_config'],
+ 'android_app_bundle': ['build_config'],
+ }
+ required_options = required_options_map.get(options.type)
+ if not required_options:
+ raise Exception('Unknown type: <%s>' % options.type)
+
+ build_utils.CheckOptions(options, parser, required_options)
+
+ if options.type != 'android_app_bundle_module':
+ if options.apk_proto_resources:
+ raise Exception('--apk-proto-resources can only be used with '
+ '--type=android_app_bundle_module')
+ if options.module_rtxt_path:
+ raise Exception('--module-rxt-path can only be used with '
+ '--type=android_app_bundle_module')
+ if options.base_whitelist_rtxt_path:
+ raise Exception('--base-whitelist-rtxt-path can only be used with '
+ '--type=android_app_bundle_module')
+
+ is_apk_or_module_target = options.type in ('android_apk',
+ 'android_app_bundle_module')
+
+ if options.uncompress_shared_libraries:
+ if not is_apk_or_module_target:
+ raise Exception('--uncompressed-shared-libraries can only be used '
+ 'with --type=android_apk or '
+ '--type=android_app_bundle_module')
+
+ if options.jar_path and options.supports_android and not options.dex_path:
+ raise Exception('java_library that supports Android requires a dex path.')
+ if any(getattr(options, x) for x in jar_path_options):
+ for attr in jar_path_options:
+ if not getattr(options, attr):
+ raise('Expected %s to be set.' % attr)
+
+ if options.requires_android and not options.supports_android:
+ raise Exception(
+ '--supports-android is required when using --requires-android')
+
+ is_java_target = options.type in (
+ 'java_binary', 'junit_binary', 'java_annotation_processor',
+ 'java_library', 'android_apk', 'dist_aar', 'dist_jar',
+ 'system_java_library', 'android_app_bundle_module')
+
+ is_static_library_dex_provider_target = (
+ options.static_library_dependent_configs and options.proguard_enabled)
+ if is_static_library_dex_provider_target and options.type != 'android_apk':
+ raise Exception(
+ '--static-library-dependent-configs only supports --type=android_apk')
+
+ options.static_library_dependent_configs = build_utils.ParseGnList(
+ options.static_library_dependent_configs)
+ static_library_dependent_configs_by_path = {
+ p: GetDepConfig(p)
+ for p in options.static_library_dependent_configs
+ }
+
+ deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.deps_configs), options.type)
+ processor_deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.annotation_processor_configs or ''),
+ options.type, filter_root_targets=False)
+ classpath_deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.classpath_deps_configs or ''),
+ options.type)
+
+ all_inputs = sorted(
+ set(deps.AllConfigPaths() + processor_deps.AllConfigPaths() +
+ classpath_deps.AllConfigPaths() +
+ list(static_library_dependent_configs_by_path)))
+
+ system_library_deps = deps.Direct('system_java_library')
+ direct_library_deps = deps.Direct('java_library')
+ direct_group_deps = deps.Direct('group')
+ all_group_deps = deps.All('group')
+ all_library_deps = deps.All('java_library')
+ all_resources_deps = deps.All('android_resources')
+ all_classpath_library_deps = classpath_deps.All('java_library')
+
+ base_module_build_config = None
+ if options.base_module_build_config:
+ with open(options.base_module_build_config, 'r') as f:
+ base_module_build_config = json.load(f)
+
+ # Initialize some common config.
+ # Any value that needs to be queryable by dependents must go within deps_info.
+ config = {
+ 'deps_info': {
+ 'name': os.path.basename(options.build_config),
+ 'path': options.build_config,
+ 'type': options.type,
+ 'deps_configs': deps.direct_deps_config_paths
+ },
+ # Info needed only by generate_gradle.py.
+ 'gradle': {}
+ }
+ deps_info = config['deps_info']
+ gradle = config['gradle']
+
+ if options.type == 'android_apk' and options.tested_apk_config:
+ tested_apk_deps = Deps([options.tested_apk_config])
+ tested_apk_config = tested_apk_deps.Direct()[0]
+ tested_apk_resources_deps = tested_apk_deps.All('android_resources')
+ gradle['apk_under_test'] = tested_apk_config['name']
+ all_resources_deps = [
+ d for d in all_resources_deps if not d in tested_apk_resources_deps]
+
+ # Required for generating gradle files.
+ if options.type == 'java_library':
+ deps_info['is_prebuilt'] = bool(options.is_prebuilt)
+ deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt
+
+ if options.android_manifest:
+ deps_info['android_manifest'] = options.android_manifest
+
+ if is_java_target:
+ if options.java_sources_file:
+ deps_info['java_sources_file'] = options.java_sources_file
+ if options.bundled_srcjars:
+ gradle['bundled_srcjars'] = (
+ build_utils.ParseGnList(options.bundled_srcjars))
+
+ gradle['dependent_android_projects'] = []
+ gradle['dependent_java_projects'] = []
+ gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths()
+
+ if options.main_class:
+ deps_info['main_class'] = options.main_class
+
+ for c in deps.GradleLibraryProjectDeps():
+ if c['requires_android']:
+ gradle['dependent_android_projects'].append(c['path'])
+ else:
+ gradle['dependent_java_projects'].append(c['path'])
+
+ # TODO(tiborg): Remove creation of JNI info for type group and java_library
+ # once we can generate the JNI registration based on APK / module targets as
+ # opposed to groups and libraries.
+ if is_apk_or_module_target or options.type in (
+ 'group', 'java_library', 'junit_binary'):
+ deps_info['jni'] = {}
+ all_java_sources = [c['java_sources_file'] for c in all_library_deps
+ if 'java_sources_file' in c]
+ if options.java_sources_file:
+ all_java_sources.append(options.java_sources_file)
+
+ if options.apk_proto_resources:
+ deps_info['proto_resources_path'] = options.apk_proto_resources
+
+ if options.module_rtxt_path:
+ deps_info['module_rtxt_path'] = options.module_rtxt_path
+ if options.base_whitelist_rtxt_path:
+ deps_info['base_whitelist_rtxt_path'] = options.base_whitelist_rtxt_path
+ else:
+ # Ensure there is an entry, even if it is empty, for modules
+ # that don't need such a whitelist.
+ deps_info['base_whitelist_rtxt_path'] = ''
+
+ if is_java_target:
+ deps_info['requires_android'] = bool(options.requires_android)
+ deps_info['supports_android'] = bool(options.supports_android)
+
+ if not options.bypass_platform_checks:
+ deps_require_android = (all_resources_deps +
+ [d['name'] for d in all_library_deps if d['requires_android']])
+ deps_not_support_android = (
+ [d['name'] for d in all_library_deps if not d['supports_android']])
+
+ if deps_require_android and not options.requires_android:
+ raise Exception('Some deps require building for the Android platform: '
+ + str(deps_require_android))
+
+ if deps_not_support_android and options.supports_android:
+ raise Exception('Not all deps support the Android platform: '
+ + str(deps_not_support_android))
+
+ if is_java_target:
+ # Classpath values filled in below (after applying tested_apk_config).
+ config['javac'] = {}
+ if options.jar_path:
+ deps_info['jar_path'] = options.jar_path
+ deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
+ deps_info['interface_jar_path'] = options.interface_jar_path
+ if options.dex_path:
+ deps_info['dex_path'] = options.dex_path
+ if options.type == 'android_apk':
+ deps_info['apk_path'] = options.apk_path
+ deps_info['incremental_apk_path'] = options.incremental_apk_path
+ deps_info['incremental_install_json_path'] = (
+ options.incremental_install_json_path)
+
+ if options.type == 'android_assets':
+ all_asset_sources = []
+ if options.asset_renaming_sources:
+ all_asset_sources.extend(
+ build_utils.ParseGnList(options.asset_renaming_sources))
+ if options.asset_sources:
+ all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources))
+
+ deps_info['assets'] = {
+ 'sources': all_asset_sources
+ }
+ if options.asset_renaming_destinations:
+ deps_info['assets']['outputs'] = (
+ build_utils.ParseGnList(options.asset_renaming_destinations))
+ if options.disable_asset_compression:
+ deps_info['assets']['disable_compression'] = True
+ if options.treat_as_locale_paks:
+ deps_info['assets']['treat_as_locale_paks'] = True
+
+ if options.type == 'android_resources':
+ deps_info['resources_zip'] = options.resources_zip
+ if options.srcjar:
+ deps_info['srcjar'] = options.srcjar
+ if options.android_manifest:
+ manifest = AndroidManifest(options.android_manifest)
+ deps_info['package_name'] = manifest.GetPackageName()
+ if options.package_name:
+ deps_info['package_name'] = options.package_name
+ if options.r_text:
+ deps_info['r_text'] = options.r_text
+
+ deps_info['resources_dirs'] = []
+ if options.resource_dirs:
+ for gyp_list in options.resource_dirs:
+ deps_info['resources_dirs'].extend(build_utils.ParseGnList(gyp_list))
+
+ if options.requires_android and is_java_target:
+ # Lint all resources that are not already linted by a dependent library.
+ owned_resource_dirs = set()
+ owned_resource_zips = set()
+ owned_resource_srcjars = set()
+ for c in all_resources_deps:
+ # Always use resources_dirs in favour of resources_zips so that lint error
+ # messages have paths that are closer to reality (and to avoid needing to
+ # extract during lint).
+ if c['resources_dirs']:
+ owned_resource_dirs.update(c['resources_dirs'])
+ else:
+ owned_resource_zips.add(c['resources_zip'])
+ srcjar = c.get('srcjar')
+ if srcjar:
+ owned_resource_srcjars.add(srcjar)
+
+ for c in itertools.chain(all_library_deps, all_classpath_library_deps):
+ if c['requires_android']:
+ owned_resource_dirs.difference_update(c['owned_resources_dirs'])
+ owned_resource_zips.difference_update(c['owned_resources_zips'])
+ # Many .aar files include R.class files in them, as it makes it easier
+ # for IDEs to resolve symbols. However, including them is not required
+      # and not all prebuilts do. Rather than try to detect their presence,
+ # just assume they are not there. The only consequence is redundant
+ # compilation of the R.class.
+ if not c['is_prebuilt']:
+ owned_resource_srcjars.difference_update(c['owned_resource_srcjars'])
+ deps_info['owned_resources_dirs'] = sorted(owned_resource_dirs)
+ deps_info['owned_resources_zips'] = sorted(owned_resource_zips)
+ deps_info['owned_resource_srcjars'] = sorted(owned_resource_srcjars)
+
+ if options.type == 'java_library':
+ # Used to strip out R.class for android_prebuilt()s.
+ config['javac']['resource_packages'] = [
+ c['package_name'] for c in all_resources_deps if 'package_name' in c]
+
+ if options.type in (
+ 'android_resources', 'android_apk', 'junit_binary', 'resource_rewriter',
+ 'dist_aar', 'android_app_bundle_module'):
+ config['resources'] = {}
+
+ dependency_zips = [
+ c['resources_zip'] for c in all_resources_deps if c['resources_zip']
+ ]
+ extra_package_names = []
+ extra_r_text_files = []
+
+ if options.type != 'android_resources':
+ extra_package_names = [
+ c['package_name'] for c in all_resources_deps if 'package_name' in c]
+ extra_r_text_files = [
+ c['r_text'] for c in all_resources_deps if 'r_text' in c]
+
+ # For feature modules, remove any resources that already exist in the base
+ # module.
+ if base_module_build_config:
+ dependency_zips = [
+ c for c in dependency_zips
+ if c not in base_module_build_config['resources']['dependency_zips']
+ ]
+ extra_package_names = [
+ c for c in extra_package_names if c not in
+ base_module_build_config['resources']['extra_package_names']
+ ]
+ extra_r_text_files = [
+ c for c in extra_r_text_files if c not in
+ base_module_build_config['resources']['extra_r_text_files']
+ ]
+
+ config['resources']['dependency_zips'] = dependency_zips
+ config['resources']['extra_package_names'] = extra_package_names
+ config['resources']['extra_r_text_files'] = extra_r_text_files
+ if options.type == 'android_apk' and options.tested_apk_config:
+ config['resources']['arsc_package_name'] = (
+ tested_apk_config['package_name'])
+
+ if is_apk_or_module_target:
+ deps_dex_files = [c['dex_path'] for c in all_library_deps]
+
+ if options.type == 'group':
+ if options.extra_classpath_jars:
+ # These are .jars to add to javac classpath but not to runtime classpath.
+ extra_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+ deps_info['extra_classpath_jars'] = extra_jars
+ deps_info['extra_classpath_interface_jars'] = extra_jars
+
+ if is_java_target:
+ # The classpath used to compile this target when annotation processors are
+ # present.
+ javac_classpath = [
+ c['unprocessed_jar_path'] for c in direct_library_deps]
+ # The classpath used to compile this target when annotation processors are
+ # not present. These are also always used to know when a target needs to be
+ # rebuilt.
+ javac_interface_classpath = [
+ c['interface_jar_path'] for c in direct_library_deps]
+ # The classpath used for error prone.
+ javac_full_interface_classpath = [
+ c['interface_jar_path'] for c in all_library_deps]
+  # The classpath used for bytecode rewriting.
+ javac_full_classpath = [
+ c['unprocessed_jar_path'] for c in all_library_deps]
+
+ for dep in direct_group_deps:
+ javac_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_interface_classpath.extend(
+ dep.get('extra_classpath_interface_jars', []))
+ for dep in all_group_deps:
+ javac_full_classpath.extend(dep.get('extra_classpath_jars', []))
+ javac_full_interface_classpath.extend(
+ dep.get('extra_classpath_interface_jars', []))
+
+ # Deps to add to the compile-time classpath (but not the runtime classpath).
+ # TODO(agrieve): Might be less confusing to fold these into bootclasspath.
+ javac_extra_jars = [
+ c['unprocessed_jar_path'] for c in classpath_deps.All('java_library')
+ ]
+ extra_jars = [c['jar_path'] for c in classpath_deps.All('java_library')]
+ interface_extra_jars = [
+ c['interface_jar_path'] for c in classpath_deps.All('java_library')
+ ]
+
+ # These are jars specified by input_jars_paths that almost never change.
+ # Just add them directly to all the *extra_jars.
+ if options.extra_classpath_jars:
+ # These are .jars to add to javac classpath but not to runtime classpath.
+ javac_extra_jars.extend(
+ build_utils.ParseGnList(options.extra_classpath_jars))
+ extra_jars.extend(build_utils.ParseGnList(options.extra_classpath_jars))
+ interface_extra_jars.extend(
+ build_utils.ParseGnList(options.extra_classpath_jars))
+
+ if extra_jars:
+ deps_info['extra_classpath_jars'] = extra_jars
+
+ if interface_extra_jars:
+ deps_info['extra_classpath_interface_jars'] = interface_extra_jars
+
+ javac_extra_jars = [p for p in javac_extra_jars if p not in javac_classpath]
+ javac_classpath.extend(javac_extra_jars)
+ javac_full_classpath.extend(
+ p for p in javac_extra_jars if p not in javac_full_classpath)
+
+ interface_extra_jars = [
+ p for p in interface_extra_jars if p not in javac_interface_classpath
+ ]
+ javac_interface_classpath.extend(interface_extra_jars)
+ javac_full_interface_classpath.extend(
+ p for p in interface_extra_jars
+ if p not in javac_full_interface_classpath)
+
+ if is_java_target or options.type == 'android_app_bundle':
+ # The classpath to use to run this target (or as an input to ProGuard).
+ java_full_classpath = []
+ if is_java_target and options.jar_path:
+ java_full_classpath.append(options.jar_path)
+ java_full_classpath.extend(c['jar_path'] for c in all_library_deps)
+ if options.type == 'android_app_bundle':
+ for d in deps.Direct('android_app_bundle_module'):
+ java_full_classpath.extend(
+ c for c in d.get('java_runtime_classpath', [])
+ if c not in java_full_classpath)
+
+ all_configs = build_utils.ParseGnList(options.proguard_configs)
+ deps_info['proguard_configs'] = list(all_configs)
+ extra_jars = []
+
+ if is_static_library_dex_provider_target:
+ # Map classpath entries to configs that include them in their classpath.
+ configs_by_classpath_entry = collections.defaultdict(list)
+ for config_path, dep_config in (
+ static_library_dependent_configs_by_path.iteritems()):
+ all_configs.extend(dep_config['proguard_all_configs'])
+ extra_jars.extend(dep_config['proguard_classpath_jars'])
+ all_java_sources.extend(dep_config['jni']['all_source'])
+ for cp_entry in dep_config['java_runtime_classpath']:
+ # The APK Java targets for the static library dependent targets will
+ # have some of the same classes (R.java) due to shared resource
+ # dependencies. To avoid Proguard failures due to duplicate classes, we
+ # merge the APK jars into the static library's jar_path as a
+ # preprocessing build step.
+ if cp_entry != dep_config['jar_path']:
+ configs_by_classpath_entry[cp_entry].append(config_path)
+
+ for cp_entry in java_full_classpath:
+ configs_by_classpath_entry[cp_entry].append(options.build_config)
+
+ # Map configs to classpath entries that should be included in their final
+ # dex.
+ classpath_entries_by_owning_config = collections.defaultdict(list)
+ for cp_entry, candidate_configs in configs_by_classpath_entry.iteritems():
+ config_path = (candidate_configs[0]
+ if len(candidate_configs) == 1 else options.build_config)
+ classpath_entries_by_owning_config[config_path].append(cp_entry)
+ java_full_classpath.append(cp_entry)
+
+ classpath_entries_by_owning_config[options.build_config].append(
+ deps_info['jar_path'])
+
+ java_full_classpath = sorted(set(java_full_classpath))
+ deps_info['static_library_dependent_classpath_configs'] = {
+ path: sorted(set(classpath))
+ for path, classpath in classpath_entries_by_owning_config.iteritems()
+ }
+
+ if is_apk_or_module_target or options.type in ('group', 'java_library',
+ 'junit_binary'):
+ deps_info['jni']['all_source'] = sorted(set(all_java_sources))
+
+ system_jars = [c['jar_path'] for c in system_library_deps]
+ system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
+ if system_library_deps:
+ config['android'] = {}
+ config['android']['sdk_interface_jars'] = system_interface_jars
+ config['android']['sdk_jars'] = system_jars
+
+ if options.type in ('android_apk', 'dist_aar',
+ 'dist_jar', 'android_app_bundle_module', 'android_app_bundle'):
+ for c in all_library_deps:
+ all_configs.extend(p for p in c.get('proguard_configs', []))
+ extra_jars.extend(p for p in c.get('extra_classpath_jars', []))
+ for c in all_group_deps:
+ extra_jars.extend(p for p in c.get('extra_classpath_jars', []))
+ if options.type == 'android_app_bundle':
+ for c in deps.Direct('android_app_bundle_module'):
+ all_configs.extend(p for p in c.get('proguard_configs', []))
+ if options.type == 'android_app_bundle':
+ for d in deps.Direct('android_app_bundle_module'):
+ extra_jars.extend(
+ c for c in d.get('proguard_classpath_jars', [])
+ if c not in extra_jars)
+
+ if options.type == 'android_app_bundle':
+ deps_proguard_enabled = []
+ deps_proguard_disabled = []
+ for d in deps.Direct('android_app_bundle_module'):
+ if not d['java_runtime_classpath']:
+ # We don't care about modules that have no Java code for proguarding.
+ continue
+ if d['proguard_enabled']:
+ deps_proguard_enabled.append(d['name'])
+ else:
+ deps_proguard_disabled.append(d['name'])
+ if deps_proguard_enabled and deps_proguard_disabled:
+ raise Exception('Deps %s have proguard enabled while deps %s have '
+ 'proguard disabled' % (deps_proguard_enabled,
+ deps_proguard_disabled))
+ else:
+ deps_info['proguard_enabled'] = bool(options.proguard_enabled)
+ if options.proguard_mapping_path:
+ deps_info['proguard_mapping_path'] = options.proguard_mapping_path
+
+ # The java code for an instrumentation test apk is assembled differently for
+ # ProGuard vs. non-ProGuard.
+ #
+ # Without ProGuard: Each library's jar is dexed separately and then combined
+ # into a single classes.dex. A test apk will include all dex files not already
+ # present in the apk-under-test. At runtime all test code lives in the test
+ # apk, and the program code lives in the apk-under-test.
+ #
+ # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs
+ # a single .jar, which is then dexed into a classes.dex. A test apk includes
+ # all jar files from the program and the tests because having them separate
+ # doesn't work with ProGuard's whole-program optimizations. Although the
+ # apk-under-test still has all of its code in its classes.dex, none of it is
+  # used at runtime because the copy of it within the test apk takes precedence.
+ if options.type == 'android_apk' and options.tested_apk_config:
+ if tested_apk_config['proguard_enabled']:
+ assert options.proguard_enabled, ('proguard must be enabled for '
+ 'instrumentation apks if it\'s enabled for the tested apk.')
+ # Mutating lists, so no need to explicitly re-assign to dict.
+ all_configs.extend(p for p in tested_apk_config['proguard_all_configs'])
+ extra_jars.extend(p for p in tested_apk_config['proguard_classpath_jars'])
+ tested_apk_config = GetDepConfig(options.tested_apk_config)
+ deps_info['proguard_under_test_mapping'] = (
+ tested_apk_config['proguard_mapping_path'])
+ elif options.proguard_enabled:
+ # Not sure why you'd want to proguard the test apk when the under-test apk
+ # is not proguarded, but it's easy enough to support.
+ deps_info['proguard_under_test_mapping'] = ''
+
+ expected_tested_package = tested_apk_config['package_name']
+ AndroidManifest(options.android_manifest).CheckInstrumentationElements(
+ expected_tested_package)
+
+ # Add all tested classes to the test's classpath to ensure that the test's
+ # java code is a superset of the tested apk's java code
+ java_full_classpath.extend(
+ p for p in tested_apk_config['java_runtime_classpath']
+ if p not in java_full_classpath)
+ # Include in the classpath classes that are added directly to the apk under
+ # test (those that are not a part of a java_library).
+ javac_classpath.append(tested_apk_config['unprocessed_jar_path'])
+ javac_full_classpath.append(tested_apk_config['unprocessed_jar_path'])
+ javac_interface_classpath.append(tested_apk_config['interface_jar_path'])
+ javac_full_interface_classpath.append(
+ tested_apk_config['interface_jar_path'])
+ javac_full_interface_classpath.extend(
+ p for p in tested_apk_config['javac_full_interface_classpath']
+ if p not in javac_full_interface_classpath)
+ javac_full_classpath.extend(
+ p for p in tested_apk_config['javac_full_classpath']
+ if p not in javac_full_classpath)
+
+ # Exclude dex files from the test apk that exist within the apk under test.
+ # TODO(agrieve): When proguard is enabled, this filtering logic happens
+ # within proguard_util.py. Move the logic for the proguard case into
+ # here as well.
+ tested_apk_library_deps = tested_apk_deps.All('java_library')
+ tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
+ deps_dex_files = [
+ p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+
+ if options.type in ('android_apk', 'dist_aar', 'dist_jar',
+ 'android_app_bundle_module', 'android_app_bundle'):
+ deps_info['proguard_all_configs'] = sorted(set(all_configs))
+ deps_info['proguard_classpath_jars'] = sorted(set(extra_jars))
+
+ # Dependencies for the final dex file of an apk.
+ if is_apk_or_module_target:
+ config['final_dex'] = {}
+ dex_config = config['final_dex']
+ dex_config['dependency_dex_files'] = deps_dex_files
+ dex_config['path'] = options.final_dex_path
+
+ if is_java_target:
+ config['javac']['classpath'] = javac_classpath
+ config['javac']['interface_classpath'] = javac_interface_classpath
+ # Direct() will be of type 'java_annotation_processor'.
+ config['javac']['processor_classpath'] = [
+ c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [
+ c['jar_path'] for c in processor_deps.All('java_library')]
+ config['javac']['processor_classes'] = [
+ c['main_class'] for c in processor_deps.Direct()]
+ deps_info['javac_full_classpath'] = javac_full_classpath
+ deps_info['javac_full_interface_classpath'] = javac_full_interface_classpath
+ elif options.type == 'android_app_bundle':
+ # bundles require javac_full_classpath to create .aab.jar.info.
+ javac_full_classpath = set()
+ for d in deps.Direct('android_app_bundle_module'):
+ javac_full_classpath.update(p for p in d['javac_full_classpath'])
+ javac_full_classpath.add(d['jar_path'])
+ deps_info['javac_full_classpath'] = sorted(javac_full_classpath)
+
+ if options.type in ('android_apk', 'dist_jar', 'java_binary', 'junit_binary',
+ 'android_app_bundle_module', 'android_app_bundle'):
+ deps_info['java_runtime_classpath'] = java_full_classpath
+
+ if options.type in ('android_apk', 'dist_jar'):
+ all_interface_jars = []
+ if options.jar_path:
+ all_interface_jars.append(options.interface_jar_path)
+ all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
+
+ config['dist_jar'] = {
+ 'all_interface_jars': all_interface_jars,
+ }
+
+ if is_apk_or_module_target:
+ manifest = AndroidManifest(options.android_manifest)
+ deps_info['package_name'] = manifest.GetPackageName()
+ if not options.tested_apk_config and manifest.GetInstrumentationElements():
+ # This must then have instrumentation only for itself.
+ manifest.CheckInstrumentationElements(manifest.GetPackageName())
+
+ library_paths = []
+ java_libraries_list = None
+ if options.shared_libraries_runtime_deps:
+ library_paths = _ExtractSharedLibsFromRuntimeDeps(
+ options.shared_libraries_runtime_deps)
+ java_libraries_list = _CreateJavaLibrariesList(library_paths)
+ all_inputs.append(options.shared_libraries_runtime_deps)
+
+ secondary_abi_library_paths = []
+ if options.secondary_abi_shared_libraries_runtime_deps:
+ secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps(
+ options.secondary_abi_shared_libraries_runtime_deps)
+ all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
+
+ secondary_abi_library_paths.extend(
+ build_utils.ParseGnList(options.secondary_native_libs))
+
+ native_library_placeholder_paths = build_utils.ParseGnList(
+ options.native_lib_placeholders)
+
+ secondary_native_library_placeholder_paths = build_utils.ParseGnList(
+ options.secondary_native_lib_placeholders)
+
+ extra_shared_libraries = build_utils.ParseGnList(options.native_libs)
+
+ config['native'] = {
+ 'libraries':
+ library_paths,
+ 'native_library_placeholders':
+ native_library_placeholder_paths,
+ 'secondary_abi_libraries':
+ secondary_abi_library_paths,
+ 'secondary_native_library_placeholders':
+ secondary_native_library_placeholder_paths,
+ 'java_libraries_list':
+ java_libraries_list,
+ 'uncompress_shared_libraries':
+ options.uncompress_shared_libraries,
+ 'extra_shared_libraries':
+ extra_shared_libraries,
+ }
+ config['assets'], config['uncompressed_assets'], locale_paks = (
+ _MergeAssets(deps.All('android_assets')))
+ config['compressed_locales_java_list'] = _CreateJavaLocaleListFromAssets(
+ config['assets'], locale_paks)
+ config['uncompressed_locales_java_list'] = _CreateJavaLocaleListFromAssets(
+ config['uncompressed_assets'], locale_paks)
+
+ config['extra_android_manifests'] = filter(None, (
+ d.get('android_manifest') for d in all_resources_deps))
+
+ # Collect java resources
+ java_resources_jars = [d['java_resources_jar'] for d in all_library_deps
+ if 'java_resources_jar' in d]
+ if options.tested_apk_config:
+ tested_apk_resource_jars = [d['java_resources_jar']
+ for d in tested_apk_library_deps
+ if 'java_resources_jar' in d]
+ java_resources_jars = [jar for jar in java_resources_jars
+ if jar not in tested_apk_resource_jars]
+ config['java_resources_jars'] = java_resources_jars
+
+ if options.java_resources_jar_path:
+ deps_info['java_resources_jar'] = options.java_resources_jar_path
+
+ build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+ if options.depfile:
+ build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs,
+ add_pydeps=False) # pydeps listed in GN.
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/gyp/write_build_config.pydeps b/deps/v8/build/android/gyp/write_build_config.pydeps
new file mode 100644
index 0000000000..e317c47cae
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_build_config.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+write_build_config.py
diff --git a/deps/v8/build/android/gyp/write_ordered_libraries.py b/deps/v8/build/android/gyp/write_ordered_libraries.py
new file mode 100755
index 0000000000..61da64ce26
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, the --input-libraries only needs to be the top-level
+library (i.e. libcontent_shell_content_view). This will then use readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+# Path to the readelf binary; must be set via SetReadelfPath() before any
+# call to CallReadElf().
+_readelf = None
+
+# Matches "NEEDED" entries in `readelf -d` output, capturing the bare
+# shared-library name (e.g. "libfoo.so").
+_library_re = re.compile(
+    '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
+
+# Maps library basename -> on-disk path. Populated by main() from the
+# runtime-deps file; libraries absent from this map (e.g. Android system
+# libraries) are treated as external and excluded from the output.
+_library_path_map = {}
+
+
+def SetReadelfPath(path):
+  """Sets the path to the readelf binary used by CallReadElf().
+
+  Args:
+    path: Filesystem path of the readelf executable.
+  """
+  global _readelf
+  _readelf = path
+
+
+def CallReadElf(library_or_executable):
+  """Runs `readelf -d` on an ELF file and returns its stdout as a string.
+
+  SetReadelfPath() must have been called first (asserted below).
+
+  Args:
+    library_or_executable: Path of the shared library or executable.
+  """
+  assert _readelf is not None
+  readelf_cmd = [_readelf, '-d', library_or_executable]
+  return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+ elf = CallReadElf(library_or_executable)
+ deps = []
+ for l in _library_re.findall(elf):
+ p = _library_path_map.get(l)
+ if p is not None:
+ deps.append(p)
+ return deps
+
+
+def GetSortedTransitiveDependencies(libraries):
+  """Returns all transitive library dependencies in dependency order.
+
+  Args:
+    libraries: Paths of the root libraries to inspect.
+  Returns:
+    Paths of the roots plus their (non-system) transitive dependencies,
+    ordered so that dependencies precede their dependents.
+  """
+  return build_utils.GetSortedTransitiveDependencies(
+      libraries, GetDependencies)
+
+
+def main():
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--readelf', help='Path to the readelf binary.')
+ parser.add_option('--runtime-deps',
+ help='A file created for the target using write_runtime_deps.')
+ parser.add_option('--exclude-shared-libraries',
+ help='List of shared libraries to exclude from the output.')
+ parser.add_option('--output', help='Path to the generated .json file.')
+
+ options, _ = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+ SetReadelfPath(options.readelf)
+
+ unsorted_lib_paths = []
+ exclude_shared_libraries = []
+ if options.exclude_shared_libraries:
+ exclude_shared_libraries = options.exclude_shared_libraries.split(',')
+ for f in open(options.runtime_deps):
+ f = f[:-1]
+ if f.endswith('.so'):
+ p = f.replace('lib.unstripped/', '')
+ if os.path.basename(p) in exclude_shared_libraries:
+ continue
+ unsorted_lib_paths.append(p)
+ _library_path_map[os.path.basename(p)] = p
+
+ lib_paths = GetSortedTransitiveDependencies(unsorted_lib_paths)
+
+ libraries = [os.path.basename(l) for l in lib_paths]
+
+ # Convert to "base" library names: e.g. libfoo.so -> foo
+ java_libraries_list = (
+ '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+ out_json = {
+ 'libraries': libraries,
+ 'lib_paths': lib_paths,
+ 'java_libraries_list': java_libraries_list
+ }
+ build_utils.WriteJson(
+ out_json,
+ options.output,
+ only_if_changed=True)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile, options.output, lib_paths, add_pydeps=False)
+
+
+if __name__ == '__main__':
+  # main() returns None, so sys.exit(None) yields exit status 0 on success.
+  sys.exit(main())
diff --git a/deps/v8/build/android/gyp/write_ordered_libraries.pydeps b/deps/v8/build/android/gyp/write_ordered_libraries.pydeps
new file mode 100644
index 0000000000..c2ed1fee36
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_ordered_libraries.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_ordered_libraries.pydeps build/android/gyp/write_ordered_libraries.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+write_ordered_libraries.py
diff --git a/deps/v8/build/android/gyp/zip.py b/deps/v8/build/android/gyp/zip.py
new file mode 100755
index 0000000000..b9503960fa
--- /dev/null
+++ b/deps/v8/build/android/gyp/zip.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Archives a set of files."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser(args)
+ parser.add_argument('--input-files', help='GN-list of files to zip.')
+ parser.add_argument(
+ '--input-files-base-dir',
+ help='Paths in the archive will be relative to this directory')
+ parser.add_argument('--input-zips', help='GN-list of zips to merge.')
+ parser.add_argument(
+ '--input-zips-excluded-globs',
+ help='GN-list of globs for paths to exclude.')
+ parser.add_argument('--output', required=True, help='Path to output archive.')
+ compress_group = parser.add_mutually_exclusive_group()
+ compress_group.add_argument(
+ '--compress', action='store_true', help='Compress entries')
+ compress_group.add_argument(
+ '--no-compress',
+ action='store_false',
+ dest='compress',
+ help='Do not compress entries')
+ build_utils.AddDepfileOption(parser)
+ options = parser.parse_args(args)
+
+ with build_utils.AtomicOutput(options.output) as f:
+ with zipfile.ZipFile(f.name, 'w') as out_zip:
+ depfile_deps = None
+ if options.input_files:
+ files = build_utils.ParseGnList(options.input_files)
+ build_utils.DoZip(
+ files,
+ out_zip,
+ base_dir=options.input_files_base_dir,
+ compress_fn=lambda _: options.compress)
+
+ if options.input_zips:
+ files = build_utils.ParseGnList(options.input_zips)
+ depfile_deps = files
+ path_transform = None
+ if options.input_zips_excluded_globs:
+ globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
+ path_transform = (
+ lambda p: None if build_utils.MatchesGlob(p, globs) else p)
+ build_utils.MergeZips(
+ out_zip,
+ files,
+ path_transform=path_transform,
+ compress=options.compress)
+
+ # Depfile used only by dist_jar().
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile, options.output, inputs=depfile_deps, add_pydeps=False)
+
+
+if __name__ == '__main__':
+  # Note: unlike sibling scripts, main()'s return value is not passed to
+  # sys.exit(); the script exits 0 unless an exception escapes.
+  main(sys.argv[1:])
diff --git a/deps/v8/build/android/gyp/zip.pydeps b/deps/v8/build/android/gyp/zip.pydeps
new file mode 100644
index 0000000000..ce99648ca0
--- /dev/null
+++ b/deps/v8/build/android/gyp/zip.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+zip.py
diff --git a/deps/v8/build/android/host_heartbeat.py b/deps/v8/build/android/host_heartbeat.py
new file mode 100755
index 0000000000..89905928ec
--- /dev/null
+++ b/deps/v8/build/android/host_heartbeat.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+import devil_chromium
+from devil.android import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+ devil_chromium.Initialize()
+
+ while True:
+ try:
+ devices = device_utils.DeviceUtils.HealthyDevices(blacklist=None)
+ for d in devices:
+ d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+ check_return=True)
+ except:
+ # Keep the heatbeat running bypassing all errors.
+ pass
+ time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  # main() loops forever; sys.exit() is reached only if an exception escapes.
+  sys.exit(main())
diff --git a/deps/v8/build/android/incremental_install/BUILD.gn b/deps/v8/build/android/incremental_install/BUILD.gn
new file mode 100644
index 0000000000..3093c3a710
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+# Java bootstrap classes for incremental install; presumably loaded first so
+# they can side-load the app's real .dex/.so files (see README.md) — confirm.
+android_library("bootstrap_java") {
+  # Use .dex rather than .dex.jar to be usable by package_apk().
+  dex_path = "$target_gen_dir/bootstrap.dex"
+  java_files = [
+    "java/org/chromium/incrementalinstall/BootstrapApplication.java",
+    "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java",
+    "java/org/chromium/incrementalinstall/ClassLoaderPatcher.java",
+    "java/org/chromium/incrementalinstall/LockFile.java",
+    "java/org/chromium/incrementalinstall/Reflect.java",
+    "java/org/chromium/incrementalinstall/SecondInstrumentation.java",
+  ]
+  # Keep the bootstrap itself free of coverage instrumentation / build hooks.
+  emma_never_instrument = true
+  no_build_hooks = true
+}
diff --git a/deps/v8/build/android/incremental_install/README.md b/deps/v8/build/android/incremental_install/README.md
new file mode 100644
index 0000000000..0916e07d23
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/README.md
@@ -0,0 +1,81 @@
+# Incremental Install
+
+Incremental Install is a way of building & deploying an APK that tries to
+minimize the time it takes to make a change and see that change running on
+device. They work best with `is_component_build=true`, and do *not* require a
+rooted device.
+
+## Building
+
+**Option 1:** Add the gn arg:
+
+ incremental_apk_by_default = true
+
+This causes all apks to be built as incremental (except for blacklisted ones).
+
+**Option 2:** Add `_incremental` to the apk target name. E.g.:
+
+ ninja -C out/Debug chrome_public_apk_incremental
+ ninja -C out/Debug chrome_public_test_apk_incremental
+
+## Running
+
+It is not enough to `adb install` them. You must use a generated wrapper script:
+
+ out/Debug/bin/install_chrome_public_apk_incremental
+ out/Debug/bin/run_chrome_public_test_apk_incremental # Automatically sets --fast-local-dev
+
+## Caveats
+
+Isolated processes (on L+) are incompatible with incremental install. As a
+work-around, you can disable isolated processes only for incremental apks using
+gn arg:
+
+ disable_incremental_isolated_processes = true
+
+# How it Works
+
+## Overview
+
+The basic idea is to side-load .dex and .so files to `/data/local/tmp` rather
+than bundling them in the .apk. Then, when making a change, only the changed
+.dex / .so needs to be pushed to the device.
+
+Faster Builds:
+
+ * No `final_dex` step (where all .dex files are merged into one)
+ * No need to rebuild .apk for code-only changes (but required for resources)
+ * Apks sign faster because they are smaller.
+
+Faster Installs:
+
+ * The .apk is smaller, and so faster to verify.
+ * No need to run `adb install` for code-only changes.
+ * Only changed .so / .dex files are pushed. MD5s of existing on-device files
+ are cached on host computer.
+
+Slower Initial Runs:
+
+ * The first time you run an incremental .apk, the `DexOpt` needs to run on all
+ .dex files. This step is normally done during `adb install`, but is done on
+ start-up for incremental apks.
+ * DexOpt results are cached, so subsequent runs are much faster
+
+## The Code
+
+All incremental apks have the same classes.dex, which is built from:
+
+ //build/android/incremental_install:bootstrap_java
+
+They also have a transformed `AndroidManifest.xml`, which overrides the
+main application class and any instrumentation classes so that they instead
+point to `BootstrapApplication`. This is built by:
+
+ //build/android/incremental_install/generate_android_manifest.py
+
+Wrapper scripts and install logic is contained in:
+
+ //build/android/incremental_install/create_install_script.py
+ //build/android/incremental_install/installer.py
+
+Finally, GN logic for incremental apks is sprinkled throughout.
diff --git a/deps/v8/build/android/incremental_install/__init__.py b/deps/v8/build/android/incremental_install/__init__.py
new file mode 100644
index 0000000000..50b23dff63
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/incremental_install/generate_android_manifest.py b/deps/v8/build/android/incremental_install/generate_android_manifest.py
new file mode 100755
index 0000000000..bf38f4e601
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/generate_android_manifest.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an incremental APK.
+
+Given the manifest file for the real APK, generates an AndroidManifest.xml with
+the application class changed to IncrementalApplication.
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp'))
+from util import build_utils
+from util import resource_utils
+
+_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication'
+_META_DATA_APP_NAME = 'incremental-install-real-app'
+_DEFAULT_APPLICATION_CLASS = 'android.app.Application'
+_META_DATA_INSTRUMENTATION_NAMES = [
+ 'incremental-install-real-instrumentation-0',
+ 'incremental-install-real-instrumentation-1',
+]
+_INCREMENTAL_INSTRUMENTATION_CLASSES = [
+ 'android.app.Instrumentation',
+ 'org.chromium.incrementalinstall.SecondInstrumentation',
+]
+
+
def _AddNamespace(name):
  """Returns |name| qualified with the android XML namespace (Clark notation)."""
  return '{{{0}}}{1}'.format(resource_utils.ANDROID_NAMESPACE, name)
+
+
def _ParseArgs(args):
  """Builds the argument parser and parses |args| (after file-arg expansion)."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--src-manifest', required=True,
                      help='The main manifest of the app')
  parser.add_argument('--disable-isolated-processes', action='store_true',
                      help='Changes all android:isolatedProcess to false. '
                      'This is required on Android M+')
  parser.add_argument('--out-apk', required=True,
                      help='Path to output .ap_ file')
  parser.add_argument('--in-apk', required=True,
                      help='Path to non-incremental .ap_ file')
  parser.add_argument('--aapt2-path', required=True,
                      help='Path to the Android aapt tool')
  parser.add_argument('--android-sdk-jars',
                      help='GN List of resource apks to include.')

  options = parser.parse_args(build_utils.ExpandFileArgs(args))
  # --android-sdk-jars arrives as a GN-style list literal; flatten it.
  options.android_sdk_jars = build_utils.ParseGnList(options.android_sdk_jars)
  return options
+
+
def _CreateMetaData(parent, name, value):
  """Appends a <meta-data> child with the given android:name and android:value."""
  node = ElementTree.SubElement(parent, 'meta-data')
  for attr_name, attr_value in (('name', name), ('value', value)):
    node.set(_AddNamespace(attr_name), attr_value)
+
+
def _ProcessManifest(path, arsc_package_name, disable_isolated_processes):
  """Rewrites the manifest at |path| for use in an incremental apk.

  Swaps the application class for the bootstrap one (stashing the real class
  name in a meta-data tag), does the same for any <instrumentation> entries,
  and returns the transformed manifest serialized as XML.
  """
  doc, manifest_node, app_node = resource_utils.ParseAndroidManifest(path)

  # Ensure the manifest package matches that of the apk's arsc package
  # So that resource references resolve correctly. The actual manifest
  # package name is set via --rename-manifest-package.
  manifest_node.set('package', arsc_package_name)

  # Pylint for some reason thinks app_node is an int.
  # pylint: disable=no-member
  real_app_class = app_node.get(_AddNamespace('name'),
                                _DEFAULT_APPLICATION_CLASS)
  app_node.set(_AddNamespace('name'), _INCREMENTAL_APP_NAME)
  # pylint: enable=no-member
  # Stash the real application class where the bootstrap code can find it.
  _CreateMetaData(app_node, _META_DATA_APP_NAME, real_app_class)

  # Seems to be a bug in ElementTree, as doc.find() doesn't work here.
  instrumentation_nodes = doc.findall('instrumentation')
  assert len(instrumentation_nodes) <= 2, (
      'Need to update incremental install to support >2 <instrumentation> tags')
  for i, instrumentation_node in enumerate(instrumentation_nodes):
    real_instrumentation_class = instrumentation_node.get(_AddNamespace('name'))
    instrumentation_node.set(_AddNamespace('name'),
                             _INCREMENTAL_INSTRUMENTATION_CLASSES[i])
    _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAMES[i],
                    real_instrumentation_class)

  ret = ElementTree.tostring(doc.getroot(), encoding='UTF-8')
  # Disable check for page-aligned native libraries.
  ret = ret.replace('extractNativeLibs="false"', 'extractNativeLibs="true"')
  if disable_isolated_processes:
    ret = ret.replace('isolatedProcess="true"', 'isolatedProcess="false"')
  return ret
+
+
def main(raw_args):
  """Builds the incremental .ap_ described by the command-line options."""
  options = _ParseArgs(raw_args)

  arsc_package, _ = resource_utils.ExtractArscPackage(options.aapt2_path,
                                                      options.in_apk)
  # Extract version from the compiled manifest since it might have been set
  # via aapt, and not exist in the manifest's text form.
  version_code, version_name, manifest_package = (
      resource_utils.ExtractBinaryManifestValues(options.aapt2_path,
                                                 options.in_apk))

  new_manifest_data = _ProcessManifest(options.src_manifest, arsc_package,
                                       options.disable_isolated_processes)
  with tempfile.NamedTemporaryFile() as tmp_manifest, \
      tempfile.NamedTemporaryFile() as tmp_apk:
    tmp_manifest.write(new_manifest_data)
    tmp_manifest.flush()
    cmd = [
        options.aapt2_path, 'link', '-o', tmp_apk.name, '--manifest',
        tmp_manifest.name, '-I', options.in_apk, '--replace-version',
        '--version-code', version_code, '--version-name', version_name,
        '--rename-manifest-package', manifest_package, '--debug-mode'
    ]
    for j in options.android_sdk_jars:
      cmd += ['-I', j]
    subprocess.check_call(cmd)
    with zipfile.ZipFile(options.out_apk, 'w') as z:
      # Take only the transformed AndroidManifest.xml from the relinked apk...
      path_transform = lambda p: None if p != 'AndroidManifest.xml' else p
      build_utils.MergeZips(z, [tmp_apk.name], path_transform=path_transform)
      # ...and everything except the manifest from the original apk.
      path_transform = lambda p: None if p == 'AndroidManifest.xml' else p
      build_utils.MergeZips(z, [options.in_apk], path_transform=path_transform)


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/deps/v8/build/android/incremental_install/generate_android_manifest.pydeps b/deps/v8/build/android/incremental_install/generate_android_manifest.pydeps
new file mode 100644
index 0000000000..21b49c6f74
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/generate_android_manifest.pydeps
@@ -0,0 +1,29 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+../gyp/util/md5_check.py
+../gyp/util/resource_utils.py
+generate_android_manifest.py
diff --git a/deps/v8/build/android/incremental_install/installer.py b/deps/v8/build/android/incremental_install/installer.py
new file mode 100755
index 0000000000..95475b1621
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/installer.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install *_incremental.apk targets as well as their dependent files."""
+
+import argparse
+import glob
+import json
+import logging
+import os
+import posixpath
+import shutil
+import sys
+import zipfile
+
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+import devil_chromium
+from devil.android import apk_helper
+from devil.android import device_utils
+from devil.android.sdk import version_codes
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.utils import time_profile
+
+prev_sys_path = list(sys.path)
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+from util import build_utils
+sys.path = prev_sys_path
+
+
def _DeviceCachePath(device):
  """Returns the host-side path of the device-state cache for |device|."""
  serial = device.adb.GetDeviceSerial()
  return os.path.join(constants.GetOutDirectory(),
                      'device_cache_%s.json' % serial)
+
+
+def _TransformDexPaths(paths):
+ """Given paths like ["/a/b/c", "/a/c/d"], returns ["b.c", "c.d"]."""
+ if len(paths) == 1:
+ return [os.path.basename(paths[0])]
+
+ prefix_len = len(os.path.commonprefix(paths))
+ return [p[prefix_len:].replace(os.sep, '.') for p in paths]
+
+
def _Execute(concurrently, *funcs):
  """Calls all functions in |funcs|, concurrently or in sequence.

  Returns a stopped TimeProfile covering the calls.
  """
  timer = time_profile.TimeProfile()
  if not concurrently:
    for func in funcs:
      func()
  else:
    reraiser_thread.RunAsync(funcs)
  timer.Stop(log=False)
  return timer
+
+
+def _GetDeviceIncrementalDir(package):
+ """Returns the device path to put incremental files for the given package."""
+ return '/data/local/tmp/incremental-app-%s' % package
+
+
+def _HasClasses(jar_path):
+ """Returns whether the given jar contains classes.dex."""
+ with zipfile.ZipFile(jar_path) as jar:
+ return 'classes.dex' in jar.namelist()
+
+
def Uninstall(device, package, enable_device_cache=False):
  """Uninstalls and removes all incremental files for the given package.

  Args:
    device: A DeviceUtils instance to act on.
    package: Android package name to remove.
    enable_device_cache: Whether the on-host device-state cache is in use;
        if so it is wiped, since the cached state is now stale.
  """
  main_timer = time_profile.TimeProfile()
  device.Uninstall(package)
  if enable_device_cache:
    # Uninstall is rare, so just wipe the cache in this case.
    cache_path = _DeviceCachePath(device)
    if os.path.exists(cache_path):
      os.unlink(cache_path)
  # Remove the side-loaded .dex / .so files and lock files.
  device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)],
                         check_return=True)
  logging.info('Uninstall took %s seconds.', main_timer.GetDelta())
+
+
def Install(device, install_json, apk=None, enable_device_cache=False,
            use_concurrency=True, permissions=()):
  """Installs the given incremental apk and all required supporting files.

  Args:
    device: A DeviceUtils instance (to install to).
    install_json: Path to .json file or already parsed .json object.
    apk: An existing ApkHelper instance for the apk (optional).
    enable_device_cache: Whether to enable on-device caching of checksums.
    use_concurrency: Whether to speed things up using multiple threads.
    permissions: A list of the permissions to grant, or None to grant all
                 non-blacklisted permissions in the manifest.
  """
  # NOTE(review): basestring implies this script targets Python 2.
  if isinstance(install_json, basestring):
    with open(install_json) as f:
      install_dict = json.load(f)
  else:
    install_dict = install_json

  if install_dict.get('dont_even_try'):
    raise Exception(install_dict['dont_even_try'])

  main_timer = time_profile.TimeProfile()
  install_timer = time_profile.TimeProfile()
  push_native_timer = time_profile.TimeProfile()
  push_dex_timer = time_profile.TimeProfile()

  def fix_path(p):
    # Paths in the .json are resolved relative to the output directory.
    return os.path.normpath(os.path.join(constants.GetOutDirectory(), p))

  if not apk:
    apk = apk_helper.ToHelper(fix_path(install_dict['apk_path']))
  split_globs = [fix_path(p) for p in install_dict['split_globs']]
  native_libs = [fix_path(p) for p in install_dict['native_libs']]
  dex_files = [fix_path(p) for p in install_dict['dex_files']]
  show_proguard_warning = install_dict.get('show_proguard_warning')

  apk_package = apk.GetPackageName()
  device_incremental_dir = _GetDeviceIncrementalDir(apk_package)

  # Install .apk(s) if any of them have changed.
  def do_install():
    install_timer.Start()
    if split_globs:
      splits = []
      for split_glob in split_globs:
        splits.extend((f for f in glob.glob(split_glob)))
      device.InstallSplitApk(
          apk,
          splits,
          allow_downgrade=True,
          reinstall=True,
          allow_cached_props=True,
          permissions=permissions)
    else:
      device.Install(
          apk, allow_downgrade=True, reinstall=True, permissions=permissions)
    install_timer.Stop(log=False)

  # Push .so and .dex files to the device (if they have changed).
  def do_push_files():
    push_native_timer.Start()
    if native_libs:
      with build_utils.TempDir() as temp_dir:
        device_lib_dir = posixpath.join(device_incremental_dir, 'lib')
        for path in native_libs:
          # Note: Can't use symlinks as they don't work when
          # "adb push parent_dir" is used (like we do here).
          shutil.copy(path, os.path.join(temp_dir, os.path.basename(path)))
        device.PushChangedFiles([(temp_dir, device_lib_dir)],
                                delete_device_stale=True)
    push_native_timer.Stop(log=False)

    push_dex_timer.Start()
    if dex_files:
      # Put all .dex files to be pushed into a temporary directory so that we
      # can use delete_device_stale=True.
      with build_utils.TempDir() as temp_dir:
        device_dex_dir = posixpath.join(device_incremental_dir, 'dex')
        # Ensure no two files have the same name.
        transformed_names = _TransformDexPaths(dex_files)
        for src_path, dest_name in zip(dex_files, transformed_names):
          # Binary targets with no extra classes create .dex.jar without a
          # classes.dex (which Android chokes on).
          if _HasClasses(src_path):
            shutil.copy(src_path, os.path.join(temp_dir, dest_name))
        device.PushChangedFiles([(temp_dir, device_dex_dir)],
                                delete_device_stale=True)
    push_dex_timer.Stop(log=False)

  def check_selinux():
    # Marshmallow has no filesystem access whatsoever. It might be possible to
    # get things working on Lollipop, but attempts so far have failed.
    # http://crbug.com/558818
    has_selinux = device.build_version_sdk >= version_codes.LOLLIPOP
    if has_selinux and apk.HasIsolatedProcesses():
      raise Exception('Cannot use incremental installs on Android L+ without '
                      'first disabling isolated processes.\n'
                      'To do so, use GN arg:\n'
                      '  disable_incremental_isolated_processes=true')

  cache_path = _DeviceCachePath(device)
  def restore_cache():
    if not enable_device_cache:
      return
    if os.path.exists(cache_path):
      logging.info('Using device cache: %s', cache_path)
      with open(cache_path) as f:
        device.LoadCacheData(f.read())
      # Delete the cached file so that any exceptions cause it to be cleared.
      os.unlink(cache_path)
    else:
      logging.info('No device cache present: %s', cache_path)

  def save_cache():
    if not enable_device_cache:
      return
    with open(cache_path, 'w') as f:
      f.write(device.DumpCacheData())
    logging.info('Wrote device cache: %s', cache_path)

  # Create 2 lock files:
  # * install.lock tells the app to pause on start-up (until we release it).
  # * firstrun.lock is used by the app to pause all secondary processes until
  #   the primary process finishes loading the .dex / .so files.
  def create_lock_files():
    # Creates or zeros out lock files.
    cmd = ('D="%s";'
           'mkdir -p $D &&'
           'echo -n >$D/install.lock 2>$D/firstrun.lock')
    device.RunShellCommand(
        cmd % device_incremental_dir, shell=True, check_return=True)

  # The firstrun.lock is released by the app itself.
  def release_installer_lock():
    device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir,
                           check_return=True, shell=True)

  # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't
  # been designed for multi-threading. Enabling only because this is a
  # developer-only tool.
  setup_timer = _Execute(
      use_concurrency, create_lock_files, restore_cache, check_selinux)

  _Execute(use_concurrency, do_install, do_push_files)

  finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache)

  logging.info(
      'Install of %s took %s seconds '
      '(setup=%s, install=%s, libs=%s, dex=%s, finalize=%s)',
      os.path.basename(apk.path), main_timer.GetDelta(), setup_timer.GetDelta(),
      install_timer.GetDelta(), push_native_timer.GetDelta(),
      push_dex_timer.GetDelta(), finalize_timer.GetDelta())
  if show_proguard_warning:
    logging.warning('Target had proguard enabled, but incremental install uses '
                    'non-proguarded .dex files. Performance characteristics '
                    'may differ.')
+
+
def main():
  """Parses arguments and installs (or uninstalls) an incremental apk."""
  parser = argparse.ArgumentParser()
  parser.add_argument('json_path',
                      help='The path to the generated incremental apk .json.')
  parser.add_argument('-d', '--device', dest='device',
                      help='Target device for apk to install on.')
  parser.add_argument('--uninstall',
                      action='store_true',
                      default=False,
                      help='Remove the app and all side-loaded files.')
  parser.add_argument('--output-directory',
                      help='Path to the root build directory.')
  parser.add_argument('--no-threading',
                      action='store_false',
                      default=True,
                      dest='threading',
                      help='Do not install and push concurrently')
  parser.add_argument('--no-cache',
                      action='store_false',
                      default=True,
                      dest='cache',
                      help='Do not use cached information about what files are '
                      'currently on the target device.')
  parser.add_argument('-v',
                      '--verbose',
                      dest='verbose_count',
                      default=0,
                      action='count',
                      help='Verbose level (multiple times for more)')

  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose_count)
  if args.output_directory:
    constants.SetOutputDirectory(args.output_directory)

  devil_chromium.Initialize(output_directory=constants.GetOutDirectory())

  # Retries are annoying when commands fail for legitimate reasons. Might want
  # to enable them if this is ever used on bots though.
  device = device_utils.DeviceUtils.HealthyDevices(
      device_arg=args.device,
      default_retries=0,
      enable_device_files_cache=True)[0]

  if args.uninstall:
    with open(args.json_path) as f:
      install_dict = json.load(f)
    apk = apk_helper.ToHelper(install_dict['apk_path'])
    Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache)
  else:
    Install(device, args.json_path, enable_device_cache=args.cache,
            use_concurrency=args.threading)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
new file mode 100644
index 0000000000..801a4126c2
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java
@@ -0,0 +1,288 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Application;
+import android.app.Instrumentation;
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.os.Bundle;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Map;
+
/**
 * An Application that replaces itself with another Application (as defined in
 * an AndroidManifest.xml meta-data tag). It loads the other application only
 * after side-loading its .so and .dex files from /data/local/tmp.
 *
 * This class is highly dependent on the private implementation details of
 * Android's ActivityThread.java. However, it has been tested to work with
 * JellyBean through Marshmallow.
 */
public final class BootstrapApplication extends Application {
    private static final String TAG = "cr.incrementalinstall";
    private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
    private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
    private static final String REAL_INSTRUMENTATION_META_DATA_NAME0 =
            "incremental-install-real-instrumentation-0";
    private static final String REAL_INSTRUMENTATION_META_DATA_NAME1 =
            "incremental-install-real-instrumentation-1";

    private ClassLoaderPatcher mClassLoaderPatcher;
    private Application mRealApplication;
    private Instrumentation mOrigInstrumentation;
    private Instrumentation mRealInstrumentation;
    private Object mStashedProviderList;
    private Object mActivityThread;
    public static DexFile[] sIncrementalDexFiles; // Needed by junit test runner.

    @Override
    protected void attachBaseContext(Context context) {
        super.attachBaseContext(context);
        try {
            // Grab the live ActivityThread; its private fields are patched via
            // reflection throughout this class.
            mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
                    "currentActivityThread");
            mClassLoaderPatcher = new ClassLoaderPatcher(context);

            mOrigInstrumentation =
                    (Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
            Context instContext = mOrigInstrumentation.getContext();
            if (instContext == null) {
                instContext = context;
            }

            // When running with an instrumentation that lives in a different package from the
            // application, we must load the dex files and native libraries from both packages.
            // This logic likely won't work when the instrumentation is incremental, but the app is
            // non-incremental. This configuration isn't used right now though.
            String appPackageName = getPackageName();
            String instPackageName = instContext.getPackageName();
            boolean instPackageNameDiffers = !appPackageName.equals(instPackageName);
            Log.i(TAG, "App PackageName: " + appPackageName);
            if (instPackageNameDiffers) {
                Log.i(TAG, "Inst PackageName: " + instPackageName);
            }

            File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName);
            File appLibDir = new File(appIncrementalRootDir, "lib");
            File appDexDir = new File(appIncrementalRootDir, "dex");
            File appInstallLockFile = new File(appIncrementalRootDir, "install.lock");
            File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock");
            File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName);
            File instLibDir = new File(instIncrementalRootDir, "lib");
            File instDexDir = new File(instIncrementalRootDir, "dex");
            File instInstallLockFile = new File(instIncrementalRootDir, "install.lock");
            File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock");

            boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile)
                    || (instPackageNameDiffers
                            && LockFile.installerLockExists(instFirstRunLockFile));
            if (isFirstRun) {
                if (mClassLoaderPatcher.mIsPrimaryProcess) {
                    // Wait for incremental_install.py to finish.
                    LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000);
                    LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000);
                } else {
                    // Wait for the browser process to create the optimized dex files
                    // and copy the library files.
                    LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000);
                    LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000);
                }
            }

            mClassLoaderPatcher.importNativeLibs(instLibDir);
            sIncrementalDexFiles = mClassLoaderPatcher.loadDexFiles(instDexDir);
            if (instPackageNameDiffers) {
                mClassLoaderPatcher.importNativeLibs(appLibDir);
                mClassLoaderPatcher.loadDexFiles(appDexDir);
            }

            if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) {
                LockFile.clearInstallerLock(appFirstRunLockFile);
                if (instPackageNameDiffers) {
                    LockFile.clearInstallerLock(instFirstRunLockFile);
                }
            }

            // mInstrumentationAppDir is one of a set of fields that is initialized only when
            // instrumentation is active.
            if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) {
                String metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME0;
                if (mOrigInstrumentation instanceof SecondInstrumentation) {
                    metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME1;
                }
                mRealInstrumentation =
                        initInstrumentation(getClassNameFromMetadata(metaDataName, instContext));
            } else {
                Log.i(TAG, "No instrumentation active.");
            }

            // Even when instrumentation is not enabled, ActivityThread uses a default
            // Instrumentation instance internally. We hook it here in order to hook into the
            // call to Instrumentation.onCreate().
            Reflect.setField(mActivityThread, "mInstrumentation",
                    new BootstrapInstrumentation(this));

            // attachBaseContext() is called from ActivityThread#handleBindApplication() and
            // Application#mApplication is changed right after we return. Thus, we cannot swap
            // the Application instances until onCreate() is called.
            String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
            Log.i(TAG, "Instantiating " + realApplicationName);
            Instrumentation anyInstrumentation =
                    mRealInstrumentation != null ? mRealInstrumentation : mOrigInstrumentation;
            mRealApplication = anyInstrumentation.newApplication(
                    getClassLoader(), realApplicationName, context);

            // Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
            // all ContentProviders. The ContentProviders break without the correct Application
            // class being installed, so temporarily pretend there are no providers, and then
            // instantiate them explicitly within onCreate().
            disableContentProviders();
            Log.i(TAG, "Waiting for Instrumentation.onCreate");
        } catch (Exception e) {
            throw new RuntimeException("Incremental install failed.", e);
        }
    }

    /**
     * Returns the fully-qualified class name for the given key, stored in a
     * &lt;meta&gt; within the manifest.
     */
    private static String getClassNameFromMetadata(String key, Context context)
            throws NameNotFoundException {
        String pkgName = context.getPackageName();
        ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
                PackageManager.GET_META_DATA);
        String value = appInfo.metaData.getString(key);
        if (value != null && !value.contains(".")) {
            // Support unqualified names by prefixing the package name.
            value = pkgName + "." + value;
        }
        return value;
    }

    /**
     * Instantiates and initializes mRealInstrumentation (the real Instrumentation class).
     */
    private Instrumentation initInstrumentation(String realInstrumentationName)
            throws ReflectiveOperationException {
        if (realInstrumentationName == null) {
            // This is the case when an incremental app is used as a target for an instrumentation
            // test. In this case, ActivityThread can instantiate the proper class just fine since
            // it exists within the test apk (as opposed to the incremental apk-under-test).
            Log.i(TAG, "Running with external instrumentation");
            return null;
        }
        // For unit tests, the instrumentation class is replaced in the manifest by a build step
        // because ActivityThread tries to instantiate it before we get a chance to load the
        // incremental dex files.
        Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
        Instrumentation ret =
                (Instrumentation) Reflect.newInstance(Class.forName(realInstrumentationName));

        // Initialize the fields that are set by Instrumentation.init().
        String[] initFields = {"mAppContext", "mComponent", "mInstrContext", "mMessageQueue",
                "mThread", "mUiAutomationConnection", "mWatcher"};
        for (String fieldName : initFields) {
            Reflect.setField(ret, fieldName, Reflect.getField(mOrigInstrumentation, fieldName));
        }
        return ret;
    }

    /**
     * Called by BootstrapInstrumentation from Instrumentation.onCreate().
     * This happens regardless of whether or not instrumentation is enabled.
     */
    void onInstrumentationCreate(Bundle arguments) {
        Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
        try {
            swapApplicationReferences();
            enableContentProviders();
            if (mRealInstrumentation != null) {
                Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
                mRealInstrumentation.onCreate(arguments);
            }
        } catch (Exception e) {
            throw new RuntimeException("Incremental install failed.", e);
        }
    }

    @Override
    public void onCreate() {
        super.onCreate();
        try {
            Log.i(TAG, "Application.onCreate() called.");
            mRealApplication.onCreate();
        } catch (Exception e) {
            throw new RuntimeException("Incremental install failed.", e);
        }
    }

    /**
     * Nulls out ActivityThread.mBoundApplication.providers.
     */
    private void disableContentProviders() throws ReflectiveOperationException {
        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
        mStashedProviderList = Reflect.getField(data, "providers");
        Reflect.setField(data, "providers", null);
    }

    /**
     * Restores the value of ActivityThread.mBoundApplication.providers, and invokes
     * ActivityThread#installContentProviders().
     */
    private void enableContentProviders() throws ReflectiveOperationException {
        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
        Reflect.setField(data, "providers", mStashedProviderList);
        if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
            Log.i(TAG, "Instantiating content providers");
            Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
                    mStashedProviderList);
        }
        mStashedProviderList = null;
    }

    /**
     * Changes all fields within framework classes that have stored a reference to this
     * BootstrapApplication to instead store references to mRealApplication.
     */
    @SuppressWarnings("unchecked")
    private void swapApplicationReferences() throws ReflectiveOperationException {
        if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
            Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
        }

        List<Application> allApplications =
                (List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
        for (int i = 0; i < allApplications.size(); i++) {
            if (allApplications.get(i) == this) {
                allApplications.set(i, mRealApplication);
            }
        }

        // Contains a reference to BootstrapApplication and will cause BroadcastReceivers to fail
        // if not replaced.
        Context contextImpl = mRealApplication.getBaseContext();
        Reflect.setField(contextImpl, "mOuterContext", mRealApplication);

        for (String fieldName : new String[] {"mPackages", "mResourcePackages"}) {
            Map<String, WeakReference<?>> packageMap =
                    (Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
            for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
                Object loadedApk = entry.getValue().get();
                if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
                    Reflect.setField(loadedApk, "mApplication", mRealApplication);
                }
            }
        }
    }
}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
new file mode 100644
index 0000000000..f197406499
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
@@ -0,0 +1,25 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+
+/**
+ * Notifies BootstrapApplication of the call to Instrumentation.onCreate().
+ */
+public final class BootstrapInstrumentation extends Instrumentation {
+ private final BootstrapApplication mApp;
+
+ BootstrapInstrumentation(BootstrapApplication app) {
+ mApp = app;
+ }
+
+ @Override
+ public void onCreate(Bundle arguments) {
+ super.onCreate(arguments);
+ mApp.onInstrumentationCreate(arguments);
+ }
+}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
new file mode 100644
index 0000000000..10e438f670
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -0,0 +1,291 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.os.Build;
+import android.os.Process;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Provides the ability to add native libraries and .dex files to an existing class loader.
+ * Tested with Jellybean MR2 - Marshmallow.
+ */
+final class ClassLoaderPatcher {
+ private static final String TAG = "cr.incrementalinstall";
+ private final File mAppFilesSubDir;
+ private final ClassLoader mClassLoader;
+ private final Object mLibcoreOs;
+ private final int mProcessUid;
+ final boolean mIsPrimaryProcess;
+
+ ClassLoaderPatcher(Context context) throws ReflectiveOperationException {
+ mAppFilesSubDir =
+ new File(context.getApplicationInfo().dataDir, "incremental-install-files");
+ mClassLoader = context.getClassLoader();
+ mLibcoreOs = Reflect.getField(Class.forName("libcore.io.Libcore"), "os");
+ mProcessUid = Process.myUid();
+ mIsPrimaryProcess = context.getApplicationInfo().uid == mProcessUid;
+ Log.i(TAG, "uid=" + mProcessUid + " (isPrimary=" + mIsPrimaryProcess + ")");
+ }
+
+ /**
+ * Loads all dex files within |dexDir| into the app's ClassLoader.
+ */
+ @SuppressLint({
+ "SetWorldReadable", "SetWorldWritable",
+ })
+ DexFile[] loadDexFiles(File dexDir) throws ReflectiveOperationException, IOException {
+ Log.i(TAG, "Installing dex files from: " + dexDir);
+
+ // The optimized dex files will be owned by this process' user.
+ // Store them within the app's data dir rather than on /data/local/tmp
+ // so that they are still deleted (by the OS) when we uninstall
+ // (even on a non-rooted device).
+ File incrementalDexesDir = new File(mAppFilesSubDir, "optimized-dexes");
+ File isolatedDexesDir = new File(mAppFilesSubDir, "isolated-dexes");
+ File optimizedDir;
+
+ // In O, optimizedDirectory is ignored, and the files are always put in an "oat"
+ // directory that is a sibling to the dex files themselves. SELinux policies
+ // prevent using odex files from /data/local/tmp, so we must first copy them
+ // into the app's data directory in order to get the odex files to live there.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ safeCopyAllFiles(dexDir, incrementalDexesDir);
+ dexDir = incrementalDexesDir;
+ }
+
+ // Ignore "oat" directory.
+ // Also ignore files that sometimes show up (e.g. .jar.arm.flock).
+ File[] dexFilesArr = dexDir.listFiles(f -> f.getName().endsWith(".jar"));
+ if (dexFilesArr == null) {
+ throw new FileNotFoundException("Dex dir does not exist: " + dexDir);
+ }
+
+ if (mIsPrimaryProcess) {
+ ensureAppFilesSubDirExists();
+ // Allows isolated processes to access the same files.
+ incrementalDexesDir.mkdir();
+ incrementalDexesDir.setReadable(true, false);
+ incrementalDexesDir.setExecutable(true, false);
+ // Create a directory for isolated processes to create directories in.
+ isolatedDexesDir.mkdir();
+ isolatedDexesDir.setWritable(true, false);
+ isolatedDexesDir.setExecutable(true, false);
+
+ optimizedDir = incrementalDexesDir;
+ } else {
+ // There is a UID check of the directory in dalvik.system.DexFile():
+ // https://android.googlesource.com/platform/libcore/+/45e0260/dalvik/src/main/java/dalvik/system/DexFile.java#101
+ // Rather than have each isolated process run DexOpt though, we use
+ // symlinks within the directory to point at the browser process'
+ // optimized dex files.
+ optimizedDir = new File(isolatedDexesDir, "isolated-" + mProcessUid);
+ optimizedDir.mkdir();
+ // Always wipe it out and re-create for simplicity.
+ Log.i(TAG, "Creating dex file symlinks for isolated process");
+ for (File f : optimizedDir.listFiles()) {
+ f.delete();
+ }
+ for (File f : incrementalDexesDir.listFiles()) {
+ String to = "../../" + incrementalDexesDir.getName() + "/" + f.getName();
+ File from = new File(optimizedDir, f.getName());
+ createSymlink(to, from);
+ }
+ }
+
+ Log.i(TAG, "Code cache dir: " + optimizedDir);
+ Log.i(TAG, "Loading " + dexFilesArr.length + " dex files");
+
+ Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+ Object[] dexElements = (Object[]) Reflect.getField(dexPathList, "dexElements");
+ dexElements = addDexElements(dexFilesArr, optimizedDir, dexElements);
+ Reflect.setField(dexPathList, "dexElements", dexElements);
+
+ DexFile[] ret = new DexFile[dexElements.length];
+ for (int i = 0; i < ret.length; ++i) {
+ ret[i] = (DexFile) Reflect.getField(dexElements[i], "dexFile");
+ }
+ return ret;
+ }
+
+ /**
+ * Sets up all libraries within |libDir| to be loadable by System.loadLibrary().
+ */
+ @SuppressLint("SetWorldReadable")
+ void importNativeLibs(File libDir) throws ReflectiveOperationException, IOException {
+ Log.i(TAG, "Importing native libraries from: " + libDir);
+ if (!libDir.exists()) {
+ Log.i(TAG, "No native libs exist.");
+ return;
+ }
+ // The library copying is not necessary on older devices, but we do it anyways to
+ // simplify things (it's fast compared to dexing).
+ // https://code.google.com/p/android/issues/detail?id=79480
+ File localLibsDir = new File(mAppFilesSubDir, "lib");
+ safeCopyAllFiles(libDir, localLibsDir);
+ addNativeLibrarySearchPath(localLibsDir);
+ }
+
+ @SuppressLint("SetWorldReadable")
+ private void safeCopyAllFiles(File srcDir, File dstDir) throws IOException {
+ // The library copying is not necessary on older devices, but we do it anyways to
+ // simplify things (it's fast compared to dexing).
+ // https://code.google.com/p/android/issues/detail?id=79480
+ File lockFile = new File(mAppFilesSubDir, dstDir.getName() + ".lock");
+ if (mIsPrimaryProcess) {
+ ensureAppFilesSubDirExists();
+ LockFile lock = LockFile.acquireRuntimeLock(lockFile);
+ if (lock == null) {
+ LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+ } else {
+ try {
+ dstDir.mkdir();
+ dstDir.setReadable(true, false);
+ dstDir.setExecutable(true, false);
+ copyChangedFiles(srcDir, dstDir);
+ } finally {
+ lock.release();
+ }
+ }
+ } else {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ // TODO: Work around this issue by using APK splits to install each dex / lib.
+ throw new RuntimeException("Incremental install does not work on Android M+ "
+ + "with isolated processes. Use the gn arg:\n"
+ + " disable_incremental_isolated_processes=true\n"
+ + "and try again.");
+ }
+ // Other processes: Waits for primary process to finish copying.
+ LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void addNativeLibrarySearchPath(File nativeLibDir) throws ReflectiveOperationException {
+ Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+ Object currentDirs = Reflect.getField(dexPathList, "nativeLibraryDirectories");
+ File[] newDirs = new File[] { nativeLibDir };
+ // Switched from an array to an ArrayList in Lollipop.
+ if (currentDirs instanceof List) {
+ List<File> dirsAsList = (List<File>) currentDirs;
+ dirsAsList.add(0, nativeLibDir);
+ } else {
+ File[] dirsAsArray = (File[]) currentDirs;
+ Reflect.setField(dexPathList, "nativeLibraryDirectories",
+ Reflect.concatArrays(newDirs, newDirs, dirsAsArray));
+ }
+
+ Object[] nativeLibraryPathElements;
+ try {
+ nativeLibraryPathElements =
+ (Object[]) Reflect.getField(dexPathList, "nativeLibraryPathElements");
+ } catch (NoSuchFieldException e) {
+ // This field doesn't exist pre-M.
+ return;
+ }
+ Object[] additionalElements = makeNativePathElements(newDirs);
+ Reflect.setField(dexPathList, "nativeLibraryPathElements",
+ Reflect.concatArrays(nativeLibraryPathElements, additionalElements,
+ nativeLibraryPathElements));
+ }
+
+ private static void copyChangedFiles(File srcDir, File dstDir) throws IOException {
+ // No need to delete stale libs since libraries are loaded explicitly.
+ int numNotChanged = 0;
+ for (File f : srcDir.listFiles()) {
+ // Note: Tried using hardlinks, but resulted in EACCES exceptions.
+ File dest = new File(dstDir, f.getName());
+ if (!copyIfModified(f, dest)) {
+ numNotChanged++;
+ }
+ }
+ if (numNotChanged > 0) {
+ Log.i(TAG, numNotChanged + " libs already up to date.");
+ }
+ }
+
+ @SuppressLint("SetWorldReadable")
+ private static boolean copyIfModified(File src, File dest) throws IOException {
+ long lastModified = src.lastModified();
+ if (dest.exists() && dest.lastModified() == lastModified) {
+ return false;
+ }
+ Log.i(TAG, "Copying " + src + " -> " + dest);
+ FileInputStream istream = new FileInputStream(src);
+ FileOutputStream ostream = new FileOutputStream(dest);
+ ostream.getChannel().transferFrom(istream.getChannel(), 0, istream.getChannel().size());
+ istream.close();
+ ostream.close();
+ dest.setReadable(true, false);
+ dest.setExecutable(true, false);
+ dest.setLastModified(lastModified);
+ return true;
+ }
+
+ private void ensureAppFilesSubDirExists() {
+ mAppFilesSubDir.mkdir();
+ mAppFilesSubDir.setExecutable(true, false);
+ }
+
+ private void createSymlink(String to, File from) throws ReflectiveOperationException {
+ Reflect.invokeMethod(mLibcoreOs, "symlink", to, from.getAbsolutePath());
+ }
+
+ private static Object[] makeNativePathElements(File[] paths)
+ throws ReflectiveOperationException {
+ Object[] entries = new Object[paths.length];
+ if (Build.VERSION.SDK_INT >= 26) {
+ Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$NativeLibraryElement");
+ for (int i = 0; i < paths.length; ++i) {
+ entries[i] = Reflect.newInstance(entryClazz, paths[i]);
+ }
+ } else {
+ Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+ for (int i = 0; i < paths.length; ++i) {
+ entries[i] = Reflect.newInstance(entryClazz, paths[i], true, null, null);
+ }
+ }
+ return entries;
+ }
+
+ private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] curDexElements)
+ throws ReflectiveOperationException {
+ Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+ Class<?> clazz = Class.forName("dalvik.system.DexPathList");
+ Object[] ret =
+ Reflect.concatArrays(curDexElements, curDexElements, new Object[files.length]);
+ File emptyDir = new File("");
+ for (int i = 0; i < files.length; ++i) {
+ File file = files[i];
+ Object dexFile;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ // loadDexFile requires that ret contain all previously added elements.
+ dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory,
+ mClassLoader, ret);
+ } else {
+ dexFile = Reflect.invokeMethod(clazz, "loadDexFile", file, optimizedDirectory);
+ }
+ Object dexElement;
+ if (Build.VERSION.SDK_INT >= 26) {
+ dexElement = Reflect.newInstance(entryClazz, dexFile, file);
+ } else {
+ dexElement = Reflect.newInstance(entryClazz, emptyDir, false, file, dexFile);
+ }
+ ret[curDexElements.length + i] = dexElement;
+ }
+ return ret;
+ }
+}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
new file mode 100644
index 0000000000..6e48f3b1ea
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
@@ -0,0 +1,129 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileLock;
+import java.util.concurrent.Callable;
+
+/**
+ * Helpers for dealing with .lock files used during install / first run.
+ */
+final class LockFile {
+ private static final String TAG = "cr.incrementalinstall";
+
+ private final File mFile;
+ private final FileOutputStream mOutputStream;
+ private final FileLock mFileLock;
+
+ private LockFile(File file, FileOutputStream outputStream, FileLock fileLock) {
+ mFile = file;
+ mOutputStream = outputStream;
+ mFileLock = fileLock;
+ }
+
+ /**
+ * Clears the lock file by writing to it (making it non-zero in length).
+ */
+ static void clearInstallerLock(File lockFile) throws IOException {
+ Log.i(TAG, "Clearing " + lockFile);
+ // On Android M+, we can't delete files in /data/local/tmp, so we write to it instead.
+ FileOutputStream os = new FileOutputStream(lockFile);
+ os.write(1);
+ os.close();
+ }
+
+ /**
+ * Waits for the given file to be non-zero in length.
+ */
+ static void waitForInstallerLock(final File file, long timeoutMs) {
+ pollingWait(new Callable<Boolean>() {
+ @Override public Boolean call() {
+ return !installerLockExists(file);
+ }
+ }, file, timeoutMs);
+ }
+
+ /**
+ * Polls |func| until it returns true, or throws once |timeoutMs| has elapsed.
+ */
+ private static void pollingWait(Callable<Boolean> func, File file, long timeoutMs) {
+ long pollIntervalMs = 200;
+ for (int i = 0; i < timeoutMs / pollIntervalMs; i++) {
+ try {
+ if (func.call()) {
+ if (i > 0) {
+ Log.i(TAG, "Finished waiting on lock file: " + file);
+ }
+ return;
+ } else if (i == 0) {
+ Log.i(TAG, "Waiting on lock file: " + file);
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ try {
+ Thread.sleep(pollIntervalMs);
+ } catch (InterruptedException e) {
+ // Should never happen.
+ }
+ }
+ throw new RuntimeException("Timed out waiting for lock file: " + file);
+ }
+
+ /**
+ * Returns whether the given lock file is missing or is in the locked state.
+ */
+ static boolean installerLockExists(File file) {
+ return !file.exists() || file.length() == 0;
+ }
+
+ /**
+ * Attempts to acquire a lock for the given file.
+ * @return Returns the FileLock if it was acquired, or null otherwise.
+ */
+ static LockFile acquireRuntimeLock(File file) {
+ try {
+ FileOutputStream outputStream = new FileOutputStream(file);
+ FileLock lock = outputStream.getChannel().tryLock();
+ if (lock != null) {
+ Log.i(TAG, "Created lock file: " + file);
+ return new LockFile(file, outputStream, lock);
+ }
+ outputStream.close();
+ } catch (IOException e) {
+ // Do nothing. We didn't get the lock.
+ Log.w(TAG, "Exception trying to acquire lock " + file, e);
+ }
+ return null;
+ }
+
+ /**
+ * Waits for the given file to not exist.
+ */
+ static void waitForRuntimeLock(final File file, long timeoutMs) {
+ pollingWait(new Callable<Boolean>() {
+ @Override public Boolean call() {
+ return !file.exists();
+ }
+ }, file, timeoutMs);
+ }
+
+ /**
+ * Releases and deletes the lock file.
+ */
+ void release() throws IOException {
+ Log.i(TAG, "Deleting lock file: " + mFile);
+ mFileLock.release();
+ mOutputStream.close();
+ if (!mFile.delete()) {
+ throw new IOException("Failed to delete lock file: " + mFile);
+ }
+ }
+}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
new file mode 100644
index 0000000000..c64dc1e8a3
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
@@ -0,0 +1,142 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+
+/**
+ * Reflection helper methods.
+ */
+final class Reflect {
+ /**
+ * Sets the value of an object's field (even if it's not visible).
+ *
+ * @param instance The object containing the field to set.
+ * @param name The name of the field to set.
+ * @param value The new value for the field.
+ */
+ static void setField(Object instance, String name, Object value)
+ throws ReflectiveOperationException {
+ Field field = findField(instance, name);
+ field.setAccessible(true);
+ field.set(instance, value);
+ }
+
+ /**
+ * Retrieves the value of an object's field (even if it's not visible).
+ *
+ * @param instance The object containing the field to set.
+ * @param name The name of the field to set.
+ * @return The field's value. Primitive values are returned as their boxed
+ * type.
+ */
+ static Object getField(Object instance, String name) throws ReflectiveOperationException {
+ Field field = findField(instance, name);
+ field.setAccessible(true);
+ return field.get(instance);
+ }
+
+ /**
+ * Concatenates two arrays into a new array. The arrays must be of the same
+ * type.
+ */
+ static Object[] concatArrays(Object[] arrType, Object[] left, Object[] right) {
+ Object[] result = (Object[]) Array.newInstance(
+ arrType.getClass().getComponentType(), left.length + right.length);
+ System.arraycopy(left, 0, result, 0, left.length);
+ System.arraycopy(right, 0, result, left.length, right.length);
+ return result;
+ }
+
+ /**
+ * Invokes a method with zero or more parameters. For static methods, use the Class as the
+ * instance.
+ */
+ static Object invokeMethod(Object instance, String name, Object... params)
+ throws ReflectiveOperationException {
+ boolean isStatic = instance instanceof Class;
+ Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+ Method method = findMethod(clazz, name, params);
+ method.setAccessible(true);
+ return method.invoke(instance, params);
+ }
+
+ /**
+ * Calls a constructor with zero or more parameters.
+ */
+ static Object newInstance(Class<?> clazz, Object... params)
+ throws ReflectiveOperationException {
+ Constructor<?> constructor = findConstructor(clazz, params);
+ constructor.setAccessible(true);
+ return constructor.newInstance(params);
+ }
+
+ private static Field findField(Object instance, String name) throws NoSuchFieldException {
+ boolean isStatic = instance instanceof Class;
+ Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+ for (; clazz != null; clazz = clazz.getSuperclass()) {
+ try {
+ return clazz.getDeclaredField(name);
+ } catch (NoSuchFieldException e) {
+ // Need to look in the super class.
+ }
+ }
+ throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass());
+ }
+
+ private static Method findMethod(Class<?> clazz, String name, Object... params)
+ throws NoSuchMethodException {
+ for (; clazz != null; clazz = clazz.getSuperclass()) {
+ for (Method method : clazz.getDeclaredMethods()) {
+ if (method.getName().equals(name)
+ && areParametersCompatible(method.getParameterTypes(), params)) {
+ return method;
+ }
+ }
+ }
+ throw new NoSuchMethodException("Method " + name + " with parameters "
+ + Arrays.asList(params) + " not found in " + clazz);
+ }
+
+ private static Constructor<?> findConstructor(Class<?> clazz, Object... params)
+ throws NoSuchMethodException {
+ for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+ if (areParametersCompatible(constructor.getParameterTypes(), params)) {
+ return constructor;
+ }
+ }
+ throw new NoSuchMethodException("Constructor with parameters " + Arrays.asList(params)
+ + " not found in " + clazz);
+ }
+
+ private static boolean areParametersCompatible(Class<?>[] paramTypes, Object... params) {
+ if (params.length != paramTypes.length) {
+ return false;
+ }
+ for (int i = 0; i < params.length; i++) {
+ if (!isAssignableFrom(paramTypes[i], params[i])) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private static boolean isAssignableFrom(Class<?> left, Object right) {
+ if (right == null) {
+ return !left.isPrimitive();
+ }
+ Class<?> rightClazz = right.getClass();
+ if (left.isPrimitive()) {
+ // TODO(agrieve): Fill in the rest as needed.
+ return left == boolean.class && rightClazz == Boolean.class
+ || left == int.class && rightClazz == Integer.class;
+ }
+ return left.isAssignableFrom(rightClazz);
+ }
+}
diff --git a/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
new file mode 100644
index 0000000000..3e0df0521e
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
@@ -0,0 +1,12 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+
+/**
+ * Exists to support an app having multiple instrumentations.
+ */
+public final class SecondInstrumentation extends Instrumentation {}
diff --git a/deps/v8/build/android/incremental_install/write_installer_json.py b/deps/v8/build/android/incremental_install/write_installer_json.py
new file mode 100755
index 0000000000..75bd6d1aab
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/write_installer_json.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a .json file with the per-apk details for an incremental install."""
+
+import argparse
+import json
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output-path',
+ help='Output path for .json file.',
+ required=True)
+ parser.add_argument('--apk-path',
+ help='Path to .apk relative to output directory.',
+ required=True)
+ parser.add_argument('--split',
+ action='append',
+ dest='split_globs',
+ default=[],
+ help='A glob matching the apk splits. '
+ 'Can be specified multiple times.')
+ parser.add_argument('--native-libs-list',
+ action='append',
+ default=[],
+ help='GN-list of paths to native libraries relative to '
+ 'output directory. Can be repeated.')
+ parser.add_argument('--dex-file',
+ action='append',
+ default=[],
+ dest='dex_files',
+ help='.dex file to include relative to output directory. '
+ 'Can be repeated')
+ parser.add_argument('--dex-file-list',
+ help='GN-list of dex paths relative to output directory.')
+ parser.add_argument('--show-proguard-warning',
+ action='store_true',
+ default=False,
+ help='Print a warning about proguard being disabled')
+ parser.add_argument('--dont-even-try',
+ help='Prints the given message and exits.')
+
+ options = parser.parse_args(args)
+ options.dex_files += build_utils.ParseGnList(options.dex_file_list)
+ all_libs = []
+ for gn_list in options.native_libs_list:
+ all_libs.extend(build_utils.ParseGnList(gn_list))
+ options.native_libs_list = all_libs
+ return options
+
+
+def main(args):
+ options = _ParseArgs(args)
+
+ data = {
+ 'apk_path': options.apk_path,
+ 'native_libs': options.native_libs_list,
+ 'dex_files': options.dex_files,
+ 'dont_even_try': options.dont_even_try,
+ 'show_proguard_warning': options.show_proguard_warning,
+ 'split_globs': options.split_globs,
+ }
+
+ with build_utils.AtomicOutput(options.output_path) as f:
+ json.dump(data, f, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/deps/v8/build/android/incremental_install/write_installer_json.pydeps b/deps/v8/build/android/incremental_install/write_installer_json.pydeps
new file mode 100644
index 0000000000..851e6c5bd1
--- /dev/null
+++ b/deps/v8/build/android/incremental_install/write_installer_json.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py
+../../gn_helpers.py
+../gyp/util/__init__.py
+../gyp/util/build_utils.py
+../gyp/util/md5_check.py
+write_installer_json.py
diff --git a/deps/v8/build/android/lighttpd_server.py b/deps/v8/build/android/lighttpd_server.py
new file mode 100755
index 0000000000..c77d740d66
--- /dev/null
+++ b/deps/v8/build/android/lighttpd_server.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+ lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+ """Wraps lighttpd server, providing robust startup.
+
+ Args:
+ document_root: Path to root of this server's hosted files.
+ port: TCP port on the _host_ machine that the server will listen on. If
+ omitted it will attempt to use 9000, or if unavailable it will find
+ a free port from 8001 - 8999.
+ lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+ base_config_path: If supplied this file will replace the built-in default
+ lighttpd config file.
+ extra_config_contents: If specified, this string will be appended to the
+ base config (default built-in, or from base_config_path).
+ config_path, error_log, access_log: Optional paths where the class should
+ place temporary files for this session.
+ """
+
+ def __init__(self, document_root, port=None,
+ lighttpd_path=None, lighttpd_module_path=None,
+ base_config_path=None, extra_config_contents=None,
+ config_path=None, error_log=None, access_log=None):
+ self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+ self.document_root = os.path.abspath(document_root)
+ self.fixed_port = port
+ self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+ self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+ self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+ self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+ self.base_config_path = base_config_path
+ self.extra_config_contents = extra_config_contents
+ self.config_path = config_path or self._Mktmp('config')
+ self.error_log = error_log or self._Mktmp('error_log')
+ self.access_log = access_log or self._Mktmp('access_log')
+ self.pid_file = self._Mktmp('pid_file')
+ self.process = None
+
+ def _Mktmp(self, name):
+ return os.path.join(self.temp_dir, name)
+
+ @staticmethod
+ def _GetRandomPort():
+ # The ports of the test server are arranged in constants.py.
+ return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+ constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+ def StartupHttpServer(self):
+ """Starts up a http server with specified document root and port."""
+ # If we want a specific port, make sure no one else is listening on it.
+ if self.fixed_port:
+ self._KillProcessListeningOnPort(self.fixed_port)
+ while True:
+ if self.base_config_path:
+ # Read the config
+ with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+ config_contents = f.read()
+ else:
+ config_contents = self._GetDefaultBaseConfig()
+ if self.extra_config_contents:
+ config_contents += self.extra_config_contents
+ # Write out the config, filling in placeholders from the members of |self|
+ with codecs.open(self.config_path, 'w', 'utf-8') as f:
+ f.write(config_contents % self.__dict__)
+ if (not os.path.exists(self.lighttpd_path) or
+ not os.access(self.lighttpd_path, os.X_OK)):
+ raise EnvironmentError(
+ 'Could not find lighttpd at %s.\n'
+ 'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+ % self.lighttpd_path)
+ # pylint: disable=no-member
+ self.process = pexpect.spawn(self.lighttpd_path,
+ ['-D', '-f', self.config_path,
+ '-m', self.lighttpd_module_path],
+ cwd=self.temp_dir)
+ client_error, server_error = self._TestServerConnection()
+ if not client_error:
+ assert int(open(self.pid_file, 'r').read()) == self.process.pid
+ break
+ self.process.close()
+
+ if self.fixed_port or 'in use' not in server_error:
+ print 'Client error:', client_error
+ print 'Server error:', server_error
+ return False
+ self.port = self._GetRandomPort()
+ return True
+
+ def ShutdownHttpServer(self):
+ """Shuts down our lighttpd processes."""
+ if self.process:
+ self.process.terminate()
+ shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+ def _TestServerConnection(self):
+ # Wait for server to start
+ server_msg = ''
+ for timeout in xrange(1, 5):
+ client_error = None
+ try:
+ with contextlib.closing(httplib.HTTPConnection(
+ '127.0.0.1', self.port, timeout=timeout)) as http:
+ http.set_debuglevel(timeout > 3)
+ http.request('HEAD', '/')
+ r = http.getresponse()
+ r.read()
+ if (r.status == 200 and r.reason == 'OK' and
+ r.getheader('Server') == self.server_tag):
+ return (None, server_msg)
+ client_error = ('Bad response: %s %s version %s\n ' %
+ (r.status, r.reason, r.version) +
+ '\n '.join([': '.join(h) for h in r.getheaders()]))
+ except (httplib.HTTPException, socket.error) as client_error:
+ pass # Probably too quick connecting: try again
+ # Check for server startup error messages
+ # pylint: disable=no-member
+ ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+ timeout=timeout)
+ if ix == 2: # stdout spew from the server
+ server_msg += self.process.match.group(0) # pylint: disable=no-member
+ elif ix == 1: # EOF -- server has quit so giveup.
+ client_error = client_error or 'Server exited'
+ break
+ return (client_error or 'Timeout', server_msg)
+
+ @staticmethod
+ def _KillProcessListeningOnPort(port):
+ """Checks if there is a process listening on port number |port| and
+ terminates it if found.
+
+ Args:
+ port: Port number to check.
+ """
+ if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+ # Give the process some time to terminate and check that it is gone.
+ time.sleep(2)
+ assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+ 'Unable to kill process listening on port %d.' % port
+
+ @staticmethod
+ def _GetDefaultBaseConfig():
+ return """server.tag = "%(server_tag)s"
+server.modules = ( "mod_access",
+ "mod_accesslog",
+ "mod_alias",
+ "mod_cgi",
+ "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names = ( "index.php", "index.pl", "index.cgi",
+ "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign = (
+ ".gif" => "image/gif",
+ ".jpg" => "image/jpeg",
+ ".jpeg" => "image/jpeg",
+ ".png" => "image/png",
+ ".svg" => "image/svg+xml",
+ ".css" => "text/css",
+ ".html" => "text/html",
+ ".htm" => "text/html",
+ ".xhtml" => "application/xhtml+xml",
+ ".xhtmlmp" => "application/vnd.wap.xhtml+xml",
+ ".js" => "application/x-javascript",
+ ".log" => "text/plain",
+ ".conf" => "text/plain",
+ ".text" => "text/plain",
+ ".txt" => "text/plain",
+ ".dtd" => "text/xml",
+ ".xml" => "text/xml",
+ ".manifest" => "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr = "enable"
+
+##
+# which extensions should not be handle via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate = "enable"
+#dir-listing.encoding = "iso-8859-2"
+#dir-listing.external-css = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header = "enable"
+#debug.log-response-header = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found = "enable"
+
+#### SSL engine
+#ssl.engine = "enable"
+#ssl.pemfile = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi" => "/usr/bin/env",
+ ".pl" => "/usr/bin/env",
+ ".asis" => "/bin/cat",
+ ".php" => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+ server = LighttpdServer(*argv[1:])
+ try:
+ if server.StartupHttpServer():
+ raw_input('Server running at http://127.0.0.1:%s -'
+ ' press Enter to exit it.' % server.port)
+ else:
+ print 'Server exit code:', server.process.exitstatus
+ finally:
+ server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/deps/v8/build/android/lint/OWNERS b/deps/v8/build/android/lint/OWNERS
new file mode 100644
index 0000000000..f47bc2f63a
--- /dev/null
+++ b/deps/v8/build/android/lint/OWNERS
@@ -0,0 +1,2 @@
+estevenson@chromium.org
+wnwen@chromium.org
diff --git a/deps/v8/build/android/lint/suppress.py b/deps/v8/build/android/lint/suppress.py
new file mode 100755
index 0000000000..a3719c18fe
--- /dev/null
+++ b/deps/v8/build/android/lint/suppress.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Add all generated lint_result.xml files to suppressions.xml"""
+
+# pylint: disable=no-member
+
+
+import argparse
+import collections
+import os
+import re
+import sys
+from xml.dom import minidom
+
+_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(_BUILD_ANDROID_DIR)
+
+from pylib.constants import host_paths
+
+_TMP_DIR_RE = re.compile(r'^/tmp/.*/(SRC_ROOT[0-9]+|PRODUCT_DIR)/')
+_THIS_FILE = os.path.abspath(__file__)
+_DEFAULT_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE),
+ 'suppressions.xml')
+_DOC = (
+ '\nSTOP! It looks like you want to suppress some lint errors:\n'
+ '- Have you tried identifing the offending patch?\n'
+ ' Ask the author for a fix and/or revert the patch.\n'
+ '- It is preferred to add suppressions in the code instead of\n'
+ ' sweeping it under the rug here. See:\n\n'
+ ' http://developer.android.com/tools/debugging/improving-w-lint.html\n'
+ '\n'
+ 'Still reading?\n'
+ '- You can edit this file manually to suppress an issue\n'
+ ' globally if it is not applicable to the project.\n'
+ '- You can also automatically add issues found so for in the\n'
+ ' build process by running:\n\n'
+ ' ' + os.path.relpath(_THIS_FILE, host_paths.DIR_SOURCE_ROOT) + '\n\n'
+ ' which will generate this file (Comments are not preserved).\n'
+ ' Note: PRODUCT_DIR will be substituted at run-time with actual\n'
+ ' directory path (e.g. out/Debug)\n'
+)
+
+
+_Issue = collections.namedtuple('Issue', ['severity', 'paths', 'regexps'])
+
+
+def _ParseConfigFile(config_path):
+  """Parse an existing suppressions XML file.
+
+  Args:
+    config_path: Path to the suppressions.xml config file.
+
+  Returns:
+    Dict mapping each <issue> id to an _Issue(severity, paths, regexps).
+  """
+  print 'Parsing %s' % config_path
+  issues_dict = {}
+  dom = minidom.parse(config_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    # getAttribute() returns '' when the attribute is absent.
+    severity = issue.getAttribute('severity')
+
+    # <ignore> children carry either a 'path' or a 'regexp' attribute;
+    # attributes.get() yields None for absent ones, filtered by the `if`.
+    path_elements = (
+        p.attributes.get('path')
+        for p in issue.getElementsByTagName('ignore'))
+    paths = set(p.value for p in path_elements if p)
+
+    regexp_elements = (
+        p.attributes.get('regexp')
+        for p in issue.getElementsByTagName('ignore'))
+    regexps = set(r.value for r in regexp_elements if r)
+
+    issues_dict[issue_id] = _Issue(severity, paths, regexps)
+  return issues_dict
+
+
+def _ParseAndMergeResultFile(result_path, issues_dict):
+  """Merge issues from a lint_result.xml file into issues_dict, in place.
+
+  Args:
+    result_path: Path to a lint results XML file.
+    issues_dict: Dict of issue id -> _Issue, updated in place. New issue ids
+      are added with the severity lint reported; existing entries keep their
+      configured severity and just gain a regexp suppression.
+  """
+  print 'Parsing and merging %s' % result_path
+  dom = minidom.parse(result_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.attributes['severity'].value
+    path = issue.getElementsByTagName('location')[0].attributes['file'].value
+    # Strip temporary file path.
+    path = re.sub(_TMP_DIR_RE, '', path)
+    # Escape Java inner class name separator and suppress with regex instead
+    # of path. Doesn't use re.escape() as it is a bit too aggressive and
+    # escapes '_', causing trouble with PRODUCT_DIR.
+    regexp = path.replace('$', r'\$')
+    if issue_id not in issues_dict:
+      issues_dict[issue_id] = _Issue(severity, set(), set())
+    issues_dict[issue_id].regexps.add(regexp)
+
+
+def _WriteConfigFile(config_path, issues_dict):
+ new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None)
+ top_element = new_dom.documentElement
+ top_element.appendChild(new_dom.createComment(_DOC))
+ for issue_id, issue in sorted(issues_dict.iteritems(), key=lambda i: i[0]):
+ issue_element = new_dom.createElement('issue')
+ issue_element.attributes['id'] = issue_id
+ if issue.severity:
+ issue_element.attributes['severity'] = issue.severity
+ if issue.severity == 'ignore':
+ print 'Warning: [%s] is suppressed globally.' % issue_id
+ else:
+ for path in sorted(issue.paths):
+ ignore_element = new_dom.createElement('ignore')
+ ignore_element.attributes['path'] = path
+ issue_element.appendChild(ignore_element)
+ for regexp in sorted(issue.regexps):
+ ignore_element = new_dom.createElement('ignore')
+ ignore_element.attributes['regexp'] = regexp
+ issue_element.appendChild(ignore_element)
+ top_element.appendChild(issue_element)
+
+ with open(config_path, 'w') as f:
+ f.write(new_dom.toprettyxml(indent=' ', encoding='utf-8'))
+ print 'Updated %s' % config_path
+
+
+def _Suppress(config_path, result_path):
+  """Merge the lint results at result_path into the config and rewrite it.
+
+  Args:
+    config_path: Suppressions XML file; read, merged into, and overwritten.
+    result_path: Lint results XML file to merge in.
+  """
+  issues_dict = _ParseConfigFile(config_path)
+  _ParseAndMergeResultFile(result_path, issues_dict)
+  _WriteConfigFile(config_path, issues_dict)
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--config',
+ help='Path to suppression.xml config file',
+ default=_DEFAULT_CONFIG_PATH)
+ parser.add_argument('result_path',
+ help='Lint results xml file',
+ metavar='RESULT_FILE')
+ args = parser.parse_args()
+
+ _Suppress(args.config, args.result_path)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/deps/v8/build/android/lint/suppressions.xml b/deps/v8/build/android/lint/suppressions.xml
new file mode 100644
index 0000000000..ed0f8c3211
--- /dev/null
+++ b/deps/v8/build/android/lint/suppressions.xml
@@ -0,0 +1,404 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+ <!--
+STOP! It looks like you want to suppress some lint errors:
+- Have you tried identifying the offending patch?
+ Ask the author for a fix and/or revert the patch.
+- It is preferred to add suppressions in the code instead of
+ sweeping it under the rug here. See:
+
+ http://developer.android.com/tools/debugging/improving-w-lint.html
+
+Still reading?
+- You can edit this file manually to suppress an issue
+ globally if it is not applicable to the project.
+- You can also automatically add issues found so far in the
+ build process by running:
+
+ build/android/lint/suppress.py
+
+ which will generate this file (Comments are not preserved).
+ Note: PRODUCT_DIR will be substituted at run-time with actual
+ directory path (e.g. out/Debug)
+-->
+ <!-- AllowBackup defaults to true, and causes a lint warning if not explicitly set. -->
+ <issue id="AllowBackup">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/804427): Remove this suppression or add rationale. -->
+ <issue id="AppCompatResource" severity="ignore"/>
+ <!-- We use asserts in Chromium. See https://chromium.googlesource.com/chromium/src/+/master/styleguide/java/java.md#Asserts -->
+ <issue id="Assert" severity="ignore"/>
+ <issue id="AuthLeak" severity="Error">
+ <ignore regexp="chrome/android/javatests"/>
+ </issue>
+ <issue id="BadHostnameVerifier" severity="Error">
+ </issue>
+ <issue id="ButtonOrder" severity="Error">
+ <ignore regexp="chrome/android/java/res/layout/homepage_editor.xml"/>
+ </issue>
+ <issue id="ButtonStyle" severity="Error">
+ <ignore regexp="remoting/android/host/res/layout/main.xml"/>
+ </issue>
+ <!-- Found in generated android_chrome_strings.xml. -->
+ <issue id="ByteOrderMark" severity="Error">
+ <ignore regexp="values-pt-rBR/android_chrome_strings.xml"/>
+ </issue>
+ <issue id="ClickableViewAccessibility" severity="ignore"/>
+ <issue id="CommitPrefEdits">
+ <ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/AndroidChannelPreferences.java"/>
+ </issue>
+ <issue id="ContentDescription" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="DefaultLocale">
+ <ignore regexp="clank"/>
+ <ignore regexp="com/android/tv"/>
+ <ignore regexp="org/chromium/chrome/browser/payments/PaymentRequestMetricsTest.class"/>
+ <ignore regexp="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/>
+ </issue>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <issue id="Deprecated" severity="Error">
+ <ignore regexp="android:singleLine"/>
+ <ignore regexp="AndroidManifest.xml"/>
+ <ignore regexp="/test/"/>
+ </issue>
+ <issue id="DrawAllocation">
+ <ignore regexp="content/public/android/java/src/org/chromium/content/browser/ContentViewRenderView.java"/>
+ <ignore regexp="content/public/android/java/src/org/chromium/content/browser/PopupZoomer.java"/>
+ </issue>
+ <!-- TODO(crbug.com/804432): Remove this and fix the offending xml files. -->
+ <issue id="EllipsizeMaxLines" severity="ignore"/>
+ <issue id="ExifInterface">
+ <!-- TODO(crbug.com/804438): Cannot update until android.media.ExifInterface supports file descriptors -->
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/photo_picker/BitmapUtils.java"/>
+ </issue>
+ <issue id="ExportedContentProvider">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="ExportedService" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <issue id="GoogleAppIndexingUrlError" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <issue id="GoogleAppIndexingWarning" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <issue id="HandlerLeak">
+ <ignore regexp="android_webview/glue/java/src/com/android/webview/chromium/WebViewContentsClientAdapter.java" />
+ <ignore regexp="chromecast/internal" />
+ <ignore regexp="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java" />
+ </issue>
+ <issue id="HardcodedDebugMode" severity="Fatal">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="HardcodedText" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="remoting/android/host/res/layout/main.xml"/>
+ </issue>
+ <issue id="IconColors" severity="Error">
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-hdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-mdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xhdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxhdpi/notification_icon.png"/>
+ <ignore regexp="tools/android/audio_focus_grabber/java/res/drawable-xxxhdpi/notification_icon.png"/>
+ </issue>
+ <issue id="IconDensities">
+ <!-- This is intentional to save on WebAPKs' size. -->
+ <ignore regexp="chrome/android/webapk/shell_apk/res/drawable-*"/>
+ <!-- crbug.com/457918 is tracking missing assets -->
+ <ignore regexp="chrome/android/java/res/drawable-xxhdpi"/>
+ <ignore regexp="chrome/android/java/res/drawable-xxxhdpi"/>
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="content/public/android/java/res/drawable-xxhdpi"/>
+ <ignore regexp="content/public/android/java/res/drawable-xxxhdpi"/>
+ <ignore regexp="ui/android/java/res/drawable-xxhdpi"/>
+ <ignore regexp="ui/android/java/res/drawable-xxxhdpi"/>
+ <!-- This is intentional to reduce APK size. See: http://crrev/c/1352161 -->
+ <ignore regexp="chrome/android/features/autofill_assistant/java/res/drawable-*"/>
+ </issue>
+ <issue id="IconDipSize">
+ <ignore regexp="chromecast/internal"/>
+ <!-- These only need to be 1px for all densities. See: crbug.com/804449 -->
+ <ignore regexp="chrome/android/java/res/.*tab_strip_fade"/>
+ </issue>
+ <issue id="IconDuplicates" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="IconDuplicatesConfig" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="IconLauncherFormat" severity="ignore">
+ <!-- TODO(crbug.com/739746): Remove after lint version has been updated. -->
+ <ignore regexp="remoting/android/java/res/mipmap-anydpi-v26/ic_launcher.xml"/>
+ </issue>
+ <issue id="IconLauncherShape" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="chrome/android/webapk/shell_apk/res/mipmap-mdpi/ic_launcher_background.png"/>
+ </issue>
+ <issue id="IconLocation">
+ <ignore regexp="chromecast/internal"/>
+ <!-- This is just for testing -->
+ <ignore regexp="chrome/test/chromedriver/test/webview_shell/java/res/drawable/icon.png"/>
+ <!-- Memconsumer is only for tooling -->
+ <ignore regexp="tools/android/memconsumer/java/res/drawable/"/>
+ <!-- It is OK for content_shell_apk to have missing assets. -->
+ <ignore regexp="content/shell/android/java/res/"/>
+ </issue>
+ <issue id="IconMissingDensityFolder">
+ <!-- see crbug.com/542435 -->
+ <ignore regexp="android_webview/apk/java/res"/>
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res"/>
+ <ignore regexp="chrome/android/webapk/shell_apk/res"/>
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="tools/android/push_apps_to_background/res"/>
+ <ignore regexp="ui/android/java/res"/>
+ <!-- crbug.com/457918 is tracking missing assets -->
+ <ignore regexp="components/embedder_support/android/java/res"/>
+ </issue>
+ <issue id="ImpliedQuantity" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd"/>
+ </issue>
+ <issue id="InconsistentArrays" severity="Error">
+ <ignore regexp="android_webview/locale_paks.resources.zip/values/locale-paks.xml"/>
+ <ignore regexp="chrome/android/chrome_locale_paks.resources.zip/values/locale-paks.xml"/>
+ <ignore regexp="preloaded_fonts.xml"/>
+ </issue>
+ <issue id="InconsistentLayout" severity="ignore"/>
+ <issue id="InefficientWeight" severity="Error">
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
+ </issue>
+ <issue id="InflateParams" severity="ignore"/>
+ <issue id="InlinedApi" severity="ignore"/>
+ <issue id="InvalidPackage" severity="Error">
+ <ignore regexp="espresso/espresso_core_java.interface.jar"/>
+ </issue>
+ <issue id="InvalidVectorPath" severity="ignore"/>
+ <issue id="LabelFor" severity="Error">
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/804453): Remove this after fixing. -->
+ <issue id="KeyboardInaccessibleWidget" severity="ignore"/>
+ <issue id="LintError">
+ <!-- We no longer supply class files to lint. -->
+ <ignore regexp="No `.class` files were found in project"/>
+ </issue>
+ <issue id="LogConditional" severity="ignore"/>
+ <issue id="LongLogTag" severity="ignore"/>
+ <issue id="MissingApplicationIcon" severity="ignore"/>
+ <issue id="MissingDefaultResource">
+ <!-- Only used by ToolbarControlContainer guarded by tablet form-factor. -->
+ <ignore regexp="toolbar_background.9.png"/>
+ <!-- Only used by FirstRunFlowSequencer guarded by tablet form-factor. -->
+ <ignore regexp="window_background.xml"/>
+ </issue>
+ <issue id="MissingPermission" severity="ignore"/>
+ <!-- TODO(yolandyan) remove this once all tests are converted to junit4 -->
+ <issue id="MissingPrefix" severity="ignore"/>
+ <!--
+ TODO(estevenson) remove this once translations are added for
+ IDS_ACCESSIBILITY_TOOLBAR_BTN_TABSWITCHER_TOGGLE (http://crbug.com/635677)
+ -->
+ <issue id="MissingQuantity">
+ <ignore regexp="android_chrome_strings.xml"/>
+ </issue>
+ <issue id="MissingRegistered" severity="ignore"/>
+ <issue id="MissingSuperCall" severity="Error">
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/selection/SelectionToolbar.java"/>
+ </issue>
+ <issue id="MissingTranslation">
+ <!-- http://crbug.com/450548 -->
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="restriction_values.xml.*"/>
+ </issue>
+ <issue id="MissingVersion">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="NewApi">
+ <!-- Do not add new suppressions without rationale. -->
+ <!-- 2 AutoCloseable has been available since API 15, just hidden. -->
+ <ignore regexp="Class requires API level 19.*java.lang.AutoCloseable"/>
+ <ignore regexp="Call requires API level 19.*java.lang.AutoCloseable#close"/>
+ <!-- 1 We support requireNonNull via desugar. -->
+ <ignore regexp="Call requires API level 19.*`java.util.Objects#requireNonNull`"/>
+ <!-- 2 We support try-with-resources via desugar. -->
+ <ignore regexp="Try-with-resources requires API level 19"/>
+ <ignore regexp="Call requires API level 19.*`java.lang.Throwable#addSuppressed`"/>
+ <!-- 1 We support default methods via desugar. -->
+ <ignore regexp="Default method requires API level 24"/>
+ <!-- 1 We support static interface methods via desugar. -->
+ <ignore regexp="Static interface method requires API level 24"/>
+ <!-- 1 This is for testonly target android_support_chromium_java. -->
+ <ignore regexp="third_party/android_tools/sdk/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/>
+ <!-- 1 This is for testonly target android_support_chromium_java in android_sdk. -->
+ <ignore regexp="third_party/android_sdk/public/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java"/>
+ <!-- Endnote: Please specify number of suppressions when adding more -->
+ </issue>
+ <!-- This warning just adds a lot of false positives. -->
+ <issue id="ObsoleteSdkInt" severity="ignore"/>
+ <issue id="OldTargetApi">
+ <ignore path="AndroidManifest.xml"/>
+ </issue>
+ <issue id="Overdraw" severity="ignore"/>
+ <issue id="Override">
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="org/chromium/content/browser/input/ThreadedInputConnection.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="com/android/webview/chromium/ContentSettingsAdapter.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="com/android/webview/chromium/ServiceWorkerControllerAdapter.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="com/android/webview/chromium/ServiceWorkerSettingsAdapter.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="org/chromium/chrome/browser/ChromeActivity.class"/>
+ <!-- TODO(crbug.com/635567): Fix this properly. -->
+ <ignore regexp="org/chromium/chrome/browser/ChromeTabbedActivity.class"/>
+ </issue>
+ <issue id="PackageManagerGetSignatures">
+ <ignore regexp="chrome/android/webapk/libs/client/src/org/chromium/webapk/lib/client/WebApkValidator.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/browserservices/OriginVerifier.java"/>
+ </issue>
+ <issue id="PluralsCandidate" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-en-rGB/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/>
+ </issue>
+ <issue id="PrivateApi" severity="ignore"/>
+ <!-- Chrome is a system app. -->
+ <issue id="ProtectedPermissions" severity="ignore"/>
+ <issue id="Recycle" severity="ignore"/>
+ <issue id="Registered" severity="ignore"/>
+ <issue id="ResourceAsColor" severity="ignore"/>
+ <issue id="ResourceType" severity="Error">
+ <ignore regexp="/javatests/"/>
+ </issue>
+ <!-- TODO(crbug.com/831774): Play Services starts complaining about RestrictedApi. Needs investigation -->
+ <issue id="RestrictedApi" severity="ignore"/>
+ <issue id="RtlCompat" severity="ignore"/>
+ <issue id="RtlEnabled" severity="ignore"/>
+ <issue id="RtlSymmetry" severity="ignore"/>
+ <issue id="SetJavaScriptEnabled" severity="ignore"/>
+ <issue id="SignatureOrSystemPermissions" severity="ignore"/>
+ <issue id="SpUsage" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="StaticFieldLeak">
+ <!-- Nice to fix, but not necessary or performance critical. -->
+ <ignore regexp="This AsyncTask class should be static or leaks might occur"/>
+ </issue>
+ <issue id="StringFormatCount" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-fr/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values/android_chrome_strings.xml"/>
+ </issue>
+ <issue id="StringFormatInvalid" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-da/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-et/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pl/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-pt-rBR/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-sv/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-tl/android_chrome_strings.xml"/>
+ </issue>
+ <!-- We have many C++ enums that we don't care about in java -->
+ <issue id="SwitchIntDef" severity="ignore"/>
+ <issue id="TextFields" severity="Error">
+ <ignore regexp="chromecast/internal"/>
+ </issue>
+ <issue id="TypographyDashes" severity="Error">
+ <ignore regexp="chrome/app/policy/android/values-v21/restriction_values.xml"/>
+ </issue>
+ <!-- Typos check disabled due to lint bug: http://crbug.com/671170 -->
+ <issue id="Typos" severity="ignore" />
+ <issue id="UnusedAttribute" severity="ignore"/>
+ <issue id="UnusedIds" severity="ignore"/>
+ <issue id="UnusedQuantity" severity="Error">
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-cs/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-in/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-ja/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-ko/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-lt/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-ms/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-sk/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-th/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-vi/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-zh-rCN/android_chrome_strings.xml"/>
+ <ignore regexp="chrome/android/chrome_strings_grd.resources.zip/values-zh-rTW/android_chrome_strings.xml"/>
+ </issue>
+ <!-- Our generated enums are allowed to have the same values. -->
+ <issue id="UniqueConstants" severity="ignore"/>
+ <issue id="UnusedResources">
+ <!-- Do not add new suppressions without rationale. -->
+ <!-- 3 raw resources are accessed by URL in various places -->
+ <ignore regexp="PRODUCT_DIR/gen/remoting/android/.*/res/raw/credits.html"/>
+ <ignore regexp="PRODUCT_DIR/gen/remoting/android/.*/res/raw/credits_css.css"/>
+ <ignore regexp="PRODUCT_DIR/gen/remoting/android/.*/res/raw/credits_js.js"/>
+ <!-- 1 string test only, used in CronetSmokeTestCase dynamically -->
+ <ignore regexp="R.string.TestSupportImplClass"/>
+ <!-- 1 string used by Android's policies system, pulled from app directly -->
+ <ignore regexp="restriction_values.xml"/>
+ <!-- 3 resources test only, used in webview tests dynamically -->
+ <ignore regexp="android_webview/tools/automated_ui_tests/java/res/layout/"/>
+ <ignore regexp="android_webview/test/shell/res/raw/resource_file.html"/>
+ <ignore regexp="android_webview/test/shell/res/raw/resource_icon.png"/>
+ <!-- 2 resources used by android webview glue layer, could be refactored -->
+ <ignore regexp="android_webview/java/res/drawable-hdpi/ic_play_circle_outline_black_48dp.png"/>
+ <ignore regexp="R.string.private_browsing_warning"/>
+ <!-- 2 resource sets used by clank widgets for each channel -->
+ <ignore regexp="The resource `R.string.bookmark_widget_title.*` appears to be unused"/>
+ <ignore regexp="The resource `R.string.search_widget_title.*` appears to be unused"/>
+ <!-- 1 resource used by android tv to generate resources.zip file -->
+ <ignore regexp="chromecast/internal/shell/browser/android/java/res/drawable-hdpi/ic_settings_cast.png"/>
+ <!-- TODO(crbug.com/909915): Remove this after full Lite mode launch. -->
+ <!-- 12 resources used by Data Saver during rebranding to Lite mode -->
+ <ignore regexp="The resource `R.string..*lite_mode` appears to be unused"/>
+ <!-- Module titles may only be used by the Play Store. -->
+ <ignore regexp="The resource `R.string.*_module_title` appears to be unused"/>
+ <!-- Endnote: Please specify number of suppressions when adding more -->
+ </issue>
+ <issue id="UseCompoundDrawables">
+ <!-- Upscaling 24dp to 48dp doesn't work as expected with a TextView compound drawable. -->
+ <ignore regexp="chrome/android/java/res/layout/photo_picker_bitmap_view.xml"/>
+ </issue>
+ <issue id="UselessParent">
+ <ignore regexp="android_webview/tools/system_webview_shell/apk/res/layout/activity_webview_browser.xml"/>
+ <ignore regexp="chrome/android/java/res/layout/data_usage_breakdown.xml"/>
+ <ignore regexp="chromecast/internal"/>
+ <ignore regexp="tools/android/kerberos/SpnegoAuthenticator/res/layout/activity_account_authenticator.xml"/>
+ </issue>
+ <issue id="UsesMinSdkAttributes" severity="Error">
+ <ignore regexp="AndroidManifest.xml"/>
+ </issue>
+ <!-- TODO(crbug.com/807734): Investigate and possible remove this -->
+ <issue id="UseSparseArrays" severity="ignore"/>
+ <issue id="ValidFragment" severity="Error">
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/BaseMediaRouteDialogManager.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteChooserDialogManager.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/media/router/MediaRouteControllerDialogManager.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/preferences/privacy/OtherFormsOfHistoryDialogFragment.java"/>
+ <ignore regexp="media/capture/content/android/java/src/org/chromium/media/ScreenCapture.java"/>
+ </issue>
+ <issue id="VectorPath" severity="ignore"/>
+ <issue id="ViewConstructor" severity="ignore"/>
+ <issue id="VisibleForTests" severity="Error">
+ <ignore regexp="/javatests/"/>
+ <ignore regexp="/test/"/>
+ <!-- TODO(crbug.com/757124): Remove all these specific Feedback files after underlying issue is resolved -->
+ <!-- Underlying issue is that Android FeedbackOptions.Builder using @VisibleForTesting without 'otherwise='. -->
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/feedback/FeedbackUtil.java"/>
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/feedback/PlayServicesFeedbackReporter.java"/>
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/help/FeedbackCategoryChooserActivity.java"/>
+ <ignore regexp="clank/java/src/com/google/android/apps/chrome/help/HelpAndFeedbackInternal.java"/>
+ </issue>
+ <issue id="WrongCall" severity="ignore"/>
+ <issue id="WrongConstant">
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/bookmarks/BookmarkItemsAdapter.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/instantapps/InstantAppsHandler.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/SSLClientCertificateRequest.java"/>
+ <ignore regexp="chrome/android/java/src/org/chromium/chrome/browser/widget/prefeditor/EditorDialog.java"/>
+ <ignore regexp="third_party/android_data_chart/java/src/org/chromium/third_party/android/datausagechart/ChartDataUsageView.java"/>
+ </issue>
+</lint>
diff --git a/deps/v8/build/android/list_class_verification_failures.py b/deps/v8/build/android/list_class_verification_failures.py
new file mode 100755
index 0000000000..2206f4bfee
--- /dev/null
+++ b/deps/v8/build/android/list_class_verification_failures.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A helper script to list class verification errors.
+
+This is a wrapper around the device's oatdump executable, parsing desired output
+and accommodating API-level-specific details, such as file paths.
+"""
+
+from __future__ import print_function
+
+import argparse
+import exceptions
+import logging
+import os
+import re
+
+import devil_chromium
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+from devil.android.tools import script_common
+from devil.utils import logging_common
+from py_utils import tempfile_ext
+
# Class verification statuses reported by oatdump, after any 'Status' prefix
# (present on some API levels) has been stripped by
# ListClassesAndVerificationStatus().
STATUSES = [
    'NotReady',
    'RetryVerificationAtRuntime',
    'Verified',
    'Initialized',
    'SuperclassValidated',
]
+
+
def DetermineDeviceToUse(devices):
  """Picks the single device this script should operate on.

  Like DeviceUtils.HealthyDevices(), but insists on exactly one device.

  Args:
    devices: A (possibly empty) list of serial numbers, such as from the
      --device flag.
  Returns:
    A single device_utils.DeviceUtils instance.
  Raises:
    device_errors.NoDevicesError: Raised when no non-blacklisted devices exist.
    device_errors.MultipleDevicesError: Raised when multiple devices exist but
      |devices| does not narrow the choice down to one.
  """
  # An empty selection means "whatever is attached"; HealthyDevices treats
  # None as exactly that and raises MultipleDevicesError itself if several
  # devices are attached.
  device_arg = devices if devices else None
  healthy = device_utils.DeviceUtils.HealthyDevices(device_arg=device_arg)
  # Even an explicit multi-device selection is rejected: this script only
  # supports a single device at a time.
  if len(healthy) > 1:
    raise device_errors.MultipleDevicesError(healthy)
  return healthy[0]
+
+
class DeviceOSError(Exception):
  """Raised when a file is missing from the device, or something similar."""
+
+
class UnsupportedDeviceError(Exception):
  """Raised when the device is not supported by this script."""
+
+
def _GetFormattedArch(device):
  """Returns the device ABI, normalized to the on-device oat folder names."""
  abi = device.product_cpu_abi
  # A few ABIs do not map 1:1 onto the oat directory names; translate those
  # and pass everything else through unchanged.
  arch_by_abi = {abis.ARM_64: 'arm64', abis.ARM: 'arm'}
  return arch_by_abi.get(abi, abi)
+
+
def PathToDexForPlatformVersion(device, package_name):
  """Gets the full path to the dex file on the device.

  Args:
    device: A device_utils.DeviceUtils instance.
    package_name: Full package name of the installed application.
  Returns:
    The on-device path of the odex/dex file for the app's base.apk.
  Raises:
    DeviceOSError: If the app is not installed, has an ambiguous install
      path, or its odex file cannot be found.
    UnsupportedDeviceError: If the device's API level predates Lollipop.
  """
  sdk_level = device.build_version_sdk
  paths_to_apk = device.GetApplicationPaths(package_name)
  if not paths_to_apk:
    raise DeviceOSError(
        'Could not find data directory for {}. Is it installed?'.format(
            package_name))
  if len(paths_to_apk) != 1:
    raise DeviceOSError(
        'Expected exactly one path for {} but found {}'.format(
            package_name,
            paths_to_apk))
  path_to_apk = paths_to_apk[0]

  if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1:
    # Of the form "com.example.foo-\d", where \d is some digit (usually 1 or 2)
    package_with_suffix = os.path.basename(os.path.dirname(path_to_apk))
    # NOTE(review): the dalvik-cache subdirectory is hard-coded to 'arm';
    # presumably only 32-bit arm Lollipop devices are supported here — confirm
    # before using with arm64/x86 L devices.
    dalvik_prefix = '/data/dalvik-cache/arm'
    odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format(
        prefix=dalvik_prefix,
        package=package_with_suffix)
  elif sdk_level >= version_codes.MARSHMALLOW:
    # M+ keeps per-ABI odex files in an oat/ directory next to the APK.
    arch = _GetFormattedArch(device)
    odex_file = '{data_dir}/oat/{arch}/base.odex'.format(
        data_dir=os.path.dirname(path_to_apk), arch=arch)
  else:
    raise UnsupportedDeviceError('Unsupported API level: {}'.format(sdk_level))

  odex_file_exists = device.FileExists(odex_file)
  if odex_file_exists:
    return odex_file
  elif sdk_level >= version_codes.PIE:
    # On P+, debuggable apps only get an odex after an explicit dex2oat run,
    # so give a more actionable error message.
    raise DeviceOSError(
        'Unable to find odex file: you must run dex2oat on debuggable apps '
        'on >= P after installation.')
  raise DeviceOSError('Unable to find odex file')
+
+
def _AdbOatDumpForPackage(device, package_name, out_file):
  """Runs oatdump on the device.

  Args:
    device: A device_utils.DeviceUtils instance.
    package_name: Package whose odex file should be dumped.
    out_file: On-device path that receives oatdump's output.
  """
  # Get the path to the odex file.
  odex_file = PathToDexForPlatformVersion(device, package_name)
  # NOTE(review): a list-form command combined with shell=True is unusual for
  # devil's RunShellCommand (shell=True normally takes a single string) —
  # confirm this is intended.
  device.RunShellCommand(['oatdump',
                          '--oat-file=' + odex_file,
                          '--output=' + out_file],
                         shell=True, check_return=True)
+
+
class JavaClass(object):
  """A Java class name paired with its ART class verification status."""

  def __init__(self, name, verification_status):
    # Fully-qualified, dot-separated (possibly deobfuscated) class name.
    self.name = name
    # Status string as reported by oatdump, e.g. 'Verified', with any
    # 'Status' prefix already stripped by the caller.
    self.verification_status = verification_status
+
+
+def _ParseMappingFile(proguard_map_file):
+ """Creates a map of obfuscated names to deobfuscated names."""
+ mappings = {}
+ with open(proguard_map_file, 'r') as f:
+ pattern = re.compile(r'^(\S+) -> (\S+):')
+ for line in f:
+ m = pattern.match(line)
+ if m is not None:
+ deobfuscated_name = m.group(1)
+ obfuscated_name = m.group(2)
+ mappings[obfuscated_name] = deobfuscated_name
+ return mappings
+
+
+def _DeobfuscateJavaClassName(dex_code_name, proguard_mappings):
+ return proguard_mappings.get(dex_code_name, dex_code_name)
+
+
def FormatJavaClassName(dex_code_name, proguard_mappings):
  """Converts a slash-separated dex class name to dotted, deobfuscated form.

  Args:
    dex_code_name: Class name as it appears in oatdump, e.g. 'a/b/C'.
    proguard_mappings: Dict of obfuscated -> deobfuscated names, or None to
      skip deobfuscation entirely.
  """
  dotted_name = dex_code_name.replace('/', '.')
  if proguard_mappings is None:
    return dotted_name
  # Unmapped classes keep their (dotted) name unchanged.
  return proguard_mappings.get(dotted_name, dotted_name)
+
+
def ListClassesAndVerificationStatus(oatdump_output, proguard_mappings):
  """Lists all Java classes in the dex along with verification status.

  Args:
    oatdump_output: Iterable of oatdump output lines.
    proguard_mappings: Dict of obfuscated -> deobfuscated names, or None.
  Returns:
    A list of JavaClass instances, one per matching class line.
  """
  class_line = re.compile(r'\d+: L([^;]+).*\(type_idx=[^(]+\((\w+)\).*')
  results = []
  for line in oatdump_output:
    match = class_line.match(line)
    if match is None:
      continue
    class_name = FormatJavaClassName(match.group(1), proguard_mappings)
    # Some platform levels prefix this with "Status" while other levels do
    # not. Strip this for consistency.
    status = match.group(2).replace('Status', '')
    results.append(JavaClass(class_name, status))
  return results
+
+
def _PrintVerificationResults(target_status, java_classes, show_summary):
  """Prints the names of classes with |target_status|, plus optional counts.

  Args:
    target_status: Status string whose classes are printed by name.
    java_classes: List of JavaClass instances to report on (sorted in place).
    show_summary: When True, also print per-status and total class counts.
  Raises:
    RuntimeError: If a class carries a status not listed in STATUSES.
  """
  # Sort to keep output consistent between runs.
  java_classes.sort(key=lambda c: c.name)
  status_counts = {status: 0 for status in STATUSES}

  for java_class in java_classes:
    if java_class.verification_status == target_status:
      print(java_class.name)
    if java_class.verification_status not in status_counts:
      # BUG FIX: this previously raised exceptions.RuntimeError. The
      # `exceptions` module is Python 2 only (and redundant even there, since
      # its names are builtins); use the builtin directly so the module also
      # works under Python 3.
      raise RuntimeError('Unexpected status: {0}'.format(
          java_class.verification_status))
    else:
      status_counts[java_class.verification_status] += 1

  if show_summary:
    for status, count in status_counts.items():
      print('Total {status} classes: {num}'.format(
          status=status, num=count))
    print('Total number of classes: {num}'.format(
        num=len(java_classes)))
+
+
def RealMain(mapping, device_arg, package, status, hide_summary, workdir):
  """Runs oatdump on the device, pulls its output, parses and prints results.

  Args:
    mapping: Path to a proguard mapping file, or None to skip deobfuscation.
    device_arg: Device serial selection as passed on the command line.
    package: Full application package name to inspect.
    status: Verification status whose classes should be listed by name.
    hide_summary: When True, suppress the per-status summary output.
    workdir: Host directory that receives the pulled oatdump output.
  """
  if mapping is None:
    # BUG FIX: logging.warn is a deprecated alias; use logging.warning.
    logging.warning('Skipping deobfuscation because no map file was provided.')
  device = DetermineDeviceToUse(device_arg)
  device.EnableRoot()
  with device_temp_file.DeviceTempFile(
      device.adb) as file_on_device:
    _AdbOatDumpForPackage(device, package, file_on_device.name)
    file_on_host = os.path.join(workdir, 'out.dump')
    device.PullFile(file_on_device.name, file_on_host)
  proguard_mappings = (_ParseMappingFile(mapping) if mapping else None)
  with open(file_on_host, 'r') as f:
    java_classes = ListClassesAndVerificationStatus(f, proguard_mappings)
  _PrintVerificationResults(status, java_classes, not hide_summary)
+
+
def main():
  """Parses command-line arguments and runs the verification listing."""
  devil_chromium.Initialize()
  parser = argparse.ArgumentParser(description="""
List Java classes in an APK which fail ART class verification.
""")
  parser.add_argument(
      '--package',
      '-P',
      type=str,
      default=None,
      required=True,
      help='Specify the full application package name')
  parser.add_argument(
      '--mapping',
      '-m',
      type=os.path.realpath,
      default=None,
      help='Mapping file for the desired APK to deobfuscate class names')
  parser.add_argument(
      '--hide-summary',
      default=False,
      action='store_true',
      help='Do not output the total number of classes in each Status.')
  parser.add_argument(
      '--status',
      type=str,
      default='RetryVerificationAtRuntime',
      choices=STATUSES,
      help='Which category of classes to list at the end of the script')
  parser.add_argument(
      '--workdir',
      '-w',
      type=os.path.realpath,
      default=None,
      help=('Work directory for oatdump output (default = temporary '
            'directory). If specified, this will not be cleaned up at the end '
            'of the script (useful if you want to inspect oatdump output '
            'manually)'))

  script_common.AddEnvironmentArguments(parser)
  script_common.AddDeviceArguments(parser)
  logging_common.AddLoggingArguments(parser)

  args = parser.parse_args()
  script_common.InitializeEnvironment(args)
  logging_common.InitializeLogging(args)

  if args.workdir:
    if not os.path.isdir(args.workdir):
      raise RuntimeError('Specified working directory does not exist')
    RealMain(args.mapping, args.devices, args.package, args.status,
             args.hide_summary, args.workdir)
    # Assume the user wants the workdir to persist (useful for debugging).
    # BUG FIX: logging.warn is a deprecated alias; use logging.warning.
    logging.warning('Not cleaning up explicitly-specified workdir: %s',
                    args.workdir)
  else:
    with tempfile_ext.NamedTemporaryDirectory() as workdir:
      RealMain(args.mapping, args.devices, args.package, args.status,
               args.hide_summary, workdir)


if __name__ == '__main__':
  main()
diff --git a/deps/v8/build/android/list_class_verification_failures_test.py b/deps/v8/build/android/list_class_verification_failures_test.py
new file mode 100644
index 0000000000..a3da0fd6d7
--- /dev/null
+++ b/deps/v8/build/android/list_class_verification_failures_test.py
@@ -0,0 +1,233 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import list_class_verification_failures as list_verification
+
+from pylib.constants import host_paths
+
+import devil_chromium # pylint: disable=unused-import
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android.ndk import abis
+from devil.android.sdk import version_codes
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+def _CreateOdexLine(java_class_name, type_idx, verification_status):
+ """Create a rough approximation of a line of oatdump output."""
+ return ('{type_idx}: L{java_class}; (offset=0xac) (type_idx={type_idx}) '
+ '({verification}) '
+ '(OatClassNoneCompiled)'.format(type_idx=type_idx,
+ java_class=java_class_name,
+ verification=verification_status))
+
+
+def _ClassForName(name, classes):
+ return next(c for c in classes if c.name == name)
+
+
class _DetermineDeviceToUseTest(unittest.TestCase):
  """Tests for list_class_verification_failures.DetermineDeviceToUse.

  HealthyDevices is replaced by direct assignment with a MagicMock in each
  test. NOTE(review): the patch is never undone, so it leaks into any tests
  run later in the same process — consider mock.patch instead.
  """

  def testDetermineDeviceToUse_emptyListWithOneAttachedDevice(self):
    fake_attached_devices = ['123']
    user_specified_devices = []
    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
        return_value=fake_attached_devices)
    result = list_verification.DetermineDeviceToUse(user_specified_devices)
    self.assertEqual(result, fake_attached_devices[0])
    # An empty user selection must be forwarded as device_arg=None.
    device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)

  def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self):
    user_specified_devices = []
    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
        side_effect=device_errors.NoDevicesError())
    with self.assertRaises(device_errors.NoDevicesError) as _:
      list_verification.DetermineDeviceToUse(user_specified_devices)
    device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None)

  def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self):
    user_specified_devices = ['123']
    fake_attached_devices = ['123']
    device_utils.DeviceUtils.HealthyDevices = mock.MagicMock(
        return_value=fake_attached_devices)
    result = list_verification.DetermineDeviceToUse(user_specified_devices)
    self.assertEqual(result, fake_attached_devices[0])
    # An explicit selection must be forwarded verbatim.
    device_utils.DeviceUtils.HealthyDevices.assert_called_with(
        device_arg=user_specified_devices)
+
+
class _ListClassVerificationFailuresTest(unittest.TestCase):
  """Tests for odex path selection and oatdump output parsing.

  Devices are simulated with mock.Mock objects exposing only the attributes
  and methods the code under test reads.
  """

  def testPathToDexForPlatformVersion_noPaths(self):
    sdk_int = version_codes.LOLLIPOP
    paths_to_apk = []
    package_name = 'package.name'
    arch = abis.ARM_64

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)

    with self.assertRaises(list_verification.DeviceOSError) as cm:
      list_verification.PathToDexForPlatformVersion(device, package_name)
    message = str(cm.exception)
    self.assertIn('Could not find data directory', message)

  def testPathToDexForPlatformVersion_multiplePaths(self):
    sdk_int = version_codes.LOLLIPOP
    paths_to_apk = ['/first/path', '/second/path']
    package_name = 'package.name'
    arch = abis.ARM_64

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)

    with self.assertRaises(list_verification.DeviceOSError) as cm:
      list_verification.PathToDexForPlatformVersion(device, package_name)
    message = str(cm.exception)
    self.assertIn('Expected exactly one path for', message)

  def testPathToDexForPlatformVersion_dalvikApiLevel(self):
    # Pre-Lollipop (Dalvik) devices are rejected outright.
    sdk_int = version_codes.KITKAT
    paths_to_apk = ['/some/path']
    package_name = 'package.name'
    arch = abis.ARM_64

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)

    with self.assertRaises(list_verification.UnsupportedDeviceError) as _:
      list_verification.PathToDexForPlatformVersion(device, package_name)

  def testPathToDexForPlatformVersion_lollipopArm(self):
    sdk_int = version_codes.LOLLIPOP
    package_name = 'package.name'
    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
    arch = 'arm'

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
    device.FileExists = mock.MagicMock(return_value=True)

    odex_file = list_verification.PathToDexForPlatformVersion(device,
                                                              package_name)
    self.assertEqual(odex_file,
                     ('/data/dalvik-cache/arm/data@app'
                      '@package.name-1@base.apk@classes.dex'))

  # NOTE(review): "mashmallow" in the next two test names is a typo for
  # "marshmallow" (harmless: unittest discovers them via the 'test' prefix).
  def testPathToDexForPlatformVersion_mashmallowArm(self):
    sdk_int = version_codes.MARSHMALLOW
    package_name = 'package.name'
    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
    arch = 'arm'

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
    device.FileExists = mock.MagicMock(return_value=True)

    odex_file = list_verification.PathToDexForPlatformVersion(device,
                                                              package_name)
    self.assertEqual(odex_file,
                     '/some/path/package.name-1/oat/arm/base.odex')

  def testPathToDexForPlatformVersion_mashmallowArm64(self):
    sdk_int = version_codes.MARSHMALLOW
    package_name = 'package.name'
    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
    arch = abis.ARM_64

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
    device.FileExists = mock.MagicMock(return_value=True)

    odex_file = list_verification.PathToDexForPlatformVersion(device,
                                                              package_name)
    self.assertEqual(odex_file,
                     '/some/path/package.name-1/oat/arm64/base.odex')

  def testPathToDexForPlatformVersion_pieNoOdexFile(self):
    sdk_int = version_codes.PIE
    package_name = 'package.name'
    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
    arch = abis.ARM_64

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
    device.FileExists = mock.MagicMock(return_value=False)

    with self.assertRaises(list_verification.DeviceOSError) as cm:
      list_verification.PathToDexForPlatformVersion(device, package_name)
    message = str(cm.exception)
    self.assertIn('you must run dex2oat on debuggable apps on >= P', message)

  def testPathToDexForPlatformVersion_lowerApiLevelNoOdexFile(self):
    sdk_int = version_codes.MARSHMALLOW
    package_name = 'package.name'
    paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)]
    arch = abis.ARM_64

    device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch)
    device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk)
    device.FileExists = mock.MagicMock(return_value=False)

    with self.assertRaises(list_verification.DeviceOSError) as _:
      list_verification.PathToDexForPlatformVersion(device, package_name)

  def testListClasses_noProguardMap(self):
    oatdump_output = [
        _CreateOdexLine('a.b.JavaClass1', 6, 'StatusVerified'),
        _CreateOdexLine('a.b.JavaClass2', 7,
                        'StatusRetryVerificationAtRuntime'),
    ]

    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
                                                                 None)
    self.assertEqual(2, len(classes))
    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
    self.assertEqual(java_class_1.verification_status, 'Verified')
    self.assertEqual(java_class_2.verification_status,
                     'RetryVerificationAtRuntime')

  def testListClasses_proguardMap(self):
    oatdump_output = [
        _CreateOdexLine('a.b.ObfuscatedJavaClass1', 6, 'StatusVerified'),
        _CreateOdexLine('a.b.ObfuscatedJavaClass2', 7,
                        'StatusRetryVerificationAtRuntime'),
    ]

    mapping = {
        'a.b.ObfuscatedJavaClass1': 'a.b.JavaClass1',
        'a.b.ObfuscatedJavaClass2': 'a.b.JavaClass2',
    }
    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
                                                                 mapping)
    self.assertEqual(2, len(classes))
    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
    self.assertEqual(java_class_1.verification_status, 'Verified')
    self.assertEqual(java_class_2.verification_status,
                     'RetryVerificationAtRuntime')

  def testListClasses_noStatusPrefix(self):
    oatdump_output = [
        _CreateOdexLine('a.b.JavaClass1', 6, 'Verified'),
        _CreateOdexLine('a.b.JavaClass2', 7, 'RetryVerificationAtRuntime'),
    ]

    classes = list_verification.ListClassesAndVerificationStatus(oatdump_output,
                                                                 None)
    self.assertEqual(2, len(classes))
    java_class_1 = _ClassForName('a.b.JavaClass1', classes)
    java_class_2 = _ClassForName('a.b.JavaClass2', classes)
    self.assertEqual(java_class_1.verification_status, 'Verified')
    self.assertEqual(java_class_2.verification_status,
                     'RetryVerificationAtRuntime')
+
if __name__ == '__main__':
  # buffer=True captures stdout/stderr of passing tests, which suppresses
  # logging noise and keeps the test output readable.
  unittest.main(buffer=True)
diff --git a/deps/v8/build/android/main_dex_classes.flags b/deps/v8/build/android/main_dex_classes.flags
new file mode 100644
index 0000000000..9163c5097f
--- /dev/null
+++ b/deps/v8/build/android/main_dex_classes.flags
@@ -0,0 +1,61 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Proguard flags for what should be kept in the main dex. Only used
+# during main dex list determination, not during actual proguarding.
+
+-keep @org.chromium.base.annotations.MainDex class * {
+ *;
+}
+
+-keepclasseswithmembers class * {
+ @org.chromium.base.annotations.MainDex <methods>;
+}
+
+# Assume all IDL-generated classes should be kept. They can't reference other
+# non-framework classes, so fairly low-risk.
+-keepclasseswithmembers class * {
+ public static ** asInterface(android.os.IBinder);
+}
+
+# Required when code coverage is enabled.
+-keep class com.vladium.** {
+ *;
+}
+
+# Renderers / GPU process don't load secondary dex.
+-keep public class * extends org.chromium.base.process_launcher.ChildProcessService {
+ *;
+}
+
+# WebView doesn't load secondary dex.
+-keep public class com.android.webview.** {
+ *;
+}
+
+-keep public class org.chromium.android_webview.** {
+ *;
+}
+
+# Used by tests for secondary dex extraction.
+-keep class android.support.v4.content.ContextCompat {
+ *;
+}
+
+# The following are based on $SDK_BUILD_TOOLS/mainDexClasses.rules
+# Ours differ in that:
+# 1. It omits -keeps for application / instrumentation / backupagents (these are
+# redundant since they are added by aapt's main dex list rules output).
+# 2. Omits keep for Application.attachBaseContext(), which is overly broad.
+# 3. Omits keep for all annotations, which is also overly broad (and pulls in
+# any class that has an @IntDef).
+
+######## START mainDexClasses.rules ########
+
+# Keep old-fashioned tests in the main dex or they'll be silently ignored by InstrumentationTestRunner
+-keep public class * extends android.test.InstrumentationTestCase {
+ <init>();
+}
+
+######## END mainDexClasses.rules ########
diff --git a/deps/v8/build/android/method_count.py b/deps/v8/build/android/method_count.py
new file mode 100755
index 0000000000..490887adbc
--- /dev/null
+++ b/deps/v8/build/android/method_count.py
@@ -0,0 +1,116 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import devil_chromium
+from devil.android.sdk import dexdump
+from pylib.constants import host_paths
+
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+ 'common'))
+import perf_tests_results_helper # pylint: disable=import-error
+
+# Example dexdump output:
+# DEX file header:
+# magic : 'dex\n035\0'
+# checksum : b664fc68
+# signature : ae73...87f1
+# file_size : 4579656
+# header_size : 112
+# link_size : 0
+# link_off : 0 (0x000000)
+# string_ids_size : 46148
+# string_ids_off : 112 (0x000070)
+# type_ids_size : 5730
+# type_ids_off : 184704 (0x02d180)
+# proto_ids_size : 8289
+# proto_ids_off : 207624 (0x032b08)
+# field_ids_size : 17854
+# field_ids_off : 307092 (0x04af94)
+# method_ids_size : 33699
+# method_ids_off : 449924 (0x06dd84)
+# class_defs_size : 2616
+# class_defs_off : 719516 (0x0afa9c)
+# data_size : 3776428
+# data_off : 803228 (0x0c419c)
+
+# For what these mean, refer to:
+# https://source.android.com/devices/tech/dalvik/dex-format.html
+
+
# Dex header *_size fields that each contribute references to the DexCache,
# mapped to the human-readable names used in perf result output.
CONTRIBUTORS_TO_DEX_CACHE = {'type_ids_size': 'types',
                             'string_ids_size': 'strings',
                             'method_ids_size': 'methods',
                             'field_ids_size': 'fields'}
+
+
def _ExtractSizesFromDexFile(dex_path):
  """Parses dexdump's file summary for the *_size dex header fields.

  Args:
    dex_path: Host path to a .dex file.
  Returns:
    A (counts, dexcache_bytes) tuple: counts maps the header field names in
    CONTRIBUTORS_TO_DEX_CACHE to their integer values, and dexcache_bytes is
    the resulting DexCache contribution in bytes.
  Raises:
    Exception: If dexdump output ends before the blank line that terminates
      the header section.
  """
  counts = {}
  for line in dexdump.DexDump(dex_path, file_summary=True):
    if not line.strip():
      # Each method, type, field, and string contributes 4 bytes (1 reference)
      # to our DexCache size.
      # NOTE(review): this assumes every CONTRIBUTORS_TO_DEX_CACHE field was
      # seen before the first blank line; otherwise the sum raises KeyError —
      # confirm dexdump's header always precedes the blank line.
      return counts, sum(counts[x] for x in CONTRIBUTORS_TO_DEX_CACHE) * 4
    m = re.match(r'([a-z_]+_size) *: (\d+)', line)
    if m and m.group(1) in CONTRIBUTORS_TO_DEX_CACHE:
      counts[m.group(1)] = int(m.group(2))
  raise Exception('Unexpected end of output.')
+
+
def ExtractSizesFromZip(path):
  """Sums DexCache contributions over every .dex member of a zip archive.

  Args:
    path: Host path to a zip/apk/jar file.
  Returns:
    A (counts, total) tuple: counts maps each dex file's basename to its
    per-field size dict, and total is the summed DexCache size in bytes.
  """
  scratch_dir = tempfile.mkdtemp(suffix='_dex_extract')
  try:
    counts = {}
    total = 0
    with zipfile.ZipFile(path, 'r') as archive:
      dex_members = [m for m in archive.namelist() if m.endswith('.dex')]
      for member in dex_members:
        host_path = archive.extract(member, scratch_dir)
        member_counts, member_total = _ExtractSizesFromDexFile(host_path)
        counts[os.path.basename(host_path)] = member_counts
        total += member_total
    return counts, total
  finally:
    # Always clean up the extraction directory, even on parse errors.
    shutil.rmtree(scratch_dir)
+
+
def main():
  """Prints perf results for the DexCache size contributions of a dex/zip.

  Returns:
    0 on success (exit code).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('filename')

  args = parser.parse_args()

  devil_chromium.Initialize()

  if os.path.splitext(args.filename)[1] in ('.zip', '.apk', '.jar'):
    sizes, total_size = ExtractSizesFromZip(args.filename)
  else:
    single_set_of_sizes, total_size = _ExtractSizesFromDexFile(args.filename)
    sizes = {"": single_set_of_sizes}

  file_basename = os.path.basename(args.filename)
  # BUG FIX: dict.iteritems() is Python 2 only; items() works on both
  # Python 2 and 3 with identical iteration behavior here.
  for classes_dex_file, classes_dex_sizes in sizes.items():
    for dex_header_name, readable_name in CONTRIBUTORS_TO_DEX_CACHE.items():
      if dex_header_name in classes_dex_sizes:
        perf_tests_results_helper.PrintPerfResult(
            '%s_%s_%s' % (file_basename, classes_dex_file, readable_name),
            'total', [classes_dex_sizes[dex_header_name]], readable_name)

  perf_tests_results_helper.PrintPerfResult(
      '%s_DexCache_size' % (file_basename), 'total', [total_size],
      'bytes of permanent dirty memory')
  return 0

if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/multidex.flags b/deps/v8/build/android/multidex.flags
new file mode 100644
index 0000000000..e3543c1324
--- /dev/null
+++ b/deps/v8/build/android/multidex.flags
@@ -0,0 +1,8 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# When multidex is enabled, need to keep the @MainDex annotation so that it
+# can be used to create the main dex list.
+-keepattributes *Annotations*
+-keep @interface org.chromium.base.annotations.MainDex
diff --git a/deps/v8/build/android/play_services/__init__.py b/deps/v8/build/android/play_services/__init__.py
new file mode 100644
index 0000000000..50b23dff63
--- /dev/null
+++ b/deps/v8/build/android/play_services/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/play_services/preprocess.py b/deps/v8/build/android/play_services/preprocess.py
new file mode 100755
index 0000000000..bb3424a80d
--- /dev/null
+++ b/deps/v8/build/android/play_services/preprocess.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Prepares the Google Play services split client libraries before usage by
+Chrome's build system.
+
+We need to preprocess Google Play services before using it in Chrome builds
+mostly to remove unused resources (unsupported languages, unused drawables,
+etc.) as proper resource shrinking is not yet supported by our build system.
+(See https://crbug.com/636448)
+
+The script is meant to be used with an unpacked library repository. One can
+be obtained by downloading the "extra-google-m2repository" from the Android SDK
+Manager and extracting the AARs from the desired version as the following
+structure:
+
+ REPOSITORY_DIR
+ +-- CLIENT_1
+ | +-- <content of the first AAR file>
+ +-- CLIENT_2
+ +-- etc.
+
+The output will follow the same structure, with fewer resource files, in the
+provided output directory.
+'''
+
+import argparse
+import glob
+import itertools
+import os
+import shutil
+import stat
+import sys
+import tempfile
+import textwrap
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from play_services import utils
+from pylib.utils import argparse_utils
+
+
def main():
  """Parses command-line flags and runs the Play services preprocessing."""
  parser = argparse.ArgumentParser(description=(
      "Prepares the Google Play services split client libraries before usage "
      "by Chrome's build system. See the script's documentation for more a "
      "detailed help."))
  argparse_utils.CustomHelpAction.EnableFor(parser)
  required_args = parser.add_argument_group('required named arguments')
  # (short flag, long flag, help text) for each required FILE argument.
  required_flags = [
      ('-r', '--repository',
       'the Google Play services repository location'),
      ('-d', '--root-dir', 'the directory which GN considers the root'),
      ('-o', '--out-dir', 'the output directory'),
      ('-g', '--gni-out-file', 'the GN output file'),
      ('-c', '--config-file', 'the config file path'),
  ]
  for short_name, long_name, help_text in required_flags:
    required_args.add_argument(short_name, long_name, help=help_text,
                               required=True, metavar='FILE')
  parser.add_argument('--config-help',
                      action='custom_help',
                      custom_help_text=utils.ConfigParser.__doc__,
                      help='show the configuration file format help')

  args = parser.parse_args()

  return ProcessGooglePlayServices(args.repository,
                                   args.root_dir,
                                   args.out_dir,
                                   args.gni_out_file,
                                   args.config_file)
+
+
def ProcessGooglePlayServices(
    repo, root_dir, out_dir, gni_out_file, config_path):
  """Preprocesses an extracted Play services repository into |out_dir|.

  All intermediate work happens inside a temporary directory that is removed
  even when a processing step fails.

  Returns:
    0 on success (exit code).
  """
  config = utils.ConfigParser(config_path)

  scratch_root = tempfile.mkdtemp()
  try:
    paths = _SetupTempDir(scratch_root)
    _ImportFromExtractedRepo(config, paths, repo)
    _ProcessResources(config, paths, repo)
    _CopyToOutput(paths, out_dir)
    _EnumerateProguardFiles(root_dir, out_dir, gni_out_file)
    _UpdateVersionInConfig(config, paths)
  finally:
    shutil.rmtree(scratch_root)

  return 0
+
+
+def _SetupTempDir(tmp_root):
+ tmp_paths = {
+ 'root': tmp_root,
+ 'imported_clients': os.path.join(tmp_root, 'imported_clients'),
+ 'extracted_jars': os.path.join(tmp_root, 'jar'),
+ 'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
+ }
+ os.mkdir(tmp_paths['imported_clients'])
+ os.mkdir(tmp_paths['extracted_jars'])
+
+ return tmp_paths
+
+
+def _MakeWritable(dir_path):
+ for root, dirs, files in os.walk(dir_path):
+ for path in itertools.chain(dirs, files):
+ st = os.stat(os.path.join(root, path))
+ os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)
+
+
+# E.g. turn "base_1p" into "base"
+def _RemovePartySuffix(client):
+ return client[:-3] if client[-3:] == '_1p' else client
+
+
def _ImportFromExtractedRepo(config, tmp_paths, repo):
  """Copies each configured client out of |repo| into the temp workspace."""
  try:
    for client_name in config.clients:
      destination = os.path.join(tmp_paths['imported_clients'], client_name)
      shutil.copytree(os.path.join(repo, client_name), destination)
  finally:
    # SDK-extracted files can be read-only; make everything writable so later
    # resource-processing steps can rename and delete files.
    _MakeWritable(tmp_paths['imported_clients'])
+
+
def _ProcessResources(config, tmp_paths, repo):
  """Prunes and renames the imported clients' resources in place.

  Drops all drawables and any localized values directories outside the
  configured locale whitelist, prefixes "values" resource files with their
  client name to avoid merge collisions, then re-imports explicitly
  whitelisted resource files from |repo|.
  """
  LOCALIZED_VALUES_BASE_NAME = 'values-'
  locale_whitelist = set(config.locale_whitelist)

  # The directory structure here is:
  # <imported_clients temp dir>/<client name>_1p/res/<res type>/<res file>.xml
  for client_dir in os.listdir(tmp_paths['imported_clients']):
    client_prefix = _RemovePartySuffix(client_dir) + '_'

    res_path = os.path.join(tmp_paths['imported_clients'], client_dir, 'res')
    if not os.path.isdir(res_path):
      continue

    for res_type in os.listdir(res_path):
      res_type_path = os.path.join(res_path, res_type)

      if res_type.startswith('drawable'):
        shutil.rmtree(res_type_path)
        continue

      if res_type.startswith(LOCALIZED_VALUES_BASE_NAME):
        dir_locale = res_type[len(LOCALIZED_VALUES_BASE_NAME):]
        if dir_locale not in locale_whitelist:
          shutil.rmtree(res_type_path)
          continue

      if res_type.startswith('values'):
        # Beginning with v3, resource file names are not necessarily unique,
        # and would overwrite each other when merged at build time. Prefix each
        # "values" resource file with its client name.
        for res_file in os.listdir(res_type_path):
          os.rename(os.path.join(res_type_path, res_file),
                    os.path.join(res_type_path, client_prefix + res_file))

  # Reimport files from the whitelist.
  for res_path in config.resource_whitelist:
    for whitelisted_file in glob.glob(os.path.join(repo, res_path)):
      resolved_file = os.path.relpath(whitelisted_file, repo)
      rebased_res = os.path.join(tmp_paths['imported_clients'], resolved_file)

      if not os.path.exists(os.path.dirname(rebased_res)):
        os.makedirs(os.path.dirname(rebased_res))

      try:
        # NOTE(review): whitelisted_file already carries the |repo| prefix
        # (it came from glob over os.path.join(repo, ...)), so joining repo
        # again is only a no-op when |repo| is absolute — confirm callers
        # always pass an absolute repo path.
        shutil.copy(os.path.join(repo, whitelisted_file), rebased_res)
      finally:
        # NOTE(review): _MakeWritable os.walk()s its argument; walking a
        # single file yields nothing, so this looks like a no-op — verify
        # the intent here.
        _MakeWritable(rebased_res)
+
+
+def _CopyToOutput(tmp_paths, out_dir):
+ shutil.rmtree(out_dir, ignore_errors=True)
+ shutil.copytree(tmp_paths['imported_clients'], out_dir)
+
+
+# Write a GN file containing a list of each GMS client's proguard file (if any).
+def _EnumerateProguardFiles(root_dir, out_dir, gni_path):
+ gni_dir = os.path.dirname(gni_path)
+ gni_template = textwrap.dedent('''\
+ # Copyright 2017 The Chromium Authors. All rights reserved.
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+
+ # This file generated by {script}
+ gms_proguard_configs = [
+ {body}
+ ]
+ ''')
+
+ gni_lines = []
+ for client_dir in os.listdir(out_dir):
+ proguard_path = os.path.join(
+ out_dir, client_dir, 'proguard.txt')
+ if os.path.exists(proguard_path):
+ rooted_path = os.path.relpath(proguard_path, root_dir)
+ gni_lines.append(' "//{}",'.format(rooted_path))
+ gni_lines.sort()
+
+ gni_text = gni_template.format(
+ script=os.path.relpath(sys.argv[0], gni_dir),
+ body='\n'.join(gni_lines))
+
+ with open(gni_path, 'w') as gni_file:
+ gni_file.write(gni_text)
+
+
def _UpdateVersionInConfig(config, tmp_paths):
  """Reads the imported library's version.xml and records it in the config."""
  version_xml = os.path.join(tmp_paths['imported_clients'],
                             config.version_xml_path)
  full_version = utils.GetVersionNumberFromLibraryResources(version_xml)
  config.UpdateVersionNumber(full_version)
+
+
+def _ExtractAll(zip_path, out_path):
+ with zipfile.ZipFile(zip_path, 'r') as zip_file:
+ zip_file.extractall(out_path)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/play_services/utils.py b/deps/v8/build/android/play_services/utils.py
new file mode 100644
index 0000000000..76b3679957
--- /dev/null
+++ b/deps/v8/build/android/play_services/utils.py
@@ -0,0 +1,144 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''
+Utility functions for all things related to manipulating google play services
+related files.
+'''
+
+import argparse
+import filecmp
+import json
+import os
+import re
+
+
+_XML_VERSION_NUMBER_PATTERN = re.compile(
+ r'<integer name="google_play_services_version">(\d+)<\/integer>')
+
+
class DefaultsRawHelpFormatter(argparse.ArgumentDefaultsHelpFormatter,
                               argparse.RawDescriptionHelpFormatter):
  '''Help formatter that keeps raw description text and shows defaults.

  Mixes argparse.RawDescriptionHelpFormatter (description text is emitted
  verbatim, preserving newlines) with argparse.ArgumentDefaultsHelpFormatter
  (each argument's default value is appended to its help string).
  '''
+
+
class ConfigParser(object):
  '''Reads and writes the JSON configuration files for play services scripts.

  Expected keys in the configuration file:

  - version_number
      Number. Mirrors @integer/google_play_services_version from the library.
      Example: 815000

  - sdk_version
      Version of the Play Services SDK to retrieve, when preprocessing the
      library from a maven/gradle repository.
      Example: "8.1.0"

  - clients
      List of strings. Name of the clients (or play services modules) to
      include when preprocessing the library.
      Example: ["play-services-base", "play-services-cast"]

  - version_xml_path
      String. Path to the version.xml string describing the current version,
      relative to the library base directory.
      Example: "res/values/version.xml"

  - locale_whitelist
      List of strings. Locales to keep from the resources. Can be obtained by
      generating an android build and looking at the content of
      `out/Debug/gen/chrome/java/res`, or at the android section in
      `//chrome/app/generated_resources.grd`.
      Example: ["am", "ar", "bg", "ca", "cs"]

  - resource_whitelist
      List of strings. Resource files to explicitly keep in the final output,
      e.g. to keep drawables, as they are currently all removed otherwise.
      Example: ["play-services-base/res/drawables/foobar.xml"]
  '''
  _VERSION_NUMBER_KEY = 'version_number'

  def __init__(self, path):
    self.path = path
    self._data = {}
    with open(path, 'r') as stream:
      self._data = json.load(stream)

  @property
  def version_number(self):
    return self._data.get(self._VERSION_NUMBER_KEY)

  @property
  def sdk_version(self):
    return self._data.get('sdk_version')

  @property
  def clients(self):
    return self._data.get('clients') or []

  @property
  def version_xml_path(self):
    return self._data.get('version_xml_path')

  @property
  def locale_whitelist(self):
    return self._data.get('locale_whitelist') or []

  @property
  def resource_whitelist(self):
    return self._data.get('resource_whitelist') or []

  def UpdateVersionNumber(self, new_version_number):
    '''Updates the version number and saves it in the configuration file.'''
    self._data[self._VERSION_NUMBER_KEY] = new_version_number
    with open(self.path, 'w') as stream:
      stream.write(DumpTrimmedJson(self._data))
+
+
def DumpTrimmedJson(json_data):
  '''
  Returns json_data serialized as JSON with stable formatting: keys sorted,
  2-space indentation, no trailing whitespace on any line, and a final
  newline.

  The original implementation stripped occurrences of ' ' + os.linesep, which
  was a no-op on platforms where os.linesep differs from the newline that
  json.dumps actually emits (json.dumps always uses LF). Passing explicit
  separators prevents json.dumps from producing trailing spaces in the first
  place (it does so on older Pythons when only indent is given), and the
  defensive strip below operates on LF, the separator really present.
  '''
  out = json.dumps(json_data, sort_keys=True, indent=2,
                   separators=(',', ': '))
  # Defensively drop any trailing whitespace at the end of each line. JSON
  # string values never end a line (a closing quote follows them), so this
  # cannot alter the encoded data.
  out = '\n'.join(line.rstrip() for line in out.split('\n'))
  return out + '\n'
+
+
def FileEquals(expected_file, actual_file):
  '''
  Returns True iff both paths exist as regular files and their contents
  compare equal; False when either file is missing.
  '''
  both_are_files = (os.path.isfile(expected_file)
                    and os.path.isfile(actual_file))
  return both_are_files and filecmp.cmp(expected_file, actual_file)
+
+
def GetVersionNumberFromLibraryResources(version_xml):
  '''
  Extracts the Google Play services version number from its version.xml file.

  Raises:
    AttributeError: if the file contains no google_play_services_version
        integer resource.
  '''
  with open(version_xml, 'r') as version_file:
    content = version_file.read()

  # Same pattern as the module-level _XML_VERSION_NUMBER_PATTERN, compiled
  # locally so this function is self-contained.
  pattern = re.compile(
      r'<integer name="google_play_services_version">(\d+)<\/integer>')
  match = pattern.search(content)
  if match is None:
    raise AttributeError('A value for google_play_services_version was not '
                         'found in ' + version_xml)
  return int(match.group(1))
diff --git a/deps/v8/build/android/provision_devices.py b/deps/v8/build/android/provision_devices.py
new file mode 100755
index 0000000000..ecf22c9a56
--- /dev/null
+++ b/deps/v8/build/android/provision_devices.py
@@ -0,0 +1,561 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+ ./provision_devices.py [-d <device serial number>]
+"""
+
+import argparse
+import datetime
+import json
+import logging
+import os
+import posixpath
+import re
+import subprocess
+import sys
+import time
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See crbug.com/584730 and https://bugs.python.org/issue7980.
+import _strptime # pylint: disable=unused-import
+
+import devil_chromium
+from devil.android import battery_utils
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import device_utils
+from devil.android.sdk import keyevent
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+from devil.utils import run_tests_helper
+from devil.utils import timeout_retry
+from pylib import constants
+from pylib import device_settings
+from pylib.constants import host_paths
+
+_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle']
+_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*')
+_TOMBSTONE_REGEX = re.compile('tombstone.*')
+
+
class _DEFAULT_TIMEOUTS(object):
  """Default reboot-wait budgets, in seconds, keyed by OS generation.

  Lollipop can take considerably longer to come back up after a wipe, so it
  gets a larger budget than earlier releases.
  """
  LOLLIPOP = 600
  PRE_LOLLIPOP = 180
  HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
+
+
class _PHASES(object):
  """Names of the provisioning phases."""
  WIPE = 'wipe'
  PROPERTIES = 'properties'
  FINISH = 'finish'

  # Execution order: wipe first, then property setup, then finishing touches.
  ALL = [WIPE, PROPERTIES, FINISH]
+
+
def ProvisionDevices(args):
  """Provisions all selected devices in parallel.

  Args:
    args: Parsed command-line options (see main()).

  Returns:
    0 on success.

  Raises:
    device_errors.DeviceUnreachableError: if a specifically requested device
        is not among the healthy devices.
    device_errors.NoDevicesError: if every candidate device ended up on the
        blacklist.
  """
  blacklist = (device_blacklist.Blacklist(args.blacklist_file)
               if args.blacklist_file
               else None)
  devices = [d for d in device_utils.DeviceUtils.HealthyDevices(blacklist)
             if not args.emulators or d.adb.is_emulator]
  if args.device:
    devices = [d for d in devices if d == args.device]
  if not devices:
    raise device_errors.DeviceUnreachableError(args.device)
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  if args.emulators:
    # Emulators only get their properties configured; no wipe/finish phases.
    parallel_devices.pMap(SetProperties, args)
  else:
    parallel_devices.pMap(ProvisionDevice, blacklist, args)
  if args.auto_reconnect:
    _LaunchHostHeartbeat()
  blacklisted_devices = blacklist.Read() if blacklist else []
  if args.output_device_blacklist:
    with open(args.output_device_blacklist, 'w') as f:
      json.dump(blacklisted_devices, f)
  # all() over an empty device list is True, but the empty case was already
  # rejected above, so this only fires when every device failed provisioning.
  if all(d in blacklisted_devices for d in devices):
    raise device_errors.NoDevicesError
  return 0
+
+
def ProvisionDevice(device, blacklist, options):
  """Runs the requested provisioning phases on a single device.

  Devices that time out or fail are added to the blacklist (when one is
  provided) rather than raising to the caller.

  Args:
    device: DeviceUtils instance for the device to provision.
    blacklist: Optional Blacklist extended on failure/timeout.
    options: Parsed command-line options (see main()).
  """
  def should_run_phase(phase_name):
    # No --phase flags means every phase runs.
    return not options.phases or phase_name in options.phases

  def run_phase(phase_func, reboot_timeout, reboot=True):
    # Make sure the device is in a usable state before the phase, then
    # optionally reboot afterwards so the phase's changes take effect.
    try:
      device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
    except device_errors.CommandTimeoutError:
      logging.error('Device did not finish booting. Will try to reboot.')
      device.Reboot(timeout=reboot_timeout)
    phase_func(device, options)
    if reboot:
      device.Reboot(False, retries=0)
      device.adb.WaitForDevice()

  try:
    if options.reboot_timeout:
      reboot_timeout = options.reboot_timeout
    elif device.build_version_sdk >= version_codes.LOLLIPOP:
      reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
    else:
      reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP

    if should_run_phase(_PHASES.WIPE):
      # User builds and M+ devices cannot take the full data wipe; fall back
      # to wiping only Chrome-specific data.
      if (options.chrome_specific_wipe or device.IsUserBuild() or
          device.build_version_sdk >= version_codes.MARSHMALLOW):
        run_phase(WipeChromeData, reboot_timeout)
      else:
        run_phase(WipeDevice, reboot_timeout)

    if should_run_phase(_PHASES.PROPERTIES):
      run_phase(SetProperties, reboot_timeout)

    if should_run_phase(_PHASES.FINISH):
      run_phase(FinishProvisioning, reboot_timeout, reboot=False)

    if options.chrome_specific_wipe:
      package = "com.google.android.gms"
      version_name = device.GetApplicationVersion(package)
      logging.info("Version name for %s is %s", package, version_name)

    CheckExternalStorage(device)

  except device_errors.CommandTimeoutError:
    logging.exception('Timed out waiting for device %s. Adding to blacklist.',
                      str(device))
    if blacklist:
      blacklist.Extend([str(device)], reason='provision_timeout')

  except (device_errors.CommandFailedError,
          device_errors.DeviceUnreachableError):
    logging.exception('Failed to provision device %s. Adding to blacklist.',
                      str(device))
    if blacklist:
      blacklist.Extend([str(device)], reason='provision_failure')
+
def CheckExternalStorage(device):
  """Checks that storage is writable and if not makes it writable.

  Arguments:
    device: The device to check.
  """
  try:
    # Probe writability by creating a throwaway temp file on external storage.
    with device_temp_file.DeviceTempFile(
        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
      device.WriteFile(f.name, 'test')
  except device_errors.CommandFailedError:
    logging.info('External storage not writable. Remounting / as RW')
    device.RunShellCommand(['mount', '-o', 'remount,rw', '/'],
                           check_return=True, as_root=True)
    device.EnableRoot()
    # Retry the probe; if storage is still not writable this raises and the
    # caller's error handling takes over.
    with device_temp_file.DeviceTempFile(
        device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f:
      device.WriteFile(f.name, 'test')
+
def WipeChromeData(device, options):
  """Wipes chrome specific data from device

  (1) uninstall any app whose name matches *chrom*, except
      com.android.chrome, which is the chrome stable package. Doing so also
      removes the corresponding dirs under /data/data/ and /data/app/
  (2) remove any dir under /data/app-lib/ whose name matches *chrom*
  (3) remove any files under /data/tombstones/ whose name matches "tombstone*"
  (4) remove /data/local.prop if there is any
  (5) remove /data/local/chrome-command-line if there is any
  (6) remove anything under /data/local/.config/ if the dir exists
      (this is telemetry related)
  (7) remove anything under /data/local/tmp/

  Arguments:
    device: the device to wipe
    options: command-line options; only options.skip_wipe is consulted here.
  """
  if options.skip_wipe:
    return

  try:
    if device.IsUserBuild():
      # User builds cannot uninstall system packages or get root, so only
      # non-system chrome packages and writable storage are cleared.
      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
                        constants.PACKAGE_INFO['chrome_stable'].package)
      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
                             check_return=True)
      device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True)
    else:
      device.EnableRoot()
      _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX,
                        constants.PACKAGE_INFO['chrome_stable'].package)
      _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX)
      _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX)

      _WipeFileOrDir(device, '/data/local.prop')
      _WipeFileOrDir(device, '/data/local/chrome-command-line')
      _WipeFileOrDir(device, '/data/local/.config/')
      _WipeFileOrDir(device, '/data/local/tmp/')
      device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(),
                             check_return=True)
  except device_errors.CommandFailedError:
    # Best-effort: a partially wiped device may still be provisionable.
    logging.exception('Possible failure while wiping the device. '
                      'Attempting to continue.')
+
+
def WipeDevice(device, options):
  """Wipes data from device, keeping only the adb_keys for authorization.

  After wiping data on a device that has been authorized, adb can still
  communicate with the device, but after reboot the device will need to be
  re-authorized because the adb keys file is stored in /data/misc/adb/.
  Thus, adb_keys file is rewritten so the device does not need to be
  re-authorized.

  Arguments:
    device: the device to wipe
    options: command-line options; skip_wipe and adb_key_files are consulted.
  """
  if options.skip_wipe:
    return

  try:
    device.EnableRoot()
    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
    if device_authorized:
      # Snapshot the existing keys before the wipe destroys them.
      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
                                 as_root=True).splitlines()
    device.RunShellCommand(['wipe', 'data'],
                           as_root=True, check_return=True)
    device.adb.WaitForDevice()

    if device_authorized:
      # Merge any host-side key files into the preserved set and restore.
      adb_keys_set = set(adb_keys)
      for adb_key_file in options.adb_key_files or []:
        try:
          with open(adb_key_file, 'r') as f:
            adb_public_keys = f.readlines()
          adb_keys_set.update(adb_public_keys)
        except IOError:
          logging.warning('Unable to find adb keys file %s.', adb_key_file)
      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
  except device_errors.CommandFailedError:
    logging.exception('Possible failure while wiping the device. '
                      'Attempting to continue.')
+
+
def _WriteAdbKeysFile(device, adb_keys_string):
  # Recreates the adb keys file on the device so adb stays authorized after a
  # wipe. The restorecon calls restore the SELinux contexts that the fresh
  # mkdir/WriteFile would otherwise leave wrong.
  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
  device.RunShellCommand(['mkdir', '-p', dir_path],
                         as_root=True, check_return=True)
  device.RunShellCommand(['restorecon', dir_path],
                         as_root=True, check_return=True)
  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
                         as_root=True, check_return=True)
+ as_root=True, check_return=True)
+
+
def SetProperties(device, options):
  """Configures device properties and content settings used by the bots.

  Args:
    device: DeviceUtils instance for the device being provisioned.
    options: Parsed command-line options (location/network/webview flags).
  """
  try:
    device.EnableRoot()
  except device_errors.CommandFailedError as e:
    # Root may be unavailable (e.g. user builds); continue with what we can.
    logging.warning(str(e))

  if not device.IsUserBuild():
    _ConfigureLocalProperties(device, options.enable_java_debug)
  else:
    logging.warning('Cannot configure properties in user builds.')
  device_settings.ConfigureContentSettings(
      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
  if options.disable_location:
    device_settings.ConfigureContentSettings(
        device, device_settings.DISABLE_LOCATION_SETTINGS)
  else:
    device_settings.ConfigureContentSettings(
        device, device_settings.ENABLE_LOCATION_SETTINGS)

  if options.disable_mock_location:
    device_settings.ConfigureContentSettings(
        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
  else:
    device_settings.ConfigureContentSettings(
        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)

  device_settings.SetLockScreenSettings(device)
  if options.disable_network:
    device_settings.ConfigureContentSettings(
        device, device_settings.NETWORK_DISABLED_SETTINGS)
    if device.build_version_sdk >= version_codes.MARSHMALLOW:
      # Ensure that NFC is also switched off.
      device.RunShellCommand(['svc', 'nfc', 'disable'],
                             as_root=True, check_return=True)

  if options.disable_system_chrome:
    # The system chrome version on the device interferes with some tests.
    device.RunShellCommand(['pm', 'disable', 'com.android.chrome'],
                           check_return=True)

  if options.remove_system_webview:
    if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS):
      logging.info('System WebView exists and needs to be removed')
      if device.HasRoot():
        # Disabled Marshmallow's Verity security feature
        if device.build_version_sdk >= version_codes.MARSHMALLOW:
          device.adb.DisableVerity()
          device.Reboot()
          device.WaitUntilFullyBooted()
          device.EnableRoot()

        # This is required, e.g., to replace the system webview on a device.
        device.adb.Remount()
        device.RunShellCommand(['stop'], check_return=True)
        device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS,
                               check_return=True)
        device.RunShellCommand(['start'], check_return=True)
      else:
        logging.warning('Cannot remove system webview from a non-rooted device')
    else:
      logging.info('System WebView already removed')

  # Some device types can momentarily disappear after setting properties.
  device.adb.WaitForDevice()
+
+
def _ConfigureLocalProperties(device, java_debug=True):
  """Set standard readonly testing device properties prior to reboot.

  Args:
    device: DeviceUtils instance to write the property file on.
    java_debug: When True, also enable Java asserts and JNI checking.
  """
  local_props = [
      'persist.sys.usb.config=adb',
      'ro.monkey=1',
      'ro.test_harness=1',
      'ro.audio.silent=1',
      'ro.setupwizard.mode=DISABLED',
  ]
  if java_debug:
    local_props.append(
        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
    local_props.append('debug.checkjni=1')
  try:
    device.WriteFile(
        device.LOCAL_PROPERTIES_PATH,
        '\n'.join(local_props), as_root=True)
    # Android will not respect the local props file if it is world writable.
    device.RunShellCommand(
        ['chmod', '644', device.LOCAL_PROPERTIES_PATH],
        as_root=True, check_return=True)
  except device_errors.CommandFailedError:
    # Best-effort: provisioning proceeds without the testing properties.
    logging.exception('Failed to configure local properties.')
+
+
def FinishProvisioning(device, options):
  """Final provisioning phase: unlock screen, battery conditioning, set date.

  Args:
    device: DeviceUtils instance for the device being provisioned.
    options: Parsed command-line options; min_battery_level,
        max_battery_temp, auto_reconnect and target are consulted.

  Raises:
    device_errors.CommandFailedError: if the device date/time cannot be set
        on a non-user build.
  """
  # The lockscreen can't be disabled on user builds, so send a keyevent
  # to unlock it.
  if device.IsUserBuild():
    device.SendKeyEvent(keyevent.KEYCODE_MENU)

  if options.min_battery_level is not None:
    battery = battery_utils.BatteryUtils(device)
    try:
      battery.ChargeDeviceToLevel(options.min_battery_level)
    except device_errors.DeviceChargingError:
      # A reboot can recover a device whose charging state is misreported.
      device.Reboot()
      battery.ChargeDeviceToLevel(options.min_battery_level)

  if options.max_battery_temp is not None:
    try:
      battery = battery_utils.BatteryUtils(device)
      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
    except device_errors.CommandFailedError:
      logging.exception('Unable to let battery cool to specified temperature.')

  def _set_and_verify_date():
    # M+ uses `date -u MMDDhhmmYYYY.ss`; older releases use `date -s` with a
    # different format.
    if device.build_version_sdk >= version_codes.MARSHMALLOW:
      date_format = '%m%d%H%M%Y.%S'
      set_date_command = ['date', '-u']
      get_date_command = ['date', '-u']
    else:
      date_format = '%Y%m%d.%H%M%S'
      set_date_command = ['date', '-s']
      get_date_command = ['date']

    # TODO(jbudorick): This is wrong on pre-M devices -- get/set are
    # dealing in local time, but we're setting based on GMT.
    strgmtime = time.strftime(date_format, time.gmtime())
    set_date_command.append(strgmtime)
    device.RunShellCommand(set_date_command, as_root=True, check_return=True)

    get_date_command.append('+"%Y%m%d.%H%M%S"')
    device_time = device.RunShellCommand(
        get_date_command, as_root=True, single_line=True).replace('"', '')
    device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S")
    correct_time = datetime.datetime.strptime(strgmtime, date_format)
    # BUG FIX: the original used (correct_time - device_time).seconds, which
    # wraps to ~86399 when the device clock is slightly *ahead* of the host
    # (negative timedelta), causing spurious failures. Compare the absolute
    # drift in seconds instead.
    tdelta = abs((correct_time - device_time).total_seconds())
    if tdelta <= 1:
      logging.info('Date/time successfully set on %s', device)
      return True
    else:
      logging.error('Date mismatch. Device: %s Correct: %s',
                    device_time.isoformat(), correct_time.isoformat())
      return False

  # Sometimes the date is not set correctly on the devices. Retry on failure.
  if device.IsUserBuild():
    # TODO(bpastene): Figure out how to set the date & time on user builds.
    pass
  else:
    if not timeout_retry.WaitFor(
        _set_and_verify_date, wait_period=1, max_tries=2):
      raise device_errors.CommandFailedError(
          'Failed to set date & time.', device_serial=str(device))

  # Dump all system properties to the log for debugging.
  props = device.RunShellCommand('getprop', check_return=True)
  for prop in props:
    logging.info(' %s', prop)
  if options.auto_reconnect:
    _PushAndLaunchAdbReboot(device, options.target)
+
+
def _UninstallIfMatch(device, pattern, app_to_keep):
  """Uninstalls every package whose name matches pattern, except app_to_keep.

  On user builds, system packages are additionally left alone since they
  cannot be reinstalled afterwards.
  """
  all_packages = device.RunShellCommand(['pm', 'list', 'packages'])
  # `pm list packages` prints lines of the form 'package:<name>'.
  system_packages = [
      line.split(':')[1]
      for line in device.RunShellCommand(['pm', 'list', 'packages', '-s'])]
  for line in all_packages:
    package = line.split(':')[1]
    if package == app_to_keep or not pattern.match(package):
      continue
    if not device.IsUserBuild() or package not in system_packages:
      device.Uninstall(package)
+
+
def _WipeUnderDirIfMatch(device, path, pattern):
  """Removes every entry directly under path whose name matches pattern."""
  for entry in device.ListDirectory(path):
    if pattern.match(entry):
      _WipeFileOrDir(device, posixpath.join(path, entry))
+
+
def _WipeFileOrDir(device, path):
  """Recursively deletes path on the device, if it exists."""
  if not device.PathExists(path):
    return
  device.RunShellCommand(['rm', '-rf', path], check_return=True)
+
+
def _PushAndLaunchAdbReboot(device, target):
  """Pushes and launches the adb_reboot binary on the device.

  adb_reboot is the device-side half of --auto-reconnect: per main()'s help
  text it reboots the device on adb disconnections.

  Arguments:
    device: The DeviceUtils instance for the device to which the adb_reboot
        binary should be pushed.
    target: The build target (example, Debug or Release) which helps in
        locating the adb_reboot binary.
  """
  logging.info('Will push and launch adb_reboot on %s', str(device))
  # Kill if adb_reboot is already running.
  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
  # Push adb_reboot
  logging.info(' Pushing adb_reboot ...')
  adb_reboot = os.path.join(host_paths.DIR_SOURCE_ROOT,
                            'out/%s/adb_reboot' % target)
  device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
  # Launch adb_reboot
  logging.info(' Launching adb_reboot ...')
  device.RunShellCommand(
      ['/data/local/tmp/adb_reboot'],
      check_return=True)
+
+
def _LaunchHostHeartbeat():
  """Restarts the host_heartbeat.py helper process on the host."""
  # Make sure at most one heartbeat process is alive at a time.
  KillHostHeartbeat()
  logging.info('Spawning host heartbeat...')
  heartbeat_script = os.path.join(host_paths.DIR_SOURCE_ROOT,
                                  'build/android/host_heartbeat.py')
  subprocess.Popen([heartbeat_script])
+
def KillHostHeartbeat():
  """Kills any running host_heartbeat processes on the host.

  Scans `ps aux` output for host_heartbeat entries and sends each matching
  PID a signal via `kill`. Best-effort: nothing happens when no heartbeat
  process is running.
  """
  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  stdout, _ = ps.communicate()
  # BUG FIX: on Python 3, communicate() returns bytes, and re.findall with a
  # str pattern on bytes raises TypeError. Decode defensively so the code
  # works under both Python 2 and 3.
  if isinstance(stdout, bytes):
    stdout = stdout.decode('utf-8', 'replace')
  matches = re.findall('\\n.*host_heartbeat.*', stdout)
  for match in matches:
    logging.info('An instance of host heart beart running... will kill')
    # The second whitespace-delimited field of a `ps aux` row is the PID.
    pid = re.findall(r'(\S+)', match)[1]
    subprocess.call(['kill', str(pid)])
+
def main():
  """Parses command-line options and provisions the selected devices.

  Returns:
    0 on success, or exit_codes.INFRA when provisioning fails because no
    devices were reachable or all ended up blacklisted.
  """
  # Recommended options on perf bots:
  # --disable-network
  #     TODO(tonyg): We eventually want network on. However, currently radios
  #     can cause perfbots to drain faster than they charge.
  # --min-battery-level 95
  #     Some perf bots run benchmarks with USB charging disabled which leads
  #     to gradual draining of the battery. We must wait for a full charge
  #     before starting a run in order to keep the devices online.

  parser = argparse.ArgumentParser(
      description='Provision Android devices with settings required for bots.')
  parser.add_argument('-d', '--device', metavar='SERIAL',
                      help='the serial number of the device to be provisioned'
                      ' (the default is to provision all devices attached)')
  parser.add_argument('--adb-path',
                      help='Absolute path to the adb binary to use.')
  parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
                      dest='phases',
                      help='Phases of provisioning to run. '
                      '(If omitted, all phases will be run.)')
  parser.add_argument('--skip-wipe', action='store_true', default=False,
                      help="don't wipe device data during provisioning")
  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
                      help='when wiping the device, max number of seconds to'
                      ' wait after each reboot '
                      '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
                      help='wait for the device to reach this minimum battery'
                      ' level before trying to continue')
  parser.add_argument('--disable-location', action='store_true',
                      help='disable Google location services on devices')
  parser.add_argument('--disable-mock-location', action='store_true',
                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
  parser.add_argument('--disable-network', action='store_true',
                      help='disable network access on devices')
  parser.add_argument('--disable-java-debug', action='store_false',
                      dest='enable_java_debug', default=True,
                      help='disable Java property asserts and JNI checking')
  parser.add_argument('--disable-system-chrome', action='store_true',
                      help='Disable the system chrome from devices.')
  parser.add_argument('--remove-system-webview', action='store_true',
                      help='Remove the system webview from devices.')
  parser.add_argument('-t', '--target', default='Debug',
                      help='the build target (default: %(default)s)')
  parser.add_argument('-r', '--auto-reconnect', action='store_true',
                      help='push binary which will reboot the device on adb'
                      ' disconnections')
  parser.add_argument('--adb-key-files', type=str, nargs='+',
                      help='list of adb keys to push to device')
  parser.add_argument('-v', '--verbose', action='count', default=1,
                      help='Log more information.')
  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
                      help='Wait for the battery to have this temp or lower.')
  parser.add_argument('--output-device-blacklist',
                      help='Json file to output the device blacklist.')
  parser.add_argument('--chrome-specific-wipe', action='store_true',
                      help='only wipe chrome specific data during provisioning')
  parser.add_argument('--emulators', action='store_true',
                      help='provision only emulators and ignore usb devices')
  args = parser.parse_args()
  constants.SetBuildType(args.target)

  run_tests_helper.SetLogLevel(args.verbose)

  devil_chromium.Initialize(adb_path=args.adb_path)

  try:
    return ProvisionDevices(args)
  except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError):
    logging.exception('Unable to provision local devices.')
    return exit_codes.INFRA
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/pylib/OWNERS b/deps/v8/build/android/pylib/OWNERS
new file mode 100644
index 0000000000..f008c99765
--- /dev/null
+++ b/deps/v8/build/android/pylib/OWNERS
@@ -0,0 +1,6 @@
+jbudorick@chromium.org
+klundberg@chromium.org
+navabi@chromium.org
+skyostil@chromium.org
+
+# COMPONENT: Test>Android
diff --git a/deps/v8/build/android/pylib/__init__.py b/deps/v8/build/android/pylib/__init__.py
new file mode 100644
index 0000000000..b93eb4fe0b
--- /dev/null
+++ b/deps/v8/build/android/pylib/__init__.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
# Locate the catapult checkout relative to this file and make its libraries
# importable without any external PYTHONPATH setup.
_CATAPULT_PATH = os.path.abspath(os.path.join(
    os.path.dirname(__file__), '..', '..', '..', 'third_party', 'catapult'))

_DEVIL_PATH = os.path.join(_CATAPULT_PATH, 'devil')

_PYTRACE_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_trace_event')

_PY_UTILS_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_utils')

_TRACE2HTML_PATH = os.path.join(_CATAPULT_PATH, 'tracing')


# Append each dependency exactly once, preserving the original order.
for _path in (_DEVIL_PATH, _PYTRACE_PATH, _PY_UTILS_PATH, _TRACE2HTML_PATH):
  if _path not in sys.path:
    sys.path.append(_path)
diff --git a/deps/v8/build/android/pylib/android/__init__.py b/deps/v8/build/android/pylib/android/__init__.py
new file mode 100644
index 0000000000..a67c3501b2
--- /dev/null
+++ b/deps/v8/build/android/pylib/android/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/android/logcat_symbolizer.py b/deps/v8/build/android/pylib/android/logcat_symbolizer.py
new file mode 100644
index 0000000000..720629b989
--- /dev/null
+++ b/deps/v8/build/android/pylib/android/logcat_symbolizer.py
@@ -0,0 +1,98 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+from devil.android import logcat_monitor
+
+BACKTRACE_LINE_RE = re.compile(r'#\d+')
+THREADTIME_RE = re.compile(
+ logcat_monitor.LogcatMonitor.THREADTIME_RE_FORMAT % (
+ r' *\S* *', r' *\S* *', r' *\S* *', r' *\S* *', r'.*'))
+
def SymbolizeLogcat(logcat, dest, symbolizer, abi):
  """Symbolize stack trace in the logcat.

  Symbolize the logcat and write the symbolized logcat to a new file.

  Args:
    logcat: Path to logcat file.
    dest: Path to where to write the symbolized logcat.
    symbolizer: The stack symbolizer to symbolize stack trace in logcat.
    abi: The device's product_cpu_abi. Symbolizer needs it to symbolize.

  A sample logcat that needs to be symbolized, after stripping the prefix,
  such as '08-07 18:39:37.692 28649 28649 E Ion : ', would be:
  Build fingerprint: 'google/shamu/shamu:7.1.1/NMF20B/3370:userdebug/dev-keys'
  Revision: '0'
  ABI: 'arm'
  pid: 28936, tid: 28936, name: chromium.chrome >>> org.chromium.chrome <<<
  signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------
  Abort message: '[FATAL:debug_urls.cc(151)] Check failed: false.
  #00 0x63e16c41 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0006cc4
  #01 0x63f19be3 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016fbe
  #02 0x63f19737 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016f73
  #03 0x63f18ddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016edd
  #04 0x63f18b79 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016eb7
  #05 0xab53f319 /system/lib/libart.so+0x000a3319
  #06
  r0 00000000 r1 00007108 r2 00000006 r3 00000008
  r4 ae60258c r5 00000006 r6 ae602534 r7 0000010c
  r8 bede5cd0 r9 00000030 sl 00000000 fp 9265a800
  ip 0000000b sp bede5c38 lr ac8e5537 pc ac8e7da0 cpsr 600f0010

  backtrace:
  #00 pc 00049da0 /system/lib/libc.so (tgkill+12)
  #01 pc 00047533 /system/lib/libc.so (pthread_kill+34)
  #02 pc 0001d635 /system/lib/libc.so (raise+10)
  #03 pc 00019181 /system/lib/libc.so (__libc_android_abort+34)
  #04 pc 00017048 /system/lib/libc.so (abort+4)
  #05 pc 00948605 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
  #06 pc 002c9f73 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
  #07 pc 003ccbe1 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
  #08 pc 003cc735 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
  #09 pc 003cbddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
  #10 pc 003cbb77 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so
  """

  with open(logcat) as logcat_file:
    with open(dest, 'w') as dest_file:
      # The current stack script will only print out the symbolized stack,
      # and completely ignore logs other than the crash log that is used for
      # symbolization, if any exists. Thus the code here extracts the
      # crash log inside the logcat and pass only the crash log to the script,
      # because we don't want to lose other information in the logcat that,
      # if passed to the stack script, will just be ignored by it.
      # TODO(crbug.com/755225): Rewrite the logic here.
      # State machine: outside_of_crash_log flips off at the 'Build
      # fingerprint:' marker; in_lower_half_crash flips on at 'backtrace:'
      # and the crash log ends at the first non-'#NN' line after that.
      outside_of_crash_log = True
      in_lower_half_crash = False
      data_to_symbolize = []

      for line in logcat_file:
        if outside_of_crash_log:
          # Check whether it is the start of crash log.
          if 'Build fingerprint: ' in line:
            outside_of_crash_log = False
            # Only include necessary information for symbolization.
            # The logic here that removes date, time, proc_id etc.
            # should be in sync with _THREADTIME_RE_FORMAT in logcat_monitor.
            # NOTE(review): re.search(...) returns None for lines without the
            # threadtime prefix, which would raise AttributeError here --
            # confirm every crash-log line carries that prefix.
            data_to_symbolize.append(
                re.search(THREADTIME_RE, line).group(7))
          else:
            # Non-crash lines pass through to the destination unchanged.
            dest_file.write(line)
        else:
          # Once we have reached the end of the backtrace section,
          # we will start symbolizing.
          if in_lower_half_crash and not bool(BACKTRACE_LINE_RE.search(line)):
            outside_of_crash_log = True
            in_lower_half_crash = False
            symbolized_lines = symbolizer.ExtractAndResolveNativeStackTraces(
                data_to_symbolize, abi)
            dest_file.write('\n'.join(symbolized_lines) + '\n' + line)
            data_to_symbolize = []
          else:
            if not in_lower_half_crash and 'backtrace:' in line:
              in_lower_half_crash = True
            data_to_symbolize.append(
                re.search(THREADTIME_RE, line).group(7))
diff --git a/deps/v8/build/android/pylib/base/__init__.py b/deps/v8/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/base/base_test_result.py b/deps/v8/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000000..bb25a74186
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,262 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+import threading
+
+
+class ResultType(object):
+ """Class enumerating test types."""
+ # The test passed.
+ PASS = 'SUCCESS'
+
+ # The test was intentionally skipped.
+ SKIP = 'SKIPPED'
+
+ # The test failed.
+ FAIL = 'FAILURE'
+
+ # The test caused the containing process to crash.
+ CRASH = 'CRASH'
+
+ # The test timed out.
+ TIMEOUT = 'TIMEOUT'
+
+ # The test ran, but we couldn't determine what happened.
+ UNKNOWN = 'UNKNOWN'
+
+ # The test did not run.
+ NOTRUN = 'NOTRUN'
+
+ @staticmethod
+ def GetTypes():
+ """Get a list of all test types."""
+ return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+ ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN,
+ ResultType.NOTRUN]
+
+
+class BaseTestResult(object):
+ """Base class for a single test result."""
+
+ def __init__(self, name, test_type, duration=0, log=''):
+ """Construct a BaseTestResult.
+
+ Args:
+ name: Name of the test which defines uniqueness.
+ test_type: Type of the test result as defined in ResultType.
+ duration: Time it took for the test to run in milliseconds.
+ log: An optional string listing any errors.
+ """
+ assert name
+ assert test_type in ResultType.GetTypes()
+ self._name = name
+ self._test_type = test_type
+ self._duration = duration
+ self._log = log
+ self._links = {}
+
+ def __str__(self):
+ return self._name
+
+ def __repr__(self):
+ return self._name
+
+ def __cmp__(self, other):
+ # pylint: disable=W0212
+ return cmp(self._name, other._name)
+
+ def __hash__(self):
+ return hash(self._name)
+
+ def SetName(self, name):
+ """Set the test name.
+
+ Because we're putting this into a set, this should only be used if moving
+ this test result into another set.
+ """
+ self._name = name
+
+ def GetName(self):
+ """Get the test name."""
+ return self._name
+
+ def SetType(self, test_type):
+ """Set the test result type."""
+ assert test_type in ResultType.GetTypes()
+ self._test_type = test_type
+
+ def GetType(self):
+ """Get the test result type."""
+ return self._test_type
+
+ def GetDuration(self):
+ """Get the test duration."""
+ return self._duration
+
+ def SetLog(self, log):
+ """Set the test log."""
+ self._log = log
+
+ def GetLog(self):
+ """Get the test log."""
+ return self._log
+
+ def SetLink(self, name, link_url):
+ """Set link with test result data."""
+ self._links[name] = link_url
+
+ def GetLinks(self):
+ """Get dict containing links to test result data."""
+ return self._links
+
+
+class TestRunResults(object):
+ """Set of results for a test run."""
+
+ def __init__(self):
+ self._links = {}
+ self._results = set()
+ self._results_lock = threading.RLock()
+
+ def SetLink(self, name, link_url):
+ """Add link with test run results data."""
+ self._links[name] = link_url
+
+ def GetLinks(self):
+ """Get dict containing links to test run result data."""
+ return self._links
+
+ def GetLogs(self):
+ """Get the string representation of all test logs."""
+ with self._results_lock:
+ s = []
+ for test_type in ResultType.GetTypes():
+ if test_type != ResultType.PASS:
+ for t in sorted(self._GetType(test_type)):
+ log = t.GetLog()
+ if log:
+ s.append('[%s] %s:' % (test_type, t))
+ s.append(log)
+ return '\n'.join(s)
+
+ def GetGtestForm(self):
+ """Get the gtest string representation of this object."""
+ with self._results_lock:
+ s = []
+ plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+ tests = lambda n: plural(n, 'test', 'tests')
+
+ s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+ s.append('[ PASSED ] %s.' % (tests(len(self.GetPass()))))
+
+ skipped = self.GetSkip()
+ if skipped:
+ s.append('[ SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+ for t in sorted(skipped):
+ s.append('[ SKIPPED ] %s' % str(t))
+
+ all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+ self.GetUnknown())
+ if all_failures:
+ s.append('[ FAILED ] %s, listed below:' % tests(len(all_failures)))
+ for t in sorted(self.GetFail()):
+ s.append('[ FAILED ] %s' % str(t))
+ for t in sorted(self.GetCrash()):
+ s.append('[ FAILED ] %s (CRASHED)' % str(t))
+ for t in sorted(self.GetTimeout()):
+ s.append('[ FAILED ] %s (TIMEOUT)' % str(t))
+ for t in sorted(self.GetUnknown()):
+ s.append('[ FAILED ] %s (UNKNOWN)' % str(t))
+ s.append('')
+ s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+ return '\n'.join(s)
+
+ def GetShortForm(self):
+ """Get the short string representation of this object."""
+ with self._results_lock:
+ s = []
+ s.append('ALL: %d' % len(self._results))
+ for test_type in ResultType.GetTypes():
+ s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+ return ''.join([x.ljust(15) for x in s])
+
+ def __str__(self):
+ return self.GetGtestForm()
+
+ def AddResult(self, result):
+ """Add |result| to the set.
+
+ Args:
+ result: An instance of BaseTestResult.
+ """
+ assert isinstance(result, BaseTestResult)
+ with self._results_lock:
+ self._results.discard(result)
+ self._results.add(result)
+
+ def AddResults(self, results):
+ """Add |results| to the set.
+
+ Args:
+ results: An iterable of BaseTestResult objects.
+ """
+ with self._results_lock:
+ for t in results:
+ self.AddResult(t)
+
+ def AddTestRunResults(self, results):
+ """Add the set of test results from |results|.
+
+ Args:
+ results: An instance of TestRunResults.
+ """
+ assert isinstance(results, TestRunResults), (
+ 'Expected TestRunResult object: %s' % type(results))
+ with self._results_lock:
+ # pylint: disable=W0212
+ self._results.update(results._results)
+
+ def GetAll(self):
+ """Get the set of all test results."""
+ with self._results_lock:
+ return self._results.copy()
+
+ def _GetType(self, test_type):
+ """Get the set of test results with the given test type."""
+ with self._results_lock:
+ return set(t for t in self._results if t.GetType() == test_type)
+
+ def GetPass(self):
+ """Get the set of all passed test results."""
+ return self._GetType(ResultType.PASS)
+
+ def GetSkip(self):
+ """Get the set of all skipped test results."""
+ return self._GetType(ResultType.SKIP)
+
+ def GetFail(self):
+ """Get the set of all failed test results."""
+ return self._GetType(ResultType.FAIL)
+
+ def GetCrash(self):
+ """Get the set of all crashed test results."""
+ return self._GetType(ResultType.CRASH)
+
+ def GetTimeout(self):
+ """Get the set of all timed out test results."""
+ return self._GetType(ResultType.TIMEOUT)
+
+ def GetUnknown(self):
+ """Get the set of all unknown test results."""
+ return self._GetType(ResultType.UNKNOWN)
+
+ def GetNotPass(self):
+ """Get the set of all non-passed test results."""
+ return self.GetAll() - self.GetPass()
+
+ def DidRunPass(self):
+ """Return whether the test run was successful."""
+ return not self.GetNotPass() - self.GetSkip()
diff --git a/deps/v8/build/android/pylib/base/base_test_result_unittest.py b/deps/v8/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000000..6f0cba7726
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+ def setUp(self):
+ self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+ other_p1 = BaseTestResult('p1', ResultType.PASS)
+ self.p2 = BaseTestResult('p2', ResultType.PASS)
+ self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+ self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+ self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+ self.tr = TestRunResults()
+ self.tr.AddResult(self.p1)
+ self.tr.AddResult(other_p1)
+ self.tr.AddResult(self.p2)
+ self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+ def testGetAll(self):
+ self.assertFalse(
+ self.tr.GetAll().symmetric_difference(
+ [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+ def testGetPass(self):
+ self.assertFalse(self.tr.GetPass().symmetric_difference(
+ [self.p1, self.p2]))
+
+ def testGetNotPass(self):
+ self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+ [self.f1, self.c1, self.u1]))
+
+ def testGetAddTestRunResults(self):
+ tr2 = TestRunResults()
+ other_p1 = BaseTestResult('p1', ResultType.PASS)
+ f2 = BaseTestResult('f2', ResultType.FAIL)
+ tr2.AddResult(other_p1)
+ tr2.AddResult(f2)
+ tr2.AddTestRunResults(self.tr)
+ self.assertFalse(
+ tr2.GetAll().symmetric_difference(
+ [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+ def testGetLogs(self):
+ log_print = ('[FAIL] f1:\n'
+ 'failure1\n'
+ '[CRASH] c1:\n'
+ 'crash1')
+ self.assertEqual(self.tr.GetLogs(), log_print)
+
+ def testGetShortForm(self):
+ short_print = ('ALL: 5 PASS: 2 FAIL: 1 '
+ 'CRASH: 1 TIMEOUT: 0 UNKNOWN: 1 ')
+ self.assertEqual(self.tr.GetShortForm(), short_print)
+
+ def testGetGtestForm(self):
+ gtest_print = ('[==========] 5 tests ran.\n'
+ '[ PASSED ] 2 tests.\n'
+ '[ FAILED ] 3 tests, listed below:\n'
+ '[ FAILED ] f1\n'
+ '[ FAILED ] c1 (CRASHED)\n'
+ '[ FAILED ] u1 (UNKNOWN)\n'
+ '\n'
+ '3 FAILED TESTS')
+ self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+ def testRunPassed(self):
+ self.assertFalse(self.tr.DidRunPass())
+ tr2 = TestRunResults()
+ self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/base/environment.py b/deps/v8/build/android/pylib/base/environment.py
new file mode 100644
index 0000000000..744c392c1b
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/environment.py
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Environment(object):
+ """An environment in which tests can be run.
+
+ This is expected to handle all logic that is applicable to an entire specific
+ environment but is independent of the test type.
+
+ Examples include:
+ - The local device environment, for running tests on devices attached to
+ the local machine.
+ - The local machine environment, for running tests directly on the local
+ machine.
+ """
+
+ def __init__(self, output_manager):
+ """Environment constructor.
+
+ Args:
+ output_manager: Instance of |output_manager.OutputManager| used to
+ save test output.
+ """
+ self._output_manager = output_manager
+
+ # Some subclasses have different teardown behavior on receiving SIGTERM.
+ self._received_sigterm = False
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb):
+ self.TearDown()
+
+ @property
+ def output_manager(self):
+ return self._output_manager
+
+ def ReceivedSigterm(self):
+ self._received_sigterm = True
diff --git a/deps/v8/build/android/pylib/base/environment_factory.py b/deps/v8/build/android/pylib/base/environment_factory.py
new file mode 100644
index 0000000000..fdca803eff
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/environment_factory.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.local.device import local_device_environment
+from pylib.local.machine import local_machine_environment
+
+def CreateEnvironment(args, output_manager, error_func):
+
+ if args.environment == 'local':
+ if args.command not in constants.LOCAL_MACHINE_TESTS:
+ return local_device_environment.LocalDeviceEnvironment(
+ args, output_manager, error_func)
+ else:
+ return local_machine_environment.LocalMachineEnvironment(
+ args, output_manager, error_func)
+
+ error_func('Unable to create %s environment.' % args.environment)
diff --git a/deps/v8/build/android/pylib/base/mock_environment.py b/deps/v8/build/android/pylib/base/mock_environment.py
new file mode 100644
index 0000000000..9ebb083a08
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/mock_environment.py
@@ -0,0 +1,12 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import environment
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+MockEnvironment = mock.MagicMock(environment.Environment)
diff --git a/deps/v8/build/android/pylib/base/mock_test_instance.py b/deps/v8/build/android/pylib/base/mock_test_instance.py
new file mode 100644
index 0000000000..18def01990
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/mock_test_instance.py
@@ -0,0 +1,12 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+MockTestInstance = mock.MagicMock(test_instance.TestInstance)
diff --git a/deps/v8/build/android/pylib/base/output_manager.py b/deps/v8/build/android/pylib/base/output_manager.py
new file mode 100644
index 0000000000..60b8123b8d
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/output_manager.py
@@ -0,0 +1,158 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+import tempfile
+
+from devil.utils import reraiser_thread
+
+
+class Datatype(object):
+ HTML = 'text/html'
+ JSON = 'application/json'
+ PNG = 'image/png'
+ TEXT = 'text/plain'
+
+
+class OutputManager(object):
+
+ def __init__(self):
+ """OutputManager Constructor.
+
+ This class provides a simple interface to save test output. Subclasses
+ of this will allow users to save test results in the cloud or locally.
+ """
+ self._allow_upload = False
+ self._thread_group = None
+
+ @contextlib.contextmanager
+ def ArchivedTempfile(
+ self, out_filename, out_subdir, datatype=Datatype.TEXT):
+ """Archive file contents asynchonously and then deletes file.
+
+ Args:
+ out_filename: Name for saved file.
+ out_subdir: Directory to save |out_filename| to.
+ datatype: Datatype of file.
+
+ Returns:
+ An ArchivedFile file. This file will be uploaded async when the context
+ manager exits. AFTER the context manager exits, you can get the link to
+ where the file will be stored using the Link() API. You can use typical
+    file APIs to write and flush the ArchivedFile. You can also use file.name
+ to get the local filepath to where the underlying file exists. If you do
+    this, you are responsible for flushing the file before exiting the context
+ manager.
+ """
+ if not self._allow_upload:
+ raise Exception('Must run |SetUp| before attempting to upload!')
+
+ f = self._CreateArchivedFile(out_filename, out_subdir, datatype)
+ try:
+ yield f
+ finally:
+ f.PrepareArchive()
+
+ def archive():
+ try:
+ f.Archive()
+ finally:
+ f.Delete()
+
+ thread = reraiser_thread.ReraiserThread(func=archive)
+ thread.start()
+ self._thread_group.Add(thread)
+
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ """Returns an instance of ArchivedFile."""
+ raise NotImplementedError
+
+ def SetUp(self):
+ self._allow_upload = True
+ self._thread_group = reraiser_thread.ReraiserThreadGroup()
+
+ def TearDown(self):
+ self._allow_upload = False
+ logging.info('Finishing archiving output.')
+ self._thread_group.JoinAll()
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb):
+ self.TearDown()
+
+
+class ArchivedFile(object):
+
+ def __init__(self, out_filename, out_subdir, datatype):
+ self._out_filename = out_filename
+ self._out_subdir = out_subdir
+ self._datatype = datatype
+
+ self._f = tempfile.NamedTemporaryFile(delete=False)
+ self._ready_to_archive = False
+
+ @property
+ def name(self):
+ return self._f.name
+
+ def write(self, *args, **kwargs):
+ if self._ready_to_archive:
+ raise Exception('Cannot write to file after archiving has begun!')
+ self._f.write(*args, **kwargs)
+
+ def flush(self, *args, **kwargs):
+ if self._ready_to_archive:
+ raise Exception('Cannot flush file after archiving has begun!')
+ self._f.flush(*args, **kwargs)
+
+ def Link(self):
+ """Returns location of archived file."""
+ if not self._ready_to_archive:
+ raise Exception('Cannot get link to archived file before archiving '
+ 'has begun')
+ return self._Link()
+
+ def _Link(self):
+ """Note for when overriding this function.
+
+ This function will certainly be called before the file
+ has finished being archived. Therefore, this needs to be able to know the
+ exact location of the archived file before it is finished being archived.
+ """
+ raise NotImplementedError
+
+ def PrepareArchive(self):
+ """Meant to be called synchronously to prepare file for async archiving."""
+ self.flush()
+ self._ready_to_archive = True
+ self._PrepareArchive()
+
+ def _PrepareArchive(self):
+ """Note for when overriding this function.
+
+ This function is needed for things such as computing the location of
+ content addressed files. This is called after the file is written but
+ before archiving has begun.
+ """
+ pass
+
+ def Archive(self):
+ """Archives file."""
+ if not self._ready_to_archive:
+ raise Exception('File is not ready to archive. Be sure you are not '
+ 'writing to the file and PrepareArchive has been called')
+ self._Archive()
+
+ def _Archive(self):
+ raise NotImplementedError
+
+ def Delete(self):
+ """Deletes the backing file."""
+ self._f.close()
+ os.remove(self.name)
diff --git a/deps/v8/build/android/pylib/base/output_manager_factory.py b/deps/v8/build/android/pylib/base/output_manager_factory.py
new file mode 100644
index 0000000000..7a644bcf8a
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/output_manager_factory.py
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.output import local_output_manager
+from pylib.output import remote_output_manager
+
+
+def CreateOutputManager(args):
+ if args.local_output:
+ return local_output_manager.LocalOutputManager(
+ output_dir=constants.GetOutDirectory())
+ else:
+ return remote_output_manager.RemoteOutputManager(
+ bucket=args.gs_results_bucket)
diff --git a/deps/v8/build/android/pylib/base/output_manager_test_case.py b/deps/v8/build/android/pylib/base/output_manager_test_case.py
new file mode 100644
index 0000000000..1e4cd7ef68
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/output_manager_test_case.py
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import unittest
+
+
+class OutputManagerTestCase(unittest.TestCase):
+
+ def assertUsableTempFile(self, archived_tempfile):
+ self.assertTrue(bool(archived_tempfile.name))
+ self.assertTrue(os.path.exists(archived_tempfile.name))
+ self.assertTrue(os.path.isfile(archived_tempfile.name))
diff --git a/deps/v8/build/android/pylib/base/test_collection.py b/deps/v8/build/android/pylib/base/test_collection.py
new file mode 100644
index 0000000000..de510272bd
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_collection.py
@@ -0,0 +1,80 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import threading
+
+class TestCollection(object):
+ """A threadsafe collection of tests.
+
+ Args:
+ tests: List of tests to put in the collection.
+ """
+
+ def __init__(self, tests=None):
+ if not tests:
+ tests = []
+ self._lock = threading.Lock()
+ self._tests = []
+ self._tests_in_progress = 0
+ # Used to signal that an item is available or all items have been handled.
+ self._item_available_or_all_done = threading.Event()
+ for t in tests:
+ self.add(t)
+
+ def _pop(self):
+ """Pop a test from the collection.
+
+ Waits until a test is available or all tests have been handled.
+
+ Returns:
+ A test or None if all tests have been handled.
+ """
+ while True:
+ # Wait for a test to be available or all tests to have been handled.
+ self._item_available_or_all_done.wait()
+ with self._lock:
+ # Check which of the two conditions triggered the signal.
+ if self._tests_in_progress == 0:
+ return None
+ try:
+ return self._tests.pop(0)
+ except IndexError:
+ # Another thread beat us to the available test, wait again.
+ self._item_available_or_all_done.clear()
+
+ def add(self, test):
+ """Add a test to the collection.
+
+ Args:
+ test: A test to add.
+ """
+ with self._lock:
+ self._tests.append(test)
+ self._item_available_or_all_done.set()
+ self._tests_in_progress += 1
+
+ def test_completed(self):
+ """Indicate that a test has been fully handled."""
+ with self._lock:
+ self._tests_in_progress -= 1
+ if self._tests_in_progress == 0:
+ # All tests have been handled, signal all waiting threads.
+ self._item_available_or_all_done.set()
+
+ def __iter__(self):
+ """Iterate through tests in the collection until all have been handled."""
+ while True:
+ r = self._pop()
+ if r is None:
+ break
+ yield r
+
+ def __len__(self):
+ """Return the number of tests currently in the collection."""
+ return len(self._tests)
+
+ def test_names(self):
+ """Return a list of the names of the tests currently in the collection."""
+ with self._lock:
+ return list(t.test for t in self._tests)
diff --git a/deps/v8/build/android/pylib/base/test_exception.py b/deps/v8/build/android/pylib/base/test_exception.py
new file mode 100644
index 0000000000..c98d2cb73e
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_exception.py
@@ -0,0 +1,8 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestException(Exception):
+ """Base class for exceptions thrown by the test runner."""
+ pass
diff --git a/deps/v8/build/android/pylib/base/test_instance.py b/deps/v8/build/android/pylib/base/test_instance.py
new file mode 100644
index 0000000000..7b1099cffa
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_instance.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestInstance(object):
+ """A type of test.
+
+ This is expected to handle all logic that is test-type specific but
+ independent of the environment or device.
+
+ Examples include:
+ - gtests
+ - instrumentation tests
+ """
+
+ def __init__(self):
+ pass
+
+ def TestType(self):
+ raise NotImplementedError
+
+ # pylint: disable=no-self-use
+ def GetPreferredAbis(self):
+ return None
+
+ # pylint: enable=no-self-use
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, _exc_type, _exc_val, _exc_tb):
+ self.TearDown()
diff --git a/deps/v8/build/android/pylib/base/test_instance_factory.py b/deps/v8/build/android/pylib/base/test_instance_factory.py
new file mode 100644
index 0000000000..7c21260161
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_instance_factory.py
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.linker import linker_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.perf import perf_test_instance
+from pylib.utils import device_dependencies
+
+
+def CreateTestInstance(args, error_func):
+
+ if args.command == 'gtest':
+ return gtest_test_instance.GtestTestInstance(
+ args, device_dependencies.GetDataDependencies, error_func)
+ elif args.command == 'instrumentation':
+ return instrumentation_test_instance.InstrumentationTestInstance(
+ args, device_dependencies.GetDataDependencies, error_func)
+ elif args.command == 'junit':
+ return junit_test_instance.JunitTestInstance(args, error_func)
+ elif args.command == 'linker':
+ return linker_test_instance.LinkerTestInstance(args)
+ elif args.command == 'monkey':
+ return monkey_test_instance.MonkeyTestInstance(args, error_func)
+ elif args.command == 'perf':
+ return perf_test_instance.PerfTestInstance(args, error_func)
+
+ error_func('Unable to create %s test instance.' % args.command)
diff --git a/deps/v8/build/android/pylib/base/test_run.py b/deps/v8/build/android/pylib/base/test_run.py
new file mode 100644
index 0000000000..fc72d3a547
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_run.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestRun(object):
+ """An execution of a particular test on a particular device.
+
+ This is expected to handle all logic that is specific to the combination of
+ environment and test type.
+
+ Examples include:
+ - local gtests
+ - local instrumentation tests
+ """
+
+ def __init__(self, env, test_instance):
+ self._env = env
+ self._test_instance = test_instance
+
+ # Some subclasses have different teardown behavior on receiving SIGTERM.
+ self._received_sigterm = False
+
+ def TestPackage(self):
+ raise NotImplementedError
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def RunTests(self, results):
+ """Runs Tests and populates |results|.
+
+ Args:
+ results: An array that should be populated with
+ |base_test_result.TestRunResults| objects.
+ """
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
+
+ def __enter__(self):
+ self.SetUp()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.TearDown()
+
+ def ReceivedSigterm(self):
+ self._received_sigterm = True
diff --git a/deps/v8/build/android/pylib/base/test_run_factory.py b/deps/v8/build/android/pylib/base/test_run_factory.py
new file mode 100644
index 0000000000..1f63a059c9
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_run_factory.py
@@ -0,0 +1,56 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.junit import junit_test_instance
+from pylib.linker import linker_test_instance
+from pylib.monkey import monkey_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_gtest_run
+from pylib.local.device import local_device_instrumentation_test_run
+from pylib.local.device import local_device_linker_test_run
+from pylib.local.device import local_device_monkey_test_run
+from pylib.local.device import local_device_perf_test_run
+from pylib.local.machine import local_machine_environment
+from pylib.local.machine import local_machine_junit_test_run
+from pylib.perf import perf_test_instance
+
+
+def _CreatePerfTestRun(args, env, test_instance):
+ if args.print_step:
+ return local_device_perf_test_run.PrintStep(
+ env, test_instance)
+ elif args.output_json_list:
+ return local_device_perf_test_run.OutputJsonList(
+ env, test_instance)
+ return local_device_perf_test_run.LocalDevicePerfTestRun(
+ env, test_instance)
+
+
+def CreateTestRun(args, env, test_instance, error_func):
+ if isinstance(env, local_device_environment.LocalDeviceEnvironment):
+ if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+ return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance)
+ if isinstance(test_instance,
+ instrumentation_test_instance.InstrumentationTestInstance):
+ return (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, test_instance))
+ if isinstance(test_instance, linker_test_instance.LinkerTestInstance):
+ return (local_device_linker_test_run
+ .LocalDeviceLinkerTestRun(env, test_instance))
+ if isinstance(test_instance, monkey_test_instance.MonkeyTestInstance):
+ return (local_device_monkey_test_run
+ .LocalDeviceMonkeyTestRun(env, test_instance))
+ if isinstance(test_instance,
+ perf_test_instance.PerfTestInstance):
+ return _CreatePerfTestRun(args, env, test_instance)
+
+ if isinstance(env, local_machine_environment.LocalMachineEnvironment):
+ if isinstance(test_instance, junit_test_instance.JunitTestInstance):
+ return (local_machine_junit_test_run
+ .LocalMachineJunitTestRun(env, test_instance))
+
+ error_func('Unable to create test run for %s tests in %s environment'
+ % (str(test_instance), str(env)))
diff --git a/deps/v8/build/android/pylib/base/test_server.py b/deps/v8/build/android/pylib/base/test_server.py
new file mode 100644
index 0000000000..763e1212c3
--- /dev/null
+++ b/deps/v8/build/android/pylib/base/test_server.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class TestServer(object):
+ """Base class for any server that needs to be set up for the tests."""
+
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def SetUp(self):
+ raise NotImplementedError
+
+ def Reset(self):
+ raise NotImplementedError
+
+ def TearDown(self):
+ raise NotImplementedError
diff --git a/deps/v8/build/android/pylib/constants/__init__.py b/deps/v8/build/android/pylib/constants/__init__.py
new file mode 100644
index 0000000000..901a942482
--- /dev/null
+++ b/deps/v8/build/android/pylib/constants/__init__.py
@@ -0,0 +1,274 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
+
+# pylint: disable=W0212
+
+import collections
+import glob
+import logging
+import os
+import subprocess
+
+import devil.android.sdk.keyevent
+from devil.android.constants import chrome
+from devil.android.sdk import version_codes
+from devil.constants import exit_codes
+
+
+keyevent = devil.android.sdk.keyevent
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir, os.pardir)))
+
+PACKAGE_INFO = dict(chrome.PACKAGE_INFO)
+PACKAGE_INFO.update({
+ 'legacy_browser': chrome.PackageInfo(
+ 'com.google.android.browser',
+ 'com.android.browser.BrowserActivity',
+ None,
+ None),
+ 'chromecast_shell': chrome.PackageInfo(
+ 'com.google.android.apps.mediashell',
+ 'com.google.android.apps.mediashell.MediaShellActivity',
+ 'castshell-command-line',
+ None),
+ 'android_webview_shell': chrome.PackageInfo(
+ 'org.chromium.android_webview.shell',
+ 'org.chromium.android_webview.shell.AwShellActivity',
+ 'android-webview-command-line',
+ None),
+ 'gtest': chrome.PackageInfo(
+ 'org.chromium.native_test',
+ 'org.chromium.native_test.NativeUnitTestActivity',
+ 'chrome-native-tests-command-line',
+ None),
+ 'components_browsertests': chrome.PackageInfo(
+ 'org.chromium.components_browsertests_apk',
+ ('org.chromium.components_browsertests_apk' +
+ '.ComponentsBrowserTestsActivity'),
+ 'chrome-native-tests-command-line',
+ None),
+ 'content_browsertests': chrome.PackageInfo(
+ 'org.chromium.content_browsertests_apk',
+ 'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+ 'chrome-native-tests-command-line',
+ None),
+ 'chromedriver_webview_shell': chrome.PackageInfo(
+ 'org.chromium.chromedriver_webview_shell',
+ 'org.chromium.chromedriver_webview_shell.Main',
+ None,
+ None),
+ 'android_webview_cts': chrome.PackageInfo(
+ 'com.android.webview',
+ 'com.android.cts.webkit.WebViewStartupCtsActivity',
+ 'webview-command-line',
+ None),
+})
+
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# Lighttpd server will attempt to use 9000 as the default port; if unavailable,
+# it will find a free port from 8001 - 8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+TEST_POLICY_SERVER_PORT = 9051
+
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+ '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+ANDROID_SDK_VERSION = version_codes.OREO_MR1
+ANDROID_SDK_BUILD_TOOLS_VERSION = '27.0.3'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk',
+ 'public')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+ 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+ 'third_party', 'android_ndk')
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+ os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+ 'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+# TODO(jbudorick): Remove once unused.
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Configure ubsan to print stack traces in the format understood by "stack" so
+# that they will be symbolized, and disable signal handlers because they
+# interfere with the breakpad and sandbox tests.
+# This value is duplicated in
+# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java
+UBSAN_OPTIONS = (
+ 'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' '
+ 'handle_segv=0 handle_sigbus=0 handle_sigfpe=0')
+
+# TODO(jbudorick): Rework this into testing/buildbot/
+PYTHON_UNIT_TEST_SUITES = {
+ 'pylib_py_unittests': {
+ 'path':
+ os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
+ 'test_modules': [
+ 'devil.android.device_utils_test',
+ 'devil.android.md5sum_test',
+ 'devil.utils.cmd_helper_test',
+ 'pylib.results.json_results_test',
+ 'pylib.utils.proguard_test',
+ ]
+ },
+ 'gyp_py_unittests': {
+ 'path':
+ os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
+ 'test_modules': [
+ 'java_cpp_enum_tests',
+ 'java_cpp_strings_tests',
+ 'java_google_api_keys_tests',
+ 'extract_unwind_tables_tests',
+ ]
+ },
+}
+
+LOCAL_MACHINE_TESTS = ['junit', 'python']
+VALID_ENVIRONMENTS = ['local']
+VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
+ 'perf', 'python']
+VALID_DEVICE_TYPES = ['Android', 'iOS']
+
+
+def SetBuildType(build_type):
+ """Set the BUILDTYPE environment variable.
+
+ NOTE: Using this function is deprecated, in favor of SetOutputDirectory(),
+ it is still maintained for a few scripts that typically call it
+ to implement their --release and --debug command-line options.
+
+ When writing a new script, consider supporting an --output-dir or
+ --chromium-output-dir option instead, and calling SetOutputDirectory()
+ instead.
+
+ NOTE: If CHROMIUM_OUTPUT_DIR is defined, or if SetOutputDirectory() was
+ called previously, this will be completely ignored.
+ """
+ chromium_output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+ if chromium_output_dir:
+ logging.warning(
+ 'SetBuildType("%s") ignored since CHROMIUM_OUTPUT_DIR is already '
+ 'defined as (%s)', build_type, chromium_output_dir)
+ os.environ['BUILDTYPE'] = build_type
+
+
+def SetOutputDirectory(output_directory):
+ """Set the Chromium output directory.
+
+ This must be called early by scripts that rely on GetOutDirectory() or
+ CheckOutputDirectory(). Typically by providing an --output-dir or
+ --chromium-output-dir option.
+ """
+ os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory
+
+
+# The message that is printed when the Chromium output directory cannot
+# be found. Note that CHROMIUM_OUT_DIR and BUILDTYPE are not mentioned
+# intentionally to encourage the use of CHROMIUM_OUTPUT_DIR instead.
+_MISSING_OUTPUT_DIR_MESSAGE = '\
+The Chromium output directory could not be found. Please use an option such as \
+--output-directory to provide it (see --help for details). Otherwise, \
+define the CHROMIUM_OUTPUT_DIR environment variable.'
+
+
+def GetOutDirectory():
+ """Returns the Chromium build output directory.
+
+ NOTE: This is determined in the following way:
+ - From a previous call to SetOutputDirectory()
+ - Otherwise, from the CHROMIUM_OUTPUT_DIR env variable, if it is defined.
+ - Otherwise, from the current Chromium source directory, and a previous
+ call to SetBuildType() or the BUILDTYPE env variable, in combination
+ with the optional CHROMIUM_OUT_DIR env variable.
+ """
+ if 'CHROMIUM_OUTPUT_DIR' in os.environ:
+ return os.path.abspath(os.path.join(
+ DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
+
+ build_type = os.environ.get('BUILDTYPE')
+ if not build_type:
+ raise EnvironmentError(_MISSING_OUTPUT_DIR_MESSAGE)
+
+ return os.path.abspath(os.path.join(
+ DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+ build_type))
+
+
+def CheckOutputDirectory():
+ """Checks that the Chromium output directory is set, or can be found.
+
+ If it is not already set, this will also perform a little auto-detection:
+
+ - If the current directory contains a build.ninja file, use it as
+ the output directory.
+
+ - If CHROME_HEADLESS is defined in the environment (e.g. on a bot),
+ look if there is a single output directory under DIR_SOURCE_ROOT/out/,
+ and if so, use it as the output directory.
+
+ Raises:
+ Exception: If no output directory is detected.
+ """
+ output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR')
+ if output_dir:
+ return
+
+ build_type = os.environ.get('BUILDTYPE')
+ if build_type and len(build_type) > 1:
+ return
+
+ # If CWD is an output directory, then assume it's the desired one.
+ if os.path.exists('build.ninja'):
+ output_dir = os.getcwd()
+ SetOutputDirectory(output_dir)
+ return
+
+ # When running on bots, see if the output directory is obvious.
+ # TODO(http://crbug.com/833808): Get rid of this by ensuring bots always set
+ # CHROMIUM_OUTPUT_DIR correctly.
+ if os.environ.get('CHROME_HEADLESS'):
+ dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja'))
+ if len(dirs) == 1:
+ SetOutputDirectory(dirs[0])
+ return
+
+ raise Exception(
+ 'Chromium output directory not set, and CHROME_HEADLESS detected. ' +
+ 'However, multiple out dirs exist: %r' % dirs)
+
+ raise Exception(_MISSING_OUTPUT_DIR_MESSAGE)
+
+
+# Exit codes
+ERROR_EXIT_CODE = exit_codes.ERROR
+INFRA_EXIT_CODE = exit_codes.INFRA
+WARNING_EXIT_CODE = exit_codes.WARNING
diff --git a/deps/v8/build/android/pylib/constants/host_paths.py b/deps/v8/build/android/pylib/constants/host_paths.py
new file mode 100644
index 0000000000..b249d3c291
--- /dev/null
+++ b/deps/v8/build/android/pylib/constants/host_paths.py
@@ -0,0 +1,95 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import sys
+
+from pylib import constants
+
+DIR_SOURCE_ROOT = os.environ.get(
+ 'CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir, os.pardir)))
+
+BUILD_COMMON_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common')
+
+# third-party libraries
+ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development',
+ 'scripts')
+DEVIL_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil')
+PYMOCK_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'pymock')
+TRACING_PATH = os.path.join(
+ DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing')
+
+@contextlib.contextmanager
+def SysPath(path, position=None):
+ if position is None:
+ sys.path.append(path)
+ else:
+ sys.path.insert(position, path)
+ try:
+ yield
+ finally:
+ if sys.path[-1] == path:
+ sys.path.pop()
+ else:
+ sys.path.remove(path)
+
+
+# Map of CPU architecture name to (toolchain_name, binprefix) pairs.
+# TODO(digit): Use the build_vars.txt file generated by gn.
+_TOOL_ARCH_MAP = {
+ 'arm': ('arm-linux-androideabi-4.9', 'arm-linux-androideabi'),
+ 'arm64': ('aarch64-linux-android-4.9', 'aarch64-linux-android'),
+ 'x86': ('x86-4.9', 'i686-linux-android'),
+ 'x86_64': ('x86_64-4.9', 'x86_64-linux-android'),
+ 'x64': ('x86_64-4.9', 'x86_64-linux-android'),
+ 'mips': ('mipsel-linux-android-4.9', 'mipsel-linux-android'),
+}
+
+# Cache used to speed up the results of ToolPath()
+# Maps (arch, tool_name) pairs to fully qualified program paths.
+# Useful because ToolPath() is called repeatedly for demangling C++ symbols.
+_cached_tool_paths = {}
+
+
+def ToolPath(tool, cpu_arch):
+ """Return a fully qualified path to an arch-specific toolchain program.
+
+ Args:
+ tool: Unprefixed toolchain program name (e.g. 'objdump')
+ cpu_arch: Target CPU architecture (e.g. 'arm64')
+ Returns:
+ Fully qualified path (e.g. '..../aarch64-linux-android-objdump')
+ Raises:
+ Exception if the toolchain could not be found.
+ """
+ tool_path = _cached_tool_paths.get((tool, cpu_arch))
+ if tool_path:
+ return tool_path
+
+ toolchain_source, toolchain_prefix = _TOOL_ARCH_MAP.get(
+ cpu_arch, (None, None))
+ if not toolchain_source:
+ raise Exception('Could not find tool chain for ' + cpu_arch)
+
+ toolchain_subdir = (
+ 'toolchains/%s/prebuilt/linux-x86_64/bin' % toolchain_source)
+
+ tool_path = os.path.join(constants.ANDROID_NDK_ROOT,
+ toolchain_subdir,
+ toolchain_prefix + '-' + tool)
+
+ _cached_tool_paths[(tool, cpu_arch)] = tool_path
+ return tool_path
+
+
+def GetAaptPath():
+ """Returns the path to the 'aapt' executable."""
+ return os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
diff --git a/deps/v8/build/android/pylib/constants/host_paths_unittest.py b/deps/v8/build/android/pylib/constants/host_paths_unittest.py
new file mode 100755
index 0000000000..658ed08bd9
--- /dev/null
+++ b/deps/v8/build/android/pylib/constants/host_paths_unittest.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import unittest
+
+import pylib.constants as constants
+import pylib.constants.host_paths as host_paths
+
+
+# This map corresponds to the binprefix of NDK prebuilt toolchains for various
+# target CPU architectures. Note that 'x86_64' and 'x64' are the same.
+_EXPECTED_NDK_TOOL_SUBDIR_MAP = {
+ 'arm': 'toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/' +
+ 'arm-linux-androideabi-',
+ 'arm64':
+ 'toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+ 'aarch64-linux-android-',
+ 'x86': 'toolchains/x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-',
+ 'x86_64':
+ 'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+ 'x64':
+ 'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+ 'mips':
+ 'toolchains/mipsel-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+ 'mipsel-linux-android-'
+}
+
+
+class HostPathsTest(unittest.TestCase):
+ def setUp(self):
+ logging.getLogger().setLevel(logging.ERROR)
+
+ def test_GetAaptPath(self):
+ _EXPECTED_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+ self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+ self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+
+ def test_ToolPath(self):
+ for cpu_arch, binprefix in _EXPECTED_NDK_TOOL_SUBDIR_MAP.iteritems():
+ expected_binprefix = os.path.join(constants.ANDROID_NDK_ROOT, binprefix)
+ expected_path = expected_binprefix + 'foo'
+ self.assertEqual(host_paths.ToolPath('foo', cpu_arch), expected_path)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/content_settings.py b/deps/v8/build/android/pylib/content_settings.py
new file mode 100644
index 0000000000..3bf11bc490
--- /dev/null
+++ b/deps/v8/build/android/pylib/content_settings.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+ """A dict interface to interact with device content settings.
+
+ System properties are key/value pairs as exposed by adb shell content.
+ """
+
+ def __init__(self, table, device):
+ super(ContentSettings, self).__init__()
+ self._table = table
+ self._device = device
+
+ @staticmethod
+ def _GetTypeBinding(value):
+ if isinstance(value, bool):
+ return 'b'
+ if isinstance(value, float):
+ return 'f'
+ if isinstance(value, int):
+ return 'i'
+ if isinstance(value, long):
+ return 'l'
+ if isinstance(value, str):
+ return 's'
+ raise ValueError('Unsupported type %s' % type(value))
+
+ def iteritems(self):
+ # Example row:
+ # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+ for row in self._device.RunShellCommand(
+ 'content query --uri content://%s' % self._table, as_root=True):
+ fields = row.split(', ')
+ key = None
+ value = None
+ for field in fields:
+ k, _, v = field.partition('=')
+ if k == 'name':
+ key = v
+ elif k == 'value':
+ value = v
+ if not key:
+ continue
+ if not value:
+ value = ''
+ yield key, value
+
+ def __getitem__(self, key):
+ return self._device.RunShellCommand(
+ 'content query --uri content://%s --where "name=\'%s\'" '
+ '--projection value' % (self._table, key), as_root=True).strip()
+
+ def __setitem__(self, key, value):
+ if key in self:
+ self._device.RunShellCommand(
+ 'content update --uri content://%s '
+ '--bind value:%s:%s --where "name=\'%s\'"' % (
+ self._table,
+ self._GetTypeBinding(value), value, key),
+ as_root=True)
+ else:
+ self._device.RunShellCommand(
+ 'content insert --uri content://%s '
+ '--bind name:%s:%s --bind value:%s:%s' % (
+ self._table,
+ self._GetTypeBinding(key), key,
+ self._GetTypeBinding(value), value),
+ as_root=True)
+
+ def __delitem__(self, key):
+ self._device.RunShellCommand(
+ 'content delete --uri content://%s '
+ '--bind name:%s:%s' % (
+ self._table,
+ self._GetTypeBinding(key), key),
+ as_root=True)
diff --git a/deps/v8/build/android/pylib/device/__init__.py b/deps/v8/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/device/__init__.py
diff --git a/deps/v8/build/android/pylib/device/commands/BUILD.gn b/deps/v8/build/android/pylib/device/commands/BUILD.gn
new file mode 100644
index 0000000000..480db1e88f
--- /dev/null
+++ b/deps/v8/build/android/pylib/device/commands/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+group("commands") {
+ data_deps = [
+ ":chromium_commands_java",
+ ]
+}
+
+android_library("chromium_commands_java") {
+ emma_never_instrument = true
+ java_files = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
+ dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+ data = [
+ dex_path,
+ ]
+}
diff --git a/deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
new file mode 100644
index 0000000000..5428af25f9
--- /dev/null
+++ b/deps/v8/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
@@ -0,0 +1,95 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.android.commands.unzip;
+
+import android.util.Log;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+ private static final String TAG = "Unzip";
+
+ public static void main(String[] args) {
+ try {
+ (new Unzip()).run(args);
+ } catch (RuntimeException e) {
+ Log.e(TAG, e.toString());
+ System.exit(1);
+ }
+ }
+
+ private void showUsage(PrintStream s) {
+ s.println("Usage:");
+ s.println("unzip [zipfile]");
+ }
+
+ @SuppressWarnings("Finally")
+ private void unzip(String[] args) {
+ ZipInputStream zis = null;
+ try {
+ String zipfile = args[0];
+ zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+ ZipEntry ze = null;
+
+ byte[] bytes = new byte[1024];
+ while ((ze = zis.getNextEntry()) != null) {
+ File outputFile = new File(ze.getName());
+ if (ze.isDirectory()) {
+ if (!outputFile.exists() && !outputFile.mkdirs()) {
+ throw new RuntimeException(
+ "Failed to create directory: " + outputFile.toString());
+ }
+ } else {
+ File parentDir = outputFile.getParentFile();
+ if (!parentDir.exists() && !parentDir.mkdirs()) {
+ throw new RuntimeException(
+ "Failed to create directory: " + parentDir.toString());
+ }
+ OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+ int actual_bytes = 0;
+ int total_bytes = 0;
+ while ((actual_bytes = zis.read(bytes)) != -1) {
+ out.write(bytes, 0, actual_bytes);
+ total_bytes += actual_bytes;
+ }
+ out.close();
+ }
+ zis.closeEntry();
+ }
+
+ } catch (IOException e) {
+ throw new RuntimeException("Error while unzipping: " + e.toString());
+ } finally {
+ try {
+ if (zis != null) zis.close();
+ } catch (IOException e) {
+ throw new RuntimeException("Error while closing zip: " + e.toString());
+ }
+ }
+ }
+
+ public void run(String[] args) {
+ if (args.length != 1) {
+ showUsage(System.err);
+ throw new RuntimeException("Incorrect usage!");
+ }
+
+ unzip(args);
+ }
+}
+
diff --git a/deps/v8/build/android/pylib/device_settings.py b/deps/v8/build/android/pylib/device_settings.py
new file mode 100644
index 0000000000..ab4ad1b900
--- /dev/null
+++ b/deps/v8/build/android/pylib/device_settings.py
@@ -0,0 +1,199 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+ '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+_COMPATIBLE_BUILD_TYPES = ['userdebug', 'eng']
+
+
+def ConfigureContentSettings(device, desired_settings):
+ """Configures device content settings from a list.
+
+ Many settings are documented at:
+ http://developer.android.com/reference/android/provider/Settings.Global.html
+ http://developer.android.com/reference/android/provider/Settings.Secure.html
+ http://developer.android.com/reference/android/provider/Settings.System.html
+
+ Many others are undocumented.
+
+ Args:
+ device: A DeviceUtils instance for the device to configure.
+ desired_settings: A list of (table, [(key: value), ...]) for all
+ settings to configure.
+ """
+ for table, key_value in desired_settings:
+ settings = content_settings.ContentSettings(table, device)
+ for key, value in key_value:
+ settings[key] = value
+ logging.info('\n%s %s', table, (80 - len(table)) * '-')
+ for key, value in sorted(settings.iteritems()):
+ logging.info('\t%s: %s', key, value)
+
+
+def SetLockScreenSettings(device):
+ """Sets lock screen settings on the device.
+
+ On certain device/Android configurations we need to disable the lock screen in
+ a different database. Additionally, the password type must be set to
+ DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+ Lock screen settings are stored in sqlite on the device in:
+ /data/system/locksettings.db
+
+ IMPORTANT: The first column is used as a primary key so that all rows with the
+ same value for that column are removed from the table prior to inserting the
+ new values.
+
+ Args:
+ device: A DeviceUtils instance for the device to configure.
+
+ Raises:
+ Exception if the setting was not properly set.
+ """
+ if device.build_type not in _COMPATIBLE_BUILD_TYPES:
+ logging.warning('Unable to disable lockscreen on %s builds.',
+ device.build_type)
+ return
+
+ def get_lock_settings(table):
+ return [(table, 'lockscreen.disabled', '1'),
+ (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+ (table, 'lockscreen.password_type_alternate',
+ PASSWORD_QUALITY_UNSPECIFIED)]
+
+ if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+ db = _LOCK_SCREEN_SETTINGS_PATH
+ locksettings = get_lock_settings('locksettings')
+ columns = ['name', 'user', 'value']
+ generate_values = lambda k, v: [k, '0', v]
+ elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+ db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+ locksettings = get_lock_settings('secure') + get_lock_settings('system')
+ columns = ['name', 'value']
+ generate_values = lambda k, v: [k, v]
+ else:
+ logging.warning('Unable to find database file to set lock screen settings.')
+ return
+
+ for table, key, value in locksettings:
+ # Set the lockscreen setting for default user '0'
+ values = generate_values(key, value)
+
+ cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+ 'table': table,
+ 'primary_key': columns[0],
+ 'primary_value': values[0],
+ 'columns': ', '.join(columns),
+ 'values': ', '.join(["'%s'" % value for value in values])
+ }
+ output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+ as_root=True)
+ if output_msg:
+ logging.info(' '.join(output_msg))
+
+
+ENABLE_LOCATION_SETTINGS = [
+ # Note that setting these in this order is required in order for all of
+ # them to take and stick through a reboot.
+ ('com.google.settings/partner', [
+ ('use_location_for_services', 1),
+ ]),
+ ('settings/secure', [
+ # Ensure Geolocation is enabled and allowed for tests.
+ ('location_providers_allowed', 'gps,network'),
+ ]),
+ ('com.google.settings/partner', [
+ ('network_location_opt_in', 1),
+ ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+ ('com.google.settings/partner', [
+ ('use_location_for_services', 0),
+ ]),
+ ('settings/secure', [
+ # Ensure Geolocation is disabled.
+ ('location_providers_allowed', ''),
+ ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+ ('settings/secure', [
+ ('mock_location', 1),
+ ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+ ('settings/secure', [
+ ('mock_location', 0),
+ ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+ ('settings/global', [
+ ('assisted_gps_enabled', 0),
+
+ # Disable "auto time" and "auto time zone" to avoid network-provided time
+ # to overwrite the device's datetime and timezone synchronized from host
+ # when running tests later. See b/6569849.
+ ('auto_time', 0),
+ ('auto_time_zone', 0),
+
+ ('development_settings_enabled', 1),
+
+ # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+ # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+ # will never display the "Report" button.
+ # Type: int ( 0 = disallow, 1 = allow )
+ ('send_action_app_error', 0),
+
+ ('stay_on_while_plugged_in', 3),
+
+ ('verifier_verify_adb_installs', 0),
+ ]),
+ ('settings/secure', [
+ ('allowed_geolocation_origins',
+ 'http://www.google.co.uk http://www.google.com'),
+
+ # Ensure that we never get random dialogs like "Unfortunately the process
+ # android.process.acore has stopped", which steal the focus, and make our
+ # automation fail (because the dialog steals the focus then mistakenly
+ # receives the injected user input events).
+ ('anr_show_background', 0),
+
+ ('lockscreen.disabled', 1),
+
+ ('screensaver_enabled', 0),
+
+ ('skip_first_use_hints', 1),
+ ]),
+ ('settings/system', [
+ # Don't want devices to accidentally rotate the screen as that could
+ # affect performance measurements.
+ ('accelerometer_rotation', 0),
+
+ ('lockscreen.disabled', 1),
+
+ # Turn down brightness and disable auto-adjust so that devices run cooler.
+ ('screen_brightness', 5),
+ ('screen_brightness_mode', 0),
+
+ ('user_rotation', 0),
+ ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+ ('settings/global', [
+ ('airplane_mode_on', 1),
+ ('wifi_on', 0),
+ ]),
+]
diff --git a/deps/v8/build/android/pylib/gtest/__init__.py b/deps/v8/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/gtest/filter/OWNERS b/deps/v8/build/android/pylib/gtest/filter/OWNERS
new file mode 100644
index 0000000000..72e8ffc0db
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled b/deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000000..533d3e167b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+# TODO(jrg): Fails on bots. Works locally. Figure out why. 2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000000..6bec7d015b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000000..cefc64fd5e
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled b/deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000000..974f131c2b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,50 @@
+# List of suppressions
+# Timeouts
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# http://crbug.com/463740
+CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility
+
+# http://crbug.com/297230
+DumpAccessibilityTreeTest.AccessibilityAriaLevel/*
+DumpAccessibilityTreeTest.AccessibilityAriaProgressbar/*
+DumpAccessibilityTreeTest.AccessibilityListMarkers/*
+DumpAccessibilityTreeTest.AccessibilityUl/*
+DumpAccessibilityTreeTest.AccessibilityCanvas/*
+DumpAccessibilityTreeTest.AccessibilityDialog/*
+DumpAccessibilityTreeTest.AccessibilityModalDialogClosed/*
+DumpAccessibilityTreeTest.AccessibilityModalDialogInIframeOpened/*
+RenderAccessibilityImplTest.DetachAccessibilityObject
+
+# http://crbug.com/187500
+RenderViewImplTest.ImeComposition
+RenderViewImplTest.InsertCharacters
+RenderViewImplTest.OnHandleKeyboardEvent
+RenderViewImplTest.OnNavStateChanged
+# ZoomLevel is not used on Android
+RenderFrameImplTest.ZoomLimit
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+RendererAccessibilityTest.TextSelectionShouldSendRoot
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
diff --git a/deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled b/deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000000..6a7340db43
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,80 @@
+# List of suppressions
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+PageInfoTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/deps/v8/build/android/pylib/gtest/gtest_config.py b/deps/v8/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000000..3ac195586c
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+ 'components_browsertests',
+ 'heap_profiler_unittests',
+ 'devtools_bridge_tests',
+]
+
+TELEMETRY_EXPERIMENTAL_TEST_SUITES = [
+ 'telemetry_unittests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+ 'android_webview_unittests',
+ 'base_unittests',
+ 'blink_unittests',
+ 'breakpad_unittests',
+ 'cc_unittests',
+ 'components_unittests',
+ 'content_browsertests',
+ 'content_unittests',
+ 'events_unittests',
+ 'gl_tests',
+ 'gl_unittests',
+ 'gpu_unittests',
+ 'ipc_tests',
+ 'media_unittests',
+ 'midi_unittests',
+ 'net_unittests',
+ 'sandbox_linux_unittests',
+ 'skia_unittests',
+ 'sql_unittests',
+ 'storage_unittests',
+ 'ui_android_unittests',
+ 'ui_base_unittests',
+ 'ui_touch_selection_unittests',
+ 'unit_tests_apk',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+ 'breakpad_unittests',
+ 'sandbox_linux_unittests',
+
+  # The internal ASAN recipe cannot run step "unit_tests_apk"; this is the
+ # only internal recipe affected. See http://crbug.com/607850
+ 'unit_tests_apk',
+]
diff --git a/deps/v8/build/android/pylib/gtest/gtest_test_instance.py b/deps/v8/build/android/pylib/gtest/gtest_test_instance.py
new file mode 100644
index 0000000000..d3bedee19b
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/gtest_test_instance.py
@@ -0,0 +1,530 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import HTMLParser
+import logging
+import os
+import re
+import tempfile
+import threading
+import xml.etree.ElementTree
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.constants import host_paths
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.symbols import stack_symbolizer
+from pylib.utils import test_filter
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import unittest_util # pylint: disable=import-error
+
+
+BROWSER_TEST_SUITES = [
+ 'components_browsertests',
+ 'content_browsertests',
+]
+
+RUN_IN_SUB_THREAD_TEST_SUITES = [
+ # Multiprocess tests should be run outside of the main thread.
+ 'base_unittests', # file_locking_unittest.cc uses a child process.
+ 'ipc_perftests',
+ 'ipc_tests',
+ 'mojo_perftests',
+ 'mojo_unittests',
+ 'net_unittests'
+]
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+ 'chrome/test/data/extensions/api_test',
+ 'chrome/test/data/extensions/secure_shell',
+ 'chrome/test/data/firefox*',
+ 'chrome/test/data/gpu',
+ 'chrome/test/data/image_decoding',
+ 'chrome/test/data/import',
+ 'chrome/test/data/page_cycler',
+ 'chrome/test/data/perf',
+ 'chrome/test/data/pyauto_private',
+ 'chrome/test/data/safari_import',
+ 'chrome/test/data/scroll',
+ 'chrome/test/data/third_party',
+ 'third_party/hunspell_dictionaries/*.dic',
+ # crbug.com/258690
+ 'webkit/data/bmp_decoder',
+ 'webkit/data/ico_decoder',
+]
+
+
+_EXTRA_NATIVE_TEST_ACTIVITY = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+ 'NativeTestActivity')
+_EXTRA_RUN_IN_SUB_THREAD = (
+ 'org.chromium.native_test.NativeTest.RunInSubThread')
+EXTRA_SHARD_NANO_TIMEOUT = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+ 'ShardNanoTimeout')
+_EXTRA_SHARD_SIZE_LIMIT = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+ 'ShardSizeLimit')
+
+# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
+# results.
+_RE_TEST_STATUS = re.compile(
+ r'\[ +((?:RUN)|(?:FAILED)|(?:OK)|(?:CRASHED)) +\]'
+ r' ?([^ ]+)?(?: \((\d+) ms\))?$')
+# Crash detection constants.
+_RE_TEST_ERROR = re.compile(r'FAILURES!!! Tests run: \d+,'
+ r' Failures: \d+, Errors: 1')
+_RE_TEST_CURRENTLY_RUNNING = re.compile(r'\[ERROR:.*?\]'
+ r' Currently running: (.*)')
+_RE_DISABLED = re.compile(r'DISABLED_')
+_RE_FLAKY = re.compile(r'FLAKY_')
+
+# Detect stack line in stdout.
+_STACK_LINE_RE = re.compile(r'\s*#\d+')
+
+def ParseGTestListTests(raw_list):
+ """Parses a raw test list as provided by --gtest_list_tests.
+
+ Args:
+ raw_list: The raw test listing with the following format:
+
+ IPCChannelTest.
+ SendMessageInChannelConnected
+ IPCSyncChannelTest.
+ Simple
+ DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+ Returns:
+ A list of all tests. For the above raw listing:
+
+ [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+ IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+ """
+ ret = []
+ current = ''
+ for test in raw_list:
+ if not test:
+ continue
+ if not test.startswith(' '):
+ test_case = test.split()[0]
+ if test_case.endswith('.'):
+ current = test_case
+ else:
+ test = test.strip()
+ if test and not 'YOU HAVE' in test:
+ test_name = test.split()[0]
+ ret += [current + test_name]
+ return ret
+
+
+def ParseGTestOutput(output, symbolizer, device_abi):
+ """Parses raw gtest output and returns a list of results.
+
+ Args:
+ output: A list of output lines.
+ symbolizer: The symbolizer used to symbolize stack.
+ device_abi: Device abi that is needed for symbolization.
+ Returns:
+ A list of base_test_result.BaseTestResults.
+ """
+ duration = 0
+ fallback_result_type = None
+ log = []
+ stack = []
+ result_type = None
+ results = []
+ test_name = None
+
+ def symbolize_stack_and_merge_with_log():
+ log_string = '\n'.join(log or [])
+ if not stack:
+ stack_string = ''
+ else:
+ stack_string = '\n'.join(
+ symbolizer.ExtractAndResolveNativeStackTraces(
+ stack, device_abi))
+ return '%s\n%s' % (log_string, stack_string)
+
+ def handle_possibly_unknown_test():
+ if test_name is not None:
+ results.append(base_test_result.BaseTestResult(
+ TestNameWithoutDisabledPrefix(test_name),
+ fallback_result_type or base_test_result.ResultType.UNKNOWN,
+ duration, log=symbolize_stack_and_merge_with_log()))
+
+ for l in output:
+ matcher = _RE_TEST_STATUS.match(l)
+ if matcher:
+ if matcher.group(1) == 'RUN':
+ handle_possibly_unknown_test()
+ duration = 0
+ fallback_result_type = None
+ log = []
+ stack = []
+ result_type = None
+ elif matcher.group(1) == 'OK':
+ result_type = base_test_result.ResultType.PASS
+ elif matcher.group(1) == 'FAILED':
+ result_type = base_test_result.ResultType.FAIL
+ elif matcher.group(1) == 'CRASHED':
+ fallback_result_type = base_test_result.ResultType.CRASH
+ # Be aware that test name and status might not appear on same line.
+ test_name = matcher.group(2) if matcher.group(2) else test_name
+ duration = int(matcher.group(3)) if matcher.group(3) else 0
+
+ else:
+ # Needs another matcher here to match crashes, like those of DCHECK.
+ matcher = _RE_TEST_CURRENTLY_RUNNING.match(l)
+ if matcher:
+ test_name = matcher.group(1)
+ result_type = base_test_result.ResultType.CRASH
+ duration = 0 # Don't know.
+
+ if log is not None:
+ if not matcher and _STACK_LINE_RE.match(l):
+ stack.append(l)
+ else:
+ log.append(l)
+
+ if result_type and test_name:
+ # Don't bother symbolizing output if the test passed.
+ if result_type == base_test_result.ResultType.PASS:
+ stack = []
+ results.append(base_test_result.BaseTestResult(
+ TestNameWithoutDisabledPrefix(test_name), result_type, duration,
+ log=symbolize_stack_and_merge_with_log()))
+ test_name = None
+
+ handle_possibly_unknown_test()
+
+ return results
+
+
+def ParseGTestXML(xml_content):
+ """Parse gtest XML result."""
+ results = []
+ if not xml_content:
+ return results
+
+ html = HTMLParser.HTMLParser()
+
+ testsuites = xml.etree.ElementTree.fromstring(xml_content)
+ for testsuite in testsuites:
+ suite_name = testsuite.attrib['name']
+ for testcase in testsuite:
+ case_name = testcase.attrib['name']
+ result_type = base_test_result.ResultType.PASS
+ log = []
+ for failure in testcase:
+ result_type = base_test_result.ResultType.FAIL
+ log.append(html.unescape(failure.attrib['message']))
+
+ results.append(base_test_result.BaseTestResult(
+ '%s.%s' % (suite_name, TestNameWithoutDisabledPrefix(case_name)),
+ result_type,
+ int(float(testcase.attrib['time']) * 1000),
+ log=('\n'.join(log) if log else '')))
+
+ return results
+
+
+def TestNameWithoutDisabledPrefix(test_name):
+  """Return the test name with any 'DISABLED_' or 'FLAKY_' prefix
+  removed.
+
+ Args:
+ test_name: The name of a test.
+ Returns:
+ A test name without prefix 'DISABLED_' or 'FLAKY_'.
+ """
+ disabled_prefixes = [_RE_DISABLED, _RE_FLAKY]
+ for dp in disabled_prefixes:
+ test_name = dp.sub('', test_name)
+ return test_name
+
+class GtestTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, data_deps_delegate, error_func):
+ super(GtestTestInstance, self).__init__()
+ # TODO(jbudorick): Support multiple test suites.
+ if len(args.suite_name) > 1:
+ raise ValueError('Platform mode currently supports only 1 gtest suite')
+ self._isolated_script_test_perf_output = (
+ args.isolated_script_test_perf_output)
+ self._exe_dist_dir = None
+ self._external_shard_index = args.test_launcher_shard_index
+ self._extract_test_list_from_filter = args.extract_test_list_from_filter
+ self._filter_tests_lock = threading.Lock()
+ self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket
+ self._shard_timeout = args.shard_timeout
+ self._store_tombstones = args.store_tombstones
+ self._suite = args.suite_name[0]
+ self._symbolizer = stack_symbolizer.Symbolizer(None)
+ self._total_external_shards = args.test_launcher_total_shards
+ self._wait_for_java_debugger = args.wait_for_java_debugger
+
+ # GYP:
+ if args.executable_dist_dir:
+ self._exe_dist_dir = os.path.abspath(args.executable_dist_dir)
+ else:
+ # TODO(agrieve): Remove auto-detection once recipes pass flag explicitly.
+ exe_dist_dir = os.path.join(constants.GetOutDirectory(),
+ '%s__dist' % self._suite)
+
+ if os.path.exists(exe_dist_dir):
+ self._exe_dist_dir = exe_dist_dir
+
+ incremental_part = ''
+ if args.test_apk_incremental_install_json:
+ incremental_part = '_incremental'
+
+ apk_path = os.path.join(
+ constants.GetOutDirectory(), '%s_apk' % self._suite,
+ '%s-debug%s.apk' % (self._suite, incremental_part))
+ self._test_apk_incremental_install_json = (
+ args.test_apk_incremental_install_json)
+ if not os.path.exists(apk_path):
+ self._apk_helper = None
+ else:
+ self._apk_helper = apk_helper.ApkHelper(apk_path)
+ self._extras = {
+ _EXTRA_NATIVE_TEST_ACTIVITY: self._apk_helper.GetActivityName(),
+ }
+ if self._suite in RUN_IN_SUB_THREAD_TEST_SUITES:
+ self._extras[_EXTRA_RUN_IN_SUB_THREAD] = 1
+ if self._suite in BROWSER_TEST_SUITES:
+ self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1
+ self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e9 * self._shard_timeout)
+ self._shard_timeout = 10 * self._shard_timeout
+ if args.wait_for_java_debugger:
+ self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e15) # Forever
+
+ if not self._apk_helper and not self._exe_dist_dir:
+ error_func('Could not find apk or executable for %s' % self._suite)
+
+ self._data_deps = []
+ self._gtest_filter = test_filter.InitializeFilterFromArgs(args)
+ self._run_disabled = args.run_disabled
+
+ self._data_deps_delegate = data_deps_delegate
+ self._runtime_deps_path = args.runtime_deps_path
+ if not self._runtime_deps_path:
+ logging.warning('No data dependencies will be pushed.')
+
+ if args.app_data_files:
+ self._app_data_files = args.app_data_files
+ if args.app_data_file_dir:
+ self._app_data_file_dir = args.app_data_file_dir
+ else:
+ self._app_data_file_dir = tempfile.mkdtemp()
+ logging.critical('Saving app files to %s', self._app_data_file_dir)
+ else:
+ self._app_data_files = None
+ self._app_data_file_dir = None
+
+ self._flags = None
+ self._initializeCommandLineFlags(args)
+
+ # TODO(jbudorick): Remove this once it's deployed.
+ self._enable_xml_result_parsing = args.enable_xml_result_parsing
+
+ def _initializeCommandLineFlags(self, args):
+ self._flags = []
+ if args.command_line_flags:
+ self._flags.extend(args.command_line_flags)
+ if args.device_flags_file:
+ with open(args.device_flags_file) as f:
+ stripped_lines = (l.strip() for l in f)
+ self._flags.extend(flag for flag in stripped_lines if flag)
+ if args.run_disabled:
+ self._flags.append('--gtest_also_run_disabled_tests')
+
+ @property
+ def activity(self):
+ return self._apk_helper and self._apk_helper.GetActivityName()
+
+ @property
+ def apk(self):
+ return self._apk_helper and self._apk_helper.path
+
+ @property
+ def apk_helper(self):
+ return self._apk_helper
+
+ @property
+ def app_file_dir(self):
+ return self._app_data_file_dir
+
+ @property
+ def app_files(self):
+ return self._app_data_files
+
+ @property
+ def enable_xml_result_parsing(self):
+ return self._enable_xml_result_parsing
+
+ @property
+ def exe_dist_dir(self):
+ return self._exe_dist_dir
+
+ @property
+ def external_shard_index(self):
+ return self._external_shard_index
+
+ @property
+ def extract_test_list_from_filter(self):
+ return self._extract_test_list_from_filter
+
+ @property
+ def extras(self):
+ return self._extras
+
+ @property
+ def flags(self):
+ return self._flags
+
+ @property
+ def gs_test_artifacts_bucket(self):
+ return self._gs_test_artifacts_bucket
+
+ @property
+ def gtest_filter(self):
+ return self._gtest_filter
+
+ @property
+ def isolated_script_test_perf_output(self):
+ return self._isolated_script_test_perf_output
+
+ @property
+ def package(self):
+ return self._apk_helper and self._apk_helper.GetPackageName()
+
+ @property
+ def permissions(self):
+ return self._apk_helper and self._apk_helper.GetPermissions()
+
+ @property
+ def runner(self):
+ return self._apk_helper and self._apk_helper.GetInstrumentationName()
+
+ @property
+ def shard_timeout(self):
+ return self._shard_timeout
+
+ @property
+ def store_tombstones(self):
+ return self._store_tombstones
+
+ @property
+ def suite(self):
+ return self._suite
+
+ @property
+ def symbolizer(self):
+ return self._symbolizer
+
+ @property
+ def test_apk_incremental_install_json(self):
+ return self._test_apk_incremental_install_json
+
+ @property
+ def total_external_shards(self):
+ return self._total_external_shards
+
+ @property
+ def wait_for_java_debugger(self):
+ return self._wait_for_java_debugger
+
+ #override
+ def TestType(self):
+ return 'gtest'
+
+ #override
+ def GetPreferredAbis(self):
+ if not self._apk_helper:
+ return None
+ return self._apk_helper.GetAbis()
+
+ #override
+ def SetUp(self):
+ """Map data dependencies via isolate."""
+ self._data_deps.extend(
+ self._data_deps_delegate(self._runtime_deps_path))
+
+ def GetDataDependencies(self):
+ """Returns the test suite's data dependencies.
+
+ Returns:
+ A list of (host_path, device_path) tuples to push. If device_path is
+ None, the client is responsible for determining where to push the file.
+ """
+ return self._data_deps
+
+ def FilterTests(self, test_list, disabled_prefixes=None):
+ """Filters |test_list| based on prefixes and, if present, a filter string.
+
+ Args:
+ test_list: The list of tests to filter.
+ disabled_prefixes: A list of test prefixes to filter. Defaults to
+ DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_
+ Returns:
+ A filtered list of tests to run.
+ """
+ gtest_filter_strings = [
+ self._GenerateDisabledFilterString(disabled_prefixes)]
+ if self._gtest_filter:
+ gtest_filter_strings.append(self._gtest_filter)
+
+ filtered_test_list = test_list
+    # This lock is required because, on older versions of Python,
+    # |unittest_util.FilterTestNames|'s use of |fnmatch| is not threadsafe.
+ with self._filter_tests_lock:
+ for gtest_filter_string in gtest_filter_strings:
+ logging.debug('Filtering tests using: %s', gtest_filter_string)
+ filtered_test_list = unittest_util.FilterTestNames(
+ filtered_test_list, gtest_filter_string)
+
+ if self._run_disabled and self._gtest_filter:
+ out_filtered_test_list = list(set(test_list)-set(filtered_test_list))
+ for test in out_filtered_test_list:
+ test_name_no_disabled = TestNameWithoutDisabledPrefix(test)
+ if test_name_no_disabled != test and unittest_util.FilterTestNames(
+ [test_name_no_disabled], self._gtest_filter):
+ filtered_test_list.append(test)
+ return filtered_test_list
+
+ def _GenerateDisabledFilterString(self, disabled_prefixes):
+ disabled_filter_items = []
+
+ if disabled_prefixes is None:
+ disabled_prefixes = ['FAILS_', 'PRE_']
+ if '--run-manual' not in self._flags:
+ disabled_prefixes += ['MANUAL_']
+ if not self._run_disabled:
+ disabled_prefixes += ['DISABLED_', 'FLAKY_']
+
+ disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
+ disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]
+
+ disabled_tests_file_path = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
+ 'filter', '%s_disabled' % self._suite)
+ if disabled_tests_file_path and os.path.exists(disabled_tests_file_path):
+ with open(disabled_tests_file_path) as disabled_tests_file:
+ disabled_filter_items += [
+ '%s' % l for l in (line.strip() for line in disabled_tests_file)
+ if l and not l.startswith('#')]
+
+ return '*-%s' % ':'.join(disabled_filter_items)
+
+ #override
+ def TearDown(self):
+ """Do nothing."""
+ pass
diff --git a/deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py b/deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py
new file mode 100755
index 0000000000..b39da527df
--- /dev/null
+++ b/deps/v8/build/android/pylib/gtest/gtest_test_instance_test.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+
+
+class GtestTestInstanceTests(unittest.TestCase):
+
+ def testParseGTestListTests_simple(self):
+ raw_output = [
+ 'TestCaseOne.',
+ ' testOne',
+ ' testTwo',
+ 'TestCaseTwo.',
+ ' testThree',
+ ' testFour',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TestCaseOne.testOne',
+ 'TestCaseOne.testTwo',
+ 'TestCaseTwo.testThree',
+ 'TestCaseTwo.testFour',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_typeParameterized_old(self):
+ raw_output = [
+ 'TPTestCase/WithTypeParam/0.',
+ ' testOne',
+ ' testTwo',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TPTestCase/WithTypeParam/0.testOne',
+ 'TPTestCase/WithTypeParam/0.testTwo',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_typeParameterized_new(self):
+ raw_output = [
+ 'TPTestCase/WithTypeParam/0. # TypeParam = TypeParam0',
+ ' testOne',
+ ' testTwo',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TPTestCase/WithTypeParam/0.testOne',
+ 'TPTestCase/WithTypeParam/0.testTwo',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_valueParameterized_old(self):
+ raw_output = [
+ 'VPTestCase.',
+ ' testWithValueParam/0',
+ ' testWithValueParam/1',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'VPTestCase.testWithValueParam/0',
+ 'VPTestCase.testWithValueParam/1',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_valueParameterized_new(self):
+ raw_output = [
+ 'VPTestCase.',
+ ' testWithValueParam/0 # GetParam() = 0',
+ ' testWithValueParam/1 # GetParam() = 1',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'VPTestCase.testWithValueParam/0',
+ 'VPTestCase.testWithValueParam/1',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestListTests_emptyTestName(self):
+ raw_output = [
+ 'TestCase.',
+ ' ',
+ ' nonEmptyTestName',
+ ]
+ actual = gtest_test_instance.ParseGTestListTests(raw_output)
+ expected = [
+ 'TestCase.nonEmptyTestName',
+ ]
+ self.assertEqual(expected, actual)
+
+ def testParseGTestOutput_pass(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ OK ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+ def testParseGTestOutput_fail(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ FAILED ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType())
+
+ def testParseGTestOutput_crash(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ CRASHED ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+ def testParseGTestOutput_errorCrash(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ERROR:blah] Currently running: FooTest.Bar',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(0, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType())
+
+ def testParseGTestOutput_unknown(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(0, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType())
+
+ def testParseGTestOutput_nonterminalUnknown(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ RUN ] FooTest.Baz',
+ '[ OK ] FooTest.Baz (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(2, len(actual))
+
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(0, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType())
+
+ self.assertEquals('FooTest.Baz', actual[1].GetName())
+ self.assertEquals(1, actual[1].GetDuration())
+ self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType())
+
+ def testParseGTestOutput_deathTestCrashOk(self):
+ raw_output = [
+ '[ RUN ] FooTest.Bar',
+ '[ CRASHED ]',
+ '[ OK ] FooTest.Bar (1 ms)',
+ ]
+ actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None)
+ self.assertEquals(1, len(actual))
+
+ self.assertEquals('FooTest.Bar', actual[0].GetName())
+ self.assertEquals(1, actual[0].GetDuration())
+ self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType())
+
+ def testParseGTestXML_none(self):
+ actual = gtest_test_instance.ParseGTestXML(None)
+ self.assertEquals([], actual)
+
+ def testTestNameWithoutDisabledPrefix_disabled(self):
+ test_name_list = [
+ 'A.DISABLED_B',
+ 'DISABLED_A.B',
+ 'DISABLED_A.DISABLED_B',
+ ]
+ for test_name in test_name_list:
+ actual = gtest_test_instance \
+ .TestNameWithoutDisabledPrefix(test_name)
+ expected = 'A.B'
+ self.assertEquals(expected, actual)
+
+ def testTestNameWithoutDisabledPrefix_flaky(self):
+ test_name_list = [
+ 'A.FLAKY_B',
+ 'FLAKY_A.B',
+ 'FLAKY_A.FLAKY_B',
+ ]
+ for test_name in test_name_list:
+ actual = gtest_test_instance \
+ .TestNameWithoutDisabledPrefix(test_name)
+ expected = 'A.B'
+ self.assertEquals(expected, actual)
+
+ def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self):
+ test_name = 'A.B'
+ actual = gtest_test_instance \
+ .TestNameWithoutDisabledPrefix(test_name)
+ expected = 'A.B'
+ self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/instrumentation/__init__.py b/deps/v8/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000000..aa78e9ec92
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# AndroidJUnitRunner outputs status code -3 to indicate a skipped test.
+STATUS_CODE_SKIP = -3
+
+# AndroidJUnitRunner outputs -4 to indicate a failed assumption
+# "A test for which an assumption fails should not generate a test
+# case failure"
+# http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html
+STATUS_CODE_ASSUMPTION_FAILURE = -4
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
+
+
+class InstrumentationParser(object):
+
+ def __init__(self, stream):
+ """An incremental parser for the output of Android instrumentation tests.
+
+ Example:
+
+ stream = adb.IterShell('am instrument -r ...')
+ parser = InstrumentationParser(stream)
+
+ for code, bundle in parser.IterStatus():
+ # do something with each instrumentation status
+ print 'status:', code, bundle
+
+ # do something with the final instrumentation result
+ code, bundle = parser.GetResult()
+ print 'result:', code, bundle
+
+ Args:
+ stream: a sequence of lines as produced by the raw output of an
+ instrumentation test (e.g. by |am instrument -r|).
+ """
+ self._stream = stream
+ self._code = None
+ self._bundle = None
+
+ def IterStatus(self):
+ """Iterate over statuses as they are produced by the instrumentation test.
+
+ Yields:
+ A tuple (code, bundle) for each instrumentation status found in the
+ output.
+ """
+ def join_bundle_values(bundle):
+ for key in bundle:
+ bundle[key] = '\n'.join(bundle[key])
+ return bundle
+
+ bundle = {'STATUS': {}, 'RESULT': {}}
+ header = None
+ key = None
+ for line in self._stream:
+ m = _INSTR_LINE_RE.match(line)
+ if m:
+ header, value = m.groups()
+ key = None
+ if header in ['STATUS', 'RESULT'] and '=' in value:
+ key, value = value.split('=', 1)
+ bundle[header][key] = [value]
+ elif header == 'STATUS_CODE':
+ yield int(value), join_bundle_values(bundle['STATUS'])
+ bundle['STATUS'] = {}
+ elif header == 'CODE':
+ self._code = int(value)
+ else:
+ logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+ elif key is not None:
+ bundle[header][key].append(line)
+
+ self._bundle = join_bundle_values(bundle['RESULT'])
+
+ def GetResult(self):
+ """Return the final instrumentation result.
+
+ Returns:
+ A pair (code, bundle) with the final instrumentation result. The |code|
+ may be None if no instrumentation result was found in the output.
+
+ Raises:
+ AssertionError if attempting to get the instrumentation result before
+ exhausting |IterStatus| first.
+ """
+ assert self._bundle is not None, (
+ 'The IterStatus generator must be exhausted before reading the final'
+ ' instrumentation result.')
+ return self._code, self._bundle
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000000..092d10fc93
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
class InstrumentationParserTest(unittest.TestCase):
  """Unit tests for instrumentation_parser.InstrumentationParser."""

  def testInstrumentationParser_nothing(self):
    # A stream with only an empty line yields no statuses and no result.
    parser = instrumentation_parser.InstrumentationParser([''])
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(None, code)
    self.assertEqual({}, bundle)
    self.assertEqual([], statuses)

  def testInstrumentationParser_noMatchingStarts(self):
    # Output without any INSTRUMENTATION_ lines is ignored entirely.
    raw_output = [
      '',
      'this.is.a.test.package.TestClass:.',
      'Test result for =.',
      'Time: 1.234',
      '',
      'OK (1 test)',
    ]

    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(None, code)
    self.assertEqual({}, bundle)
    self.assertEqual([], statuses)

  def testInstrumentationParser_resultAndCode(self):
    # RESULT values may span multiple lines; continuations are joined with
    # newlines into the final bundle.
    raw_output = [
      'INSTRUMENTATION_RESULT: shortMsg=foo bar',
      'INSTRUMENTATION_RESULT: longMsg=a foo',
      'walked into',
      'a bar',
      'INSTRUMENTATION_CODE: -1',
    ]

    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()
    self.assertEqual(-1, code)
    self.assertEqual(
      {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
    self.assertEqual([], statuses)

  def testInstrumentationParser_oneStatus(self):
    # A STATUS_CODE line flushes all accumulated STATUS key/value pairs as
    # one (code, bundle) status.
    raw_output = [
      'INSTRUMENTATION_STATUS: foo=1',
      'INSTRUMENTATION_STATUS: bar=hello',
      'INSTRUMENTATION_STATUS: world=false',
      'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
      'INSTRUMENTATION_STATUS: test=testMethod',
      'INSTRUMENTATION_STATUS_CODE: 0',
    ]

    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())

    expected = [
      (0, {
        'foo': '1',
        'bar': 'hello',
        'world': 'false',
        'class': 'this.is.a.test.package.TestClass',
        'test': 'testMethod',
      })
    ]
    self.assertEqual(expected, statuses)

  def testInstrumentationParser_multiStatus(self):
    # Each STATUS_CODE starts a fresh bundle; multi-line values (stack) keep
    # their leading empty segment from the 'stack=' line.
    raw_output = [
      'INSTRUMENTATION_STATUS: class=foo',
      'INSTRUMENTATION_STATUS: test=bar',
      'INSTRUMENTATION_STATUS_CODE: 1',
      'INSTRUMENTATION_STATUS: test_skipped=true',
      'INSTRUMENTATION_STATUS_CODE: 0',
      'INSTRUMENTATION_STATUS: class=hello',
      'INSTRUMENTATION_STATUS: test=world',
      'INSTRUMENTATION_STATUS: stack=',
      'foo/bar.py (27)',
      'hello/world.py (42)',
      'test/file.py (1)',
      'INSTRUMENTATION_STATUS_CODE: -1',
    ]

    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())

    expected = [
      (1, {'class': 'foo', 'test': 'bar',}),
      (0, {'test_skipped': 'true'}),
      (-1, {
        'class': 'hello',
        'test': 'world',
        'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
      }),
    ]
    self.assertEqual(expected, statuses)

  def testInstrumentationParser_statusResultAndCode(self):
    # Statuses, the final RESULT bundle, and the final CODE can coexist;
    # trailing empty lines are preserved in the RESULT value.
    raw_output = [
      'INSTRUMENTATION_STATUS: class=foo',
      'INSTRUMENTATION_STATUS: test=bar',
      'INSTRUMENTATION_STATUS_CODE: 1',
      'INSTRUMENTATION_RESULT: result=hello',
      'world',
      '',
      '',
      'INSTRUMENTATION_CODE: 0',
    ]

    parser = instrumentation_parser.InstrumentationParser(raw_output)
    statuses = list(parser.IterStatus())
    code, bundle = parser.GetResult()

    self.assertEqual(0, code)
    self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
    self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
# Allow invoking this test suite directly from the command line.
if __name__ == '__main__':
  unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py
new file mode 100644
index 0000000000..98b9435efe
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -0,0 +1,944 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import logging
+import os
+import pickle
+import re
+
+from devil.android import apk_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_exception
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
+from pylib.symbols import deobfuscator
+from pylib.symbols import stack_symbolizer
+from pylib.utils import dexdump
+from pylib.utils import instrumentation_tracing
+from pylib.utils import proguard
+from pylib.utils import shared_preference_utils
+from pylib.utils import test_filter
+
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import unittest_util # pylint: disable=import-error
+
# Ref: http://developer.android.com/reference/android/app/Activity.html
_ACTIVITY_RESULT_CANCELED = 0
_ACTIVITY_RESULT_OK = -1

# Name of the command-line-argument parameterization (see
# _ParameterizeTestsWithFlags).
_COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter'
# Size annotations tests may carry; at least one is required per test method
# (see FilterTests / MissingSizeAnnotationError).
_DEFAULT_ANNOTATIONS = [
    'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest', 'IntegrationTest']
# Annotations that exclude a test unless explicitly requested.
_EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [
    'DisabledTest', 'FlakyTest', 'Manual']
_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS +
                         _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS)

# These test methods are inherited from android.test base test class and
# should be permitted for not having size annotation. For more, please check
# https://developer.android.com/reference/android/test/AndroidTestCase.html
# https://developer.android.com/reference/android/test/ServiceTestCase.html
_TEST_WITHOUT_SIZE_ANNOTATIONS = [
    'testAndroidTestCaseSetupProperly', 'testServiceTestCaseSetUpProperly']

# Intent extras understood by the OnDeviceInstrumentationDriver (see
# GetDriverEnvironmentVars).
_EXTRA_DRIVER_TEST_LIST = (
    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestList')
_EXTRA_DRIVER_TEST_LIST_FILE = (
    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestListFile')
_EXTRA_DRIVER_TARGET_PACKAGE = (
    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetPackage')
_EXTRA_DRIVER_TARGET_CLASS = (
    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetClass')
_EXTRA_TIMEOUT_SCALE = (
    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TimeoutScale')
# JUnit4 runners known to support on-device test listing.
_TEST_LIST_JUNIT4_RUNNERS = [
    'org.chromium.base.test.BaseChromiumAndroidJUnitRunner']

_SKIP_PARAMETERIZATION = 'SkipCommandLineParameterization'
_COMMANDLINE_PARAMETERIZATION = 'CommandLineParameter'
# Matches result-bundle text that indicates a native crash (see
# GenerateTestResults).
_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE)
# Bump whenever the pickled test-list schema changes; stale pickles are
# rejected by GetTestsFromPickle.
_PICKLE_FORMAT_VERSION = 12
+
+
class MissingSizeAnnotationError(test_exception.TestException):
  """Raised when a test method carries none of the required size annotations."""

  def __init__(self, class_name):
    valid_list = ', '.join('@' + annotation for annotation in _VALID_ANNOTATIONS)
    super(MissingSizeAnnotationError, self).__init__(
        class_name + ': Test method is missing required size annotation. '
        'Add one of: ' + valid_list)
+
+
class TestListPickleException(test_exception.TestException):
  """Raised when a cached test-list pickle is missing, stale, or invalid."""
  pass
+
+
+# TODO(jbudorick): Make these private class methods of
+# InstrumentationTestInstance once the instrumentation junit3_runner_class is
+# deprecated.
def ParseAmInstrumentRawOutput(raw_output):
  """Parses the output of an |am instrument -r| call.

  Args:
    raw_output: the output of an |am instrument -r| call as a list of lines.

  Returns:
    A 3-tuple (code, bundle, statuses) where:
      - |code| is the final instrumentation code as an integer,
      - |bundle| is the final result bundle as a dict of strings, and
      - |statuses| is a list of (status code, status bundle) 2-tuples, one
        per instrumentation status found in the output.
  """
  parser = instrumentation_parser.InstrumentationParser(raw_output)
  statuses = [status for status in parser.IterStatus()]
  result_code, result_bundle = parser.GetResult()
  return (result_code, result_bundle, statuses)
+
+
def GenerateTestResults(
    result_code, result_bundle, statuses, start_ms, duration_ms, device_abi,
    symbolizer):
  """Generate test results from |statuses|.

  Args:
    result_code: The overall status code as an integer.
    result_bundle: The summary bundle dump as a dict.
    statuses: A list of 2-tuples containing:
      - the status code as an integer
      - the bundle dump as a dict mapping string keys to string values
      Note that this is the same as the third item in the 3-tuple returned by
      |ParseAmInstrumentRawOutput|.
    start_ms: The start time of the test in milliseconds.
    duration_ms: The duration of the test in milliseconds.
    device_abi: The device_abi, which is needed for symbolization.
    symbolizer: The symbolizer used to symbolize stack.

  Returns:
    A list containing an instance of InstrumentationTestResult for each test
    parsed.
  """

  results = []

  current_result = None

  for status_code, bundle in statuses:
    # Statuses without both a class and a test name are not per-test
    # statuses and are skipped.
    test_class = bundle.get('class', '')
    test_method = bundle.get('test', '')
    if test_class and test_method:
      test_name = '%s#%s' % (test_class, test_method)
    else:
      continue

    if status_code == instrumentation_parser.STATUS_CODE_START:
      # START opens a new result; flush any previous one first.
      if current_result:
        results.append(current_result)
      current_result = test_result.InstrumentationTestResult(
          test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
    else:
      # NOTE(review): a non-START status before any START would leave
      # |current_result| as None and raise AttributeError below; presumably
      # `am instrument -r` always emits START first — confirm.
      if status_code == instrumentation_parser.STATUS_CODE_OK:
        if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'):
          current_result.SetType(base_test_result.ResultType.SKIP)
        elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
          # Only upgrade UNKNOWN to PASS; an earlier FAIL/SKIP wins.
          current_result.SetType(base_test_result.ResultType.PASS)
      elif status_code == instrumentation_parser.STATUS_CODE_SKIP:
        current_result.SetType(base_test_result.ResultType.SKIP)
      elif status_code == instrumentation_parser.STATUS_CODE_ASSUMPTION_FAILURE:
        # JUnit assumption failures are reported as skips.
        current_result.SetType(base_test_result.ResultType.SKIP)
      else:
        if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
                               instrumentation_parser.STATUS_CODE_FAILURE):
          logging.error('Unrecognized status code %d. Handling as an error.',
                        status_code)
        current_result.SetType(base_test_result.ResultType.FAIL)
      # Any terminal status may carry a stack; symbolize native frames when
      # both a symbolizer and the device ABI are available.
      if 'stack' in bundle:
        if symbolizer and device_abi:
          current_result.SetLog(
              '%s\n%s' % (
                  bundle['stack'],
                  '\n'.join(symbolizer.ExtractAndResolveNativeStackTraces(
                      bundle['stack'], device_abi))))
        else:
          current_result.SetLog(bundle['stack'])

  if current_result:
    if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
      # A test that started but never finished, while the activity result was
      # CANCELED and the result bundle mentions a crash, is marked CRASH.
      # (itervalues: this file targets Python 2.)
      crashed = (result_code == _ACTIVITY_RESULT_CANCELED
                 and any(_NATIVE_CRASH_RE.search(l)
                         for l in result_bundle.itervalues()))
      if crashed:
        current_result.SetType(base_test_result.ResultType.CRASH)

    results.append(current_result)

  return results
+
+
def FilterTests(tests, filter_str=None, annotations=None,
                excluded_annotations=None):
  """Filter a list of tests.

  Args:
    tests: a list of tests. e.g. [
           {'annotations': {}, 'class': 'com.example.TestA', 'method':'test1'},
           {'annotations': {}, 'class': 'com.example.TestB', 'method':'test2'}]
    filter_str: googletest-style filter string.
    annotations: a list of (annotation, value) pairs; tests must match at
      least one of them. A None value matches on annotation presence alone.
    excluded_annotations: a list of (annotation, value) pairs; tests matching
      any of them are excluded.

  Return:
    A list of filtered tests

  Raises:
    MissingSizeAnnotationError: if a test method has no size annotation and
      is not one of the whitelisted base-class methods.
  """
  def gtest_filter(t):
    if not filter_str:
      return True
    # Allow fully-qualified name as well as an omitted package.
    unqualified_class_test = {
        'class': t['class'].split('.')[-1],
        'method': t['method']
    }
    names = [
        GetTestName(t, sep='.'),
        GetTestName(unqualified_class_test, sep='.'),
        GetUniqueTestName(t, sep='.')
    ]

    if t['is_junit4']:
      names += [
          GetTestNameWithoutParameterPostfix(t, sep='.'),
          GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
      ]

    # gtest filters use '-' to separate positive and negative patterns.
    pattern_groups = filter_str.split('-')
    if len(pattern_groups) > 1:
      negative_filter = pattern_groups[1]
      if unittest_util.FilterTestNames(names, negative_filter):
        # Empty list is falsy: a negative-filter hit excludes the test.
        return []

    positive_filter = pattern_groups[0]
    return unittest_util.FilterTestNames(names, positive_filter)

  def annotation_filter(all_annotations):
    if not annotations:
      return True
    return any_annotation_matches(annotations, all_annotations)

  def excluded_annotation_filter(all_annotations):
    if not excluded_annotations:
      return True
    return not any_annotation_matches(excluded_annotations,
                                      all_annotations)

  def any_annotation_matches(filter_annotations, all_annotations):
    # |filter_annotations| is iterated as (key, value) pairs.
    return any(
        ak in all_annotations
        and annotation_value_matches(av, all_annotations[ak])
        for ak, av in filter_annotations)

  def annotation_value_matches(filter_av, av):
    # None in the filter means "annotation present with any value".
    if filter_av is None:
      return True
    elif isinstance(av, dict):
      return filter_av in av['value']
    elif isinstance(av, list):
      return filter_av in av
    return filter_av == av

  filtered_tests = []
  for t in tests:
    # Gtest filtering
    if not gtest_filter(t):
      continue

    # Enforce that all tests declare their size.
    if (not any(a in _VALID_ANNOTATIONS for a in t['annotations'])
        and t['method'] not in _TEST_WITHOUT_SIZE_ANNOTATIONS):
      raise MissingSizeAnnotationError(GetTestName(t))

    if (not annotation_filter(t['annotations'])
        or not excluded_annotation_filter(t['annotations'])):
      continue

    filtered_tests.append(t)

  return filtered_tests
+
+
# TODO(yolandyan): remove this once the tests are converted to junit4
def GetAllTestsFromJar(test_jar):
  """Lists test methods in |test_jar|, using a pickle cache when it is fresh.

  Falls back to a (slow) proguard dump of the jar when the cache is missing,
  stale, or written by an older format version, then refreshes the cache.
  """
  pickle_path = '%s-proguard.pickle' % test_jar
  try:
    return GetTestsFromPickle(pickle_path, os.path.getmtime(test_jar))
  except TestListPickleException as e:
    logging.info('Could not get tests from pickle: %s', e)
    logging.info('Getting tests from JAR via proguard.')
    tests = _GetTestsFromProguard(test_jar)
    SaveTestsToPickle(pickle_path, tests)
    return tests
+
+
def GetAllTestsFromApk(test_apk):
  """Lists test methods in |test_apk|, using a pickle cache when it is fresh.

  Falls back to a dexdump of the apk when the cache is missing, stale, or
  written by an older format version, then refreshes the cache.
  """
  pickle_path = '%s-dexdump.pickle' % test_apk
  try:
    return GetTestsFromPickle(pickle_path, os.path.getmtime(test_apk))
  except TestListPickleException as e:
    logging.info('Could not get tests from pickle: %s', e)
    logging.info('Getting tests from dex via dexdump.')
    tests = _GetTestsFromDexdump(test_apk)
    SaveTestsToPickle(pickle_path, tests)
    return tests
+
def GetTestsFromPickle(pickle_path, test_mtime):
  """Loads the cached test list from |pickle_path|.

  Args:
    pickle_path: path to the pickled test list.
    test_mtime: mtime of the artifact (jar/apk) the pickle was derived from;
      the pickle must be strictly newer to be considered valid.

  Returns:
    The cached list of test dicts ('TEST_METHODS').

  Raises:
    TestListPickleException: if the pickle is missing, stale, or was written
      with a different _PICKLE_FORMAT_VERSION.
  """
  if not os.path.exists(pickle_path):
    raise TestListPickleException('%s does not exist.' % pickle_path)
  if os.path.getmtime(pickle_path) <= test_mtime:
    raise TestListPickleException('File is stale: %s' % pickle_path)

  # Pickle data is binary: open in 'rb' (text mode corrupts pickles on
  # Windows and breaks under Python 3).
  with open(pickle_path, 'rb') as f:
    pickle_data = pickle.load(f)
  if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
    raise TestListPickleException('PICKLE_FORMAT_VERSION has changed.')
  return pickle_data['TEST_METHODS']
+
+
# TODO(yolandyan): remove this once the test listing from java runner lands
@instrumentation_tracing.no_tracing
def _GetTestsFromProguard(jar_path):
  """Extracts test classes and their test methods from a proguard dump.

  A class is a test class if its name ends with 'Test'; a method is a test
  method if its name starts with 'test'. Class annotations are inherited:
  a class's effective annotations are its superclass chain's annotations
  overlaid with its own.
  """
  dump = proguard.Dump(jar_path)
  classes_by_name = dict((c['class'], c) for c in dump['classes'])

  def effective_annotations(cls):
    parent_name = cls['superclass']
    if parent_name in classes_by_name:
      merged = effective_annotations(classes_by_name[parent_name])
    else:
      merged = {}
    merged.update(cls['annotations'])
    return merged

  found = []
  for cls in dump['classes']:
    if not cls['class'].endswith('Test'):
      continue
    found.append({
        'class': cls['class'],
        'annotations': effective_annotations(cls),
        'methods': [m for m in cls['methods'] if m['method'].startswith('test')],
        'superclass': cls['superclass'],
    })
  return found
+
+
def _GetTestsFromDexdump(test_apk):
  """Extracts test classes and their test methods from a dexdump of |test_apk|.

  A class is a test class if its name ends with 'Test'; a method is a test
  method if its name starts with 'test'.
  """
  dump = dexdump.Dump(test_apk)

  def test_methods_of(method_names):
    return [
        {
            'method': name,
            # No annotation info is available from dexdump.
            # Set MediumTest annotation for default.
            'annotations': {'MediumTest': None},
        } for name in method_names if name.startswith('test')]

  found = []
  for package_name, package_info in dump.iteritems():
    for class_name, class_info in package_info['classes'].iteritems():
      if not class_name.endswith('Test'):
        continue
      found.append({
          'class': '%s.%s' % (package_name, class_name),
          'annotations': {},
          'methods': test_methods_of(class_info['methods']),
          'superclass': class_info['superclass'],
      })
  return found
+
def SaveTestsToPickle(pickle_path, tests):
  """Caches |tests| at |pickle_path|, tagged with the current format version.

  Args:
    pickle_path: destination path for the pickle.
    tests: the list of test dicts to cache.
  """
  pickle_data = {
    'VERSION': _PICKLE_FORMAT_VERSION,
    'TEST_METHODS': tests,
  }
  # Pickle output is binary: write in 'wb' so the file round-trips on every
  # platform and under Python 3 (text mode 'w' corrupts it).
  with open(pickle_path, 'wb') as pickle_file:
    pickle.dump(pickle_data, pickle_file)
+
+
class MissingJUnit4RunnerException(test_exception.TestException):
  """Raised when no JUnit4 runner is provided or found in the apk manifest."""

  def __init__(self):
    message = 'JUnit4 runner is not provided or specified in test apk manifest.'
    super(MissingJUnit4RunnerException, self).__init__(message)
+
+
def GetTestName(test, sep='#'):
  """Gets the name of the given test.

  Note that this may return the same name for more than one test, e.g. if a
  test is being run multiple times with different parameters.

  Args:
    test: the instrumentation test dict.
    sep: the character(s) that should join the class name and the method name.
  Returns:
    The test name as a string.

  Raises:
    AssertionError: if the resulting name contains any of the characters
      ' ', '*', '-' or ':'.
  """
  test_name = '%s%s%s' % (test['class'], sep, test['method'])
  # The original check tested for the literal substring ' *-:', which never
  # matches real names; the intent (per the message and crbug.com/912199) is
  # that none of those individual characters may appear.
  assert not set(test_name) & set(' *-:'), (
      'The test name must not contain any of the characters in " *-:". See '
      'https://crbug.com/912199')
  return test_name
+
+
def GetTestNameWithoutParameterPostfix(
    test, sep='#', parameterization_sep='__'):
  """Gets the name of the given JUnit4 test without its parameter postfix.

  Many WebView JUnit4 javatests are parameterized (e.g. with
  "__sandboxed_mode") so they run in both non-sandboxed and sandboxed modes.
  Returning the name without the parameterization lets test filters match
  both the parameterized and non-parameterized variants.

  Args:
    test: the instrumentation test dict.
    sep: the character(s) that should join the class name and the method name.
    parameterization_sep: the character(s) that separate the method name from
      its parameterization postfix.
  Returns:
    The test name without parameter postfix as a string.
  """
  full_name = GetTestName(test, sep=sep)
  base_name, _, _ = full_name.partition(parameterization_sep)
  return base_name
+
+
def GetUniqueTestName(test, sep='#'):
  """Gets the unique name of the given test.

  This will include text to disambiguate between tests for which GetTestName
  would return the same name (e.g. the same test run with different flags).

  Args:
    test: the instrumentation test dict.
    sep: the character(s) that should join the class name and the method name.
  Returns:
    The unique test name as a string.

  Raises:
    AssertionError: if the resulting name contains any of the characters
      ' ', '*', '-' or ':'.
  """
  display_name = GetTestName(test, sep=sep)
  if test.get('flags', [None])[0]:
    # Hyphens are forbidden in the final name, so sanitize them out of the
    # flag text before appending it.
    sanitized_flags = [x.replace('-', '_') for x in test['flags']]
    display_name = '%s_with_%s' % (display_name, '_'.join(sanitized_flags))

  # The original check tested for the literal substring ' *-:'; the intent
  # (per the message and crbug.com/912199) is that none of those individual
  # characters may appear.
  assert not set(display_name) & set(' *-:'), (
      'The test name must not contain any of the characters in " *-:". See '
      'https://crbug.com/912199')

  return display_name
+
+
class InstrumentationTestInstance(test_instance.TestInstance):
  """Configuration for a single Android instrumentation-test invocation.

  Derives everything from the parsed command-line |args|: the APK under
  test, the test APK, the JUnit3/JUnit4 runner classes, data dependencies,
  test filters/annotations, command-line flags, the optional on-device
  driver, and logging/symbolization settings. Attribute initialization is
  delegated to the per-concern _initialize* helpers called from __init__.
  """

  def __init__(self, args, data_deps_delegate, error_func):
    """Initializes all attributes from |args|.

    Args:
      args: the parsed argparse namespace.
      data_deps_delegate: callable mapping a runtime-deps path to a list of
        data dependencies (see SetUp).
      error_func: callable invoked with a message on fatal setup errors
        (e.g. a missing APK).
    """
    super(InstrumentationTestInstance, self).__init__()

    # APK-related attributes.
    self._additional_apks = []
    self._apk_under_test = None
    self._apk_under_test_incremental_install_json = None
    self._package_info = None
    self._suite = None
    self._test_apk = None
    self._test_apk_incremental_install_json = None
    self._test_jar = None
    self._test_package = None
    self._junit3_runner_class = None
    self._junit4_runner_class = None
    self._junit4_runner_supports_listing = None
    self._test_support_apk = None
    self._initializeApkAttributes(args, error_func)

    # Data-dependency attributes.
    self._data_deps = None
    self._data_deps_delegate = None
    self._runtime_deps_path = None
    self._initializeDataDependencyAttributes(args, data_deps_delegate)

    # Test filter / annotation attributes.
    self._annotations = None
    self._excluded_annotations = None
    self._test_filter = None
    self._initializeTestFilterAttributes(args)

    # Command-line flag attributes.
    self._flags = None
    self._use_apk_under_test_flags_file = False
    self._initializeFlagAttributes(args)

    # Optional on-device driver attributes.
    self._driver_apk = None
    self._driver_package = None
    self._driver_name = None
    self._initializeDriverAttributes()

    # Test-control attributes.
    self._screenshot_dir = None
    self._timeout_scale = None
    self._wait_for_java_debugger = None
    self._initializeTestControlAttributes(args)

    # Coverage attributes.
    self._coverage_directory = None
    self._initializeTestCoverageAttributes(args)

    # Logging / symbolization attributes.
    self._store_tombstones = False
    self._symbolizer = None
    self._enable_java_deobfuscation = False
    self._deobfuscator = None
    self._initializeLogAttributes(args)

    self._edit_shared_prefs = []
    self._initializeEditPrefsAttributes(args)

    self._replace_system_package = None
    self._initializeReplaceSystemPackageAttributes(args)

    self._use_webview_provider = None
    self._initializeUseWebviewProviderAttributes(args)

    self._external_shard_index = args.test_launcher_shard_index
    self._total_external_shards = args.test_launcher_total_shards

  def _initializeApkAttributes(self, args, error_func):
    """Resolves APK paths, runner classes, and package info from |args|."""
    if args.apk_under_test:
      apk_under_test_path = args.apk_under_test
      # A bare name (no .apk suffix) refers to an APK in the out directory.
      if not args.apk_under_test.endswith('.apk'):
        apk_under_test_path = os.path.join(
            constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
            '%s.apk' % args.apk_under_test)

      # TODO(jbudorick): Move the realpath up to the argument parser once
      # APK-by-name is no longer supported.
      apk_under_test_path = os.path.realpath(apk_under_test_path)

      if not os.path.exists(apk_under_test_path):
        error_func('Unable to find APK under test: %s' % apk_under_test_path)

      self._apk_under_test = apk_helper.ToHelper(apk_under_test_path)

    if args.test_apk.endswith('.apk'):
      self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
      test_apk_path = args.test_apk
      # NOTE(review): this assignment is redundant — it is unconditionally
      # overwritten with the realpath-based helper a few lines below.
      self._test_apk = apk_helper.ToHelper(args.test_apk)
    else:
      self._suite = args.test_apk
      test_apk_path = os.path.join(
          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
          '%s.apk' % args.test_apk)

    # TODO(jbudorick): Move the realpath up to the argument parser once
    # APK-by-name is no longer supported.
    test_apk_path = os.path.realpath(test_apk_path)

    if not os.path.exists(test_apk_path):
      error_func('Unable to find test APK: %s' % test_apk_path)

    self._test_apk = apk_helper.ToHelper(test_apk_path)

    self._apk_under_test_incremental_install_json = (
        args.apk_under_test_incremental_install_json)
    self._test_apk_incremental_install_json = (
        args.test_apk_incremental_install_json)

    if self._test_apk_incremental_install_json:
      # Incremental-install suites are named '<suite>_incremental'; strip the
      # suffix to recover the base suite name.
      assert self._suite.endswith('_incremental')
      self._suite = self._suite[:-len('_incremental')]

    self._test_jar = args.test_jar
    self._test_support_apk = apk_helper.ToHelper(os.path.join(
        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
        '%sSupport.apk' % self._suite))

    if not os.path.exists(self._test_apk.path):
      error_func('Unable to find test APK: %s' % self._test_apk.path)
    if not self._test_jar:
      logging.warning('Test jar not specified. Test runner will not have '
                      'Java annotation info available. May not handle test '
                      'timeouts correctly.')
    elif not os.path.exists(self._test_jar):
      error_func('Unable to find test JAR: %s' % self._test_jar)

    self._test_package = self._test_apk.GetPackageName()
    all_instrumentations = self._test_apk.GetAllInstrumentations()
    # NOTE(review): the 'chromium-junit3' metadata value appears to encode a
    # boolean as the string '0xffffffff' — confirm against the manifest
    # generation before relying on it.
    all_junit3_runner_classes = [
        x for x in all_instrumentations if ('0xffffffff' in x.get(
            'chromium-junit3', ''))]
    all_junit4_runner_classes = [
        x for x in all_instrumentations if ('0xffffffff' not in x.get(
            'chromium-junit3', ''))]

    if len(all_junit3_runner_classes) > 1:
      logging.warning('This test apk has more than one JUnit3 instrumentation')
    if len(all_junit4_runner_classes) > 1:
      logging.warning('This test apk has more than one JUnit4 instrumentation')

    self._junit3_runner_class = (
      all_junit3_runner_classes[0]['android:name']
      if all_junit3_runner_classes else self.test_apk.GetInstrumentationName())

    self._junit4_runner_class = (
      all_junit4_runner_classes[0]['android:name']
      if all_junit4_runner_classes else None)

    if self._junit4_runner_class:
      if self._test_apk_incremental_install_json:
        # For incremental installs, the real runner is recorded in metadata.
        self._junit4_runner_supports_listing = next(
            (True for x in self._test_apk.GetAllMetadata()
             if 'real-instr' in x[0] and x[1] in _TEST_LIST_JUNIT4_RUNNERS),
            False)
      else:
        self._junit4_runner_supports_listing = (
            self._junit4_runner_class in _TEST_LIST_JUNIT4_RUNNERS)

    self._package_info = None
    if self._apk_under_test:
      package_under_test = self._apk_under_test.GetPackageName()
      # itervalues: this file targets Python 2.
      for package_info in constants.PACKAGE_INFO.itervalues():
        if package_under_test == package_info.package:
          self._package_info = package_info
          break
    if not self._package_info:
      logging.warning('Unable to find package info for %s', self._test_package)

    for apk in args.additional_apks:
      if not os.path.exists(apk):
        error_func('Unable to find additional APK: %s' % apk)
    self._additional_apks = (
        [apk_helper.ToHelper(x) for x in args.additional_apks])

  def _initializeDataDependencyAttributes(self, args, data_deps_delegate):
    """Records the runtime-deps delegate; deps are resolved lazily in SetUp."""
    self._data_deps = []
    self._data_deps_delegate = data_deps_delegate
    self._runtime_deps_path = args.runtime_deps_path

    if not self._runtime_deps_path:
      logging.warning('No data dependencies will be pushed.')

  def _initializeTestFilterAttributes(self, args):
    """Builds the test filter plus annotation include/exclude pair lists."""
    self._test_filter = test_filter.InitializeFilterFromArgs(args)

    def annotation_element(a):
      # Splits 'Annotation=value' into ('Annotation', 'value'); a bare
      # annotation maps to ('Annotation', None).
      a = a.split('=', 1)
      return (a[0], a[1] if len(a) == 2 else None)

    if args.annotation_str:
      self._annotations = [
          annotation_element(a) for a in args.annotation_str.split(',')]
    elif not self._test_filter:
      # Without an explicit filter, default to the standard size annotations.
      self._annotations = [
          annotation_element(a) for a in _DEFAULT_ANNOTATIONS]
    else:
      self._annotations = []

    if args.exclude_annotation_str:
      self._excluded_annotations = [
          annotation_element(a) for a in args.exclude_annotation_str.split(',')]
    else:
      self._excluded_annotations = []

    # Unless disabled tests were explicitly requested, exclude them.
    requested_annotations = set(a[0] for a in self._annotations)
    if not args.run_disabled:
      self._excluded_annotations.extend(
          annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS
          if a not in requested_annotations)

  def _initializeFlagAttributes(self, args):
    """Collects device command-line flags from args and the flags file."""
    self._use_apk_under_test_flags_file = args.use_apk_under_test_flags_file
    self._flags = ['--enable-test-intents']
    if args.command_line_flags:
      self._flags.extend(args.command_line_flags)
    if args.device_flags_file:
      with open(args.device_flags_file) as device_flags_file:
        stripped_lines = (l.strip() for l in device_flags_file)
        self._flags.extend(flag for flag in stripped_lines if flag)
    if args.strict_mode and args.strict_mode != 'off':
      self._flags.append('--strict-mode=' + args.strict_mode)

  def _initializeDriverAttributes(self):
    """Detects the on-device instrumentation driver APK, if built."""
    self._driver_apk = os.path.join(
        constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
        'OnDeviceInstrumentationDriver.apk')
    if os.path.exists(self._driver_apk):
      driver_apk = apk_helper.ApkHelper(self._driver_apk)
      self._driver_package = driver_apk.GetPackageName()
      self._driver_name = driver_apk.GetInstrumentationName()
    else:
      self._driver_apk = None

  def _initializeTestControlAttributes(self, args):
    self._screenshot_dir = args.screenshot_dir
    # Default to no scaling when no timeout scale was given.
    self._timeout_scale = args.timeout_scale or 1
    self._wait_for_java_debugger = args.wait_for_java_debugger

  def _initializeTestCoverageAttributes(self, args):
    self._coverage_directory = args.coverage_dir

  def _initializeLogAttributes(self, args):
    self._enable_java_deobfuscation = args.enable_java_deobfuscation
    self._store_tombstones = args.store_tombstones
    self._symbolizer = stack_symbolizer.Symbolizer(
        self.apk_under_test.path if self.apk_under_test else None)

  def _initializeEditPrefsAttributes(self, args):
    """Loads shared-preference edits from the JSON file given in args."""
    if not hasattr(args, 'shared_prefs_file') or not args.shared_prefs_file:
      return
    if not isinstance(args.shared_prefs_file, str):
      logging.warning("Given non-string for a filepath")
      return
    self._edit_shared_prefs = shared_preference_utils.ExtractSettingsFromJson(
        args.shared_prefs_file)

  def _initializeReplaceSystemPackageAttributes(self, args):
    if (not hasattr(args, 'replace_system_package')
        or not args.replace_system_package):
      return
    self._replace_system_package = args.replace_system_package

  def _initializeUseWebviewProviderAttributes(self, args):
    if (not hasattr(args, 'use_webview_provider')
        or not args.use_webview_provider):
      return
    self._use_webview_provider = args.use_webview_provider

  # --- Read-only accessors over the attributes initialized above. ---

  @property
  def additional_apks(self):
    return self._additional_apks

  @property
  def apk_under_test(self):
    return self._apk_under_test

  @property
  def apk_under_test_incremental_install_json(self):
    return self._apk_under_test_incremental_install_json

  @property
  def coverage_directory(self):
    return self._coverage_directory

  @property
  def driver_apk(self):
    return self._driver_apk

  @property
  def driver_package(self):
    return self._driver_package

  @property
  def driver_name(self):
    return self._driver_name

  @property
  def edit_shared_prefs(self):
    return self._edit_shared_prefs

  @property
  def external_shard_index(self):
    return self._external_shard_index

  @property
  def flags(self):
    return self._flags

  @property
  def junit3_runner_class(self):
    return self._junit3_runner_class

  @property
  def junit4_runner_class(self):
    return self._junit4_runner_class

  @property
  def junit4_runner_supports_listing(self):
    return self._junit4_runner_supports_listing

  @property
  def package_info(self):
    return self._package_info

  @property
  def replace_system_package(self):
    return self._replace_system_package

  @property
  def use_webview_provider(self):
    return self._use_webview_provider

  @property
  def screenshot_dir(self):
    return self._screenshot_dir

  @property
  def store_tombstones(self):
    return self._store_tombstones

  @property
  def suite(self):
    return self._suite

  @property
  def symbolizer(self):
    return self._symbolizer

  @property
  def test_apk(self):
    return self._test_apk

  @property
  def test_apk_incremental_install_json(self):
    return self._test_apk_incremental_install_json

  @property
  def test_jar(self):
    return self._test_jar

  @property
  def test_support_apk(self):
    return self._test_support_apk

  @property
  def test_package(self):
    return self._test_package

  @property
  def timeout_scale(self):
    return self._timeout_scale

  @property
  def total_external_shards(self):
    return self._total_external_shards

  @property
  def use_apk_under_test_flags_file(self):
    return self._use_apk_under_test_flags_file

  @property
  def wait_for_java_debugger(self):
    return self._wait_for_java_debugger

  #override
  def TestType(self):
    return 'instrumentation'

  #override
  def GetPreferredAbis(self):
    # Prefer the test APK's ABIs; fall back to the APK under test's.
    ret = self._test_apk.GetAbis()
    if not ret and self._apk_under_test:
      ret = self._apk_under_test.GetAbis()
    return ret

  #override
  def SetUp(self):
    """Resolves data dependencies and (optionally) the deobfuscator pool."""
    self._data_deps.extend(
        self._data_deps_delegate(self._runtime_deps_path))
    if self._enable_java_deobfuscation:
      self._deobfuscator = deobfuscator.DeobfuscatorPool(
          self.test_apk.path + '.mapping')

  def GetDataDependencies(self):
    return self._data_deps

  def GetTests(self):
    """Lists, inflates, and filters the tests for this instance."""
    if self.test_jar:
      raw_tests = GetAllTestsFromJar(self.test_jar)
    else:
      raw_tests = GetAllTestsFromApk(self.test_apk.path)
    return self.ProcessRawTests(raw_tests)

  def MaybeDeobfuscateLines(self, lines):
    """Deobfuscates |lines| when a deobfuscator is configured; else no-op."""
    if not self._deobfuscator:
      return lines
    return self._deobfuscator.TransformLines(lines)

  def ProcessRawTests(self, raw_tests):
    """Inflates, parameterizes, and filters |raw_tests| into runnable tests.

    Raises:
      MissingJUnit4RunnerException: if JUnit4 tests exist but no JUnit4
        runner was found in the manifest.
    """
    inflated_tests = self._ParameterizeTestsWithFlags(
        self._InflateTests(raw_tests))
    if self._junit4_runner_class is None and any(
        t['is_junit4'] for t in inflated_tests):
      raise MissingJUnit4RunnerException()
    filtered_tests = FilterTests(
        inflated_tests, self._test_filter, self._annotations,
        self._excluded_annotations)
    if self._test_filter and not filtered_tests:
      # Help diagnose filters that match nothing by listing every candidate.
      for t in inflated_tests:
        logging.debug('  %s', GetUniqueTestName(t))
      logging.warning('Unmatched Filter: %s', self._test_filter)
    return filtered_tests

  # pylint: disable=no-self-use
  def _InflateTests(self, tests):
    """Expands per-class test dicts into one dict per test method.

    Method annotations override class annotations of the same name. A class
    directly extending java.lang.Object is treated as a JUnit4 test.
    """
    inflated_tests = []
    for c in tests:
      for m in c['methods']:
        a = dict(c['annotations'])
        a.update(m['annotations'])
        inflated_tests.append({
            'class': c['class'],
            'method': m['method'],
            'annotations': a,
            'is_junit4': c['superclass'] == 'java.lang.Object'
        })
    return inflated_tests

  def _ParameterizeTestsWithFlags(self, tests):
    """Duplicates CommandLineParameter-annotated tests, one copy per flag.

    The original test keeps the first parameter; each additional parameter
    produces a shallow copy with a '--<parameter>' flag.
    """
    new_tests = []
    for t in tests:
      annotations = t['annotations']
      parameters = None
      if (annotations.get(_COMMANDLINE_PARAMETERIZATION)
          and _SKIP_PARAMETERIZATION not in annotations):
        parameters = annotations[_COMMANDLINE_PARAMETERIZATION]['value']
      if parameters:
        t['flags'] = [parameters[0]]
        for p in parameters[1:]:
          parameterized_t = copy.copy(t)
          parameterized_t['flags'] = ['--%s' % p]
          new_tests.append(parameterized_t)
    return tests + new_tests

  def GetDriverEnvironmentVars(
      self, test_list=None, test_list_file_path=None):
    """Builds the intent-extra dict for the on-device driver."""
    env = {
      _EXTRA_DRIVER_TARGET_PACKAGE: self.test_package,
      _EXTRA_DRIVER_TARGET_CLASS: self.junit3_runner_class,
      _EXTRA_TIMEOUT_SCALE: self._timeout_scale,
    }

    if test_list:
      env[_EXTRA_DRIVER_TEST_LIST] = ','.join(test_list)

    if test_list_file_path:
      env[_EXTRA_DRIVER_TEST_LIST_FILE] = (
          os.path.basename(test_list_file_path))

    return env

  @staticmethod
  def ParseAmInstrumentRawOutput(raw_output):
    # Kept as a staticmethod for backwards compatibility with the module-level
    # function of the same name.
    return ParseAmInstrumentRawOutput(raw_output)

  @staticmethod
  def GenerateTestResults(
      result_code, result_bundle, statuses, start_ms, duration_ms,
      device_abi, symbolizer):
    # Kept as a staticmethod for backwards compatibility with the module-level
    # function of the same name.
    return GenerateTestResults(result_code, result_bundle, statuses,
                               start_ms, duration_ms, device_abi, symbolizer)

  #override
  def TearDown(self):
    """Releases the symbolizer and the deobfuscator pool, if any."""
    self.symbolizer.CleanUp()
    if self._deobfuscator:
      self._deobfuscator.Close()
      self._deobfuscator = None
diff --git a/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
new file mode 100755
index 0000000000..78446d1527
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -0,0 +1,972 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for instrumentation_test_instance."""
+
+# pylint: disable=protected-access
+
+import collections
+import tempfile
+import unittest
+
+from pylib.base import base_test_result
+from pylib.constants import host_paths
+from pylib.instrumentation import instrumentation_test_instance
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+_INSTRUMENTATION_TEST_INSTANCE_PATH = (
+ 'pylib.instrumentation.instrumentation_test_instance.%s')
+
+class InstrumentationTestInstanceTest(unittest.TestCase):
+
+ def setUp(self):
+ options = mock.Mock()
+ options.tool = ''
+
+ @staticmethod
+ def createTestInstance():
+ c = _INSTRUMENTATION_TEST_INSTANCE_PATH % 'InstrumentationTestInstance'
+ with mock.patch('%s._initializeApkAttributes' % c), (
+ mock.patch('%s._initializeDataDependencyAttributes' % c)), (
+ mock.patch('%s._initializeTestFilterAttributes' % c)), (
+ mock.patch('%s._initializeFlagAttributes' % c)), (
+ mock.patch('%s._initializeDriverAttributes' % c)), (
+ mock.patch('%s._initializeTestControlAttributes' % c)), (
+ mock.patch('%s._initializeTestCoverageAttributes' % c)):
+ return instrumentation_test_instance.InstrumentationTestInstance(
+ mock.MagicMock(), mock.MagicMock(), lambda s: None)
+
+ _FlagAttributesArgs = collections.namedtuple(
+ '_FlagAttributesArgs',
+ [
+ 'command_line_flags',
+ 'device_flags_file',
+ 'strict_mode',
+ 'use_apk_under_test_flags_file'
+ ])
+
+ def createFlagAttributesArgs(
+ self, command_line_flags=None, device_flags_file=None,
+ strict_mode=None, use_apk_under_test_flags_file=False):
+ return self._FlagAttributesArgs(
+ command_line_flags, device_flags_file, strict_mode,
+ use_apk_under_test_flags_file)
+
+ def test_initializeFlagAttributes_commandLineFlags(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar'])
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+ def test_initializeFlagAttributes_deviceFlagsFile(self):
+ o = self.createTestInstance()
+ with tempfile.NamedTemporaryFile() as flags_file:
+ flags_file.write('\n'.join(['--foo', '--bar']))
+ flags_file.flush()
+
+ args = self.createFlagAttributesArgs(device_flags_file=flags_file.name)
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar'])
+
+ def test_initializeFlagAttributes_strictModeOn(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(strict_mode='on')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on'])
+
+ def test_initializeFlagAttributes_strictModeOff(self):
+ o = self.createTestInstance()
+ args = self.createFlagAttributesArgs(strict_mode='off')
+ o._initializeFlagAttributes(args)
+ self.assertEquals(o._flags, ['--enable-test-intents'])
+
+ def testGetTests_noFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod1',
+ 'is_junit4': True,
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod2',
+ 'is_junit4': True,
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'method': 'testMethod1',
+ 'is_junit4': True,
+ },
+ ]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_simpleGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_simpleGtestUnqualifiedNameFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = 'SampleTest.testMethod1'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_parameterizedTestGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1__sandboxed_mode',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod1',
+ 'is_junit4': True,
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'method': 'testMethod1__sandboxed_mode',
+ 'is_junit4': True,
+ },
+ ]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ o._test_filter = 'org.chromium.test.SampleTest.testMethod1'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_wildcardGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = 'org.chromium.test.SampleTest2.*'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_negativeGtestFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod2',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._test_filter = '*-org.chromium.test.SampleTest.testMethod1'
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_annotationFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('SmallTest', None)]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_excludedAnnotationFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': False,
+ 'method': 'testMethod2',
+ },
+ ]
+
+ o._excluded_annotations = [('SmallTest', None)]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_annotationSimpleValueFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None,
+ 'TestValue': '1',
+ },
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'MediumTest': None,
+ 'TestValue': '2',
+ },
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None,
+ 'TestValue': '3',
+ },
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Foo']},
+ 'SmallTest': None,
+ 'TestValue': '1',
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': False,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('TestValue', '1')]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTests_annotationDictValueFilter(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': True,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('Feature', 'Bar')]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGetTestName(self):
+ test = {
+ 'annotations': {
+ 'RunWith': {'value': 'class J4Runner'},
+ 'SmallTest': {},
+ 'Test': {'expected': 'class org.junit.Test$None',
+ 'timeout': '0'},
+ 'UiThreadTest': {}},
+ 'class': 'org.chromium.TestA',
+ 'is_junit4': True,
+ 'method': 'testSimple'}
+ unqualified_class_test = {
+ 'class': test['class'].split('.')[-1],
+ 'method': test['method']
+ }
+
+ self.assertEquals(
+ instrumentation_test_instance.GetTestName(test, sep='.'),
+ 'org.chromium.TestA.testSimple')
+ self.assertEquals(
+ instrumentation_test_instance.GetTestName(
+ unqualified_class_test, sep='.'),
+ 'TestA.testSimple')
+
+ def testGetUniqueTestName(self):
+ test = {
+ 'annotations': {
+ 'RunWith': {'value': 'class J4Runner'},
+ 'SmallTest': {},
+ 'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+ 'UiThreadTest': {}},
+ 'class': 'org.chromium.TestA',
+ 'flags': ['enable_features=abc'],
+ 'is_junit4': True,
+ 'method': 'testSimple'}
+ self.assertEquals(
+ instrumentation_test_instance.GetUniqueTestName(
+ test, sep='.'),
+ 'org.chromium.TestA.testSimple_with_enable_features=abc')
+
+ def testGetTestNameWithoutParameterPostfix(self):
+ test = {
+ 'annotations': {
+ 'RunWith': {'value': 'class J4Runner'},
+ 'SmallTest': {},
+ 'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'},
+ 'UiThreadTest': {}},
+ 'class': 'org.chromium.TestA__sandbox_mode',
+ 'flags': 'enable_features=abc',
+ 'is_junit4': True,
+ 'method': 'testSimple'}
+ unqualified_class_test = {
+ 'class': test['class'].split('.')[-1],
+ 'method': test['method']
+ }
+ self.assertEquals(
+ instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+ test, sep='.'),
+ 'org.chromium.TestA')
+ self.assertEquals(
+ instrumentation_test_instance.GetTestNameWithoutParameterPostfix(
+ unqualified_class_test, sep='.'),
+ 'TestA')
+
+ def testGetTests_multipleAnnotationValuesRequested(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'Feature': {'value': ['Foo']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Baz']},
+ 'MediumTest': None,
+ },
+ 'method': 'testMethod2',
+ },
+ ],
+ },
+ {
+ 'annotations': {'Feature': {'value': ['Bar']}},
+ 'class': 'org.chromium.test.SampleTest2',
+ 'superclass': 'junit.framework.TestCase',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Baz']},
+ 'MediumTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': False,
+ 'method': 'testMethod2',
+ },
+ {
+ 'annotations': {
+ 'Feature': {'value': ['Bar']},
+ 'SmallTest': None,
+ },
+ 'class': 'org.chromium.test.SampleTest2',
+ 'is_junit4': False,
+ 'method': 'testMethod1',
+ },
+ ]
+
+ o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')]
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testGenerateTestResults_noStatus(self):
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, [], 0, 1000, None, None)
+ self.assertEqual([], results)
+
+ def testGenerateTestResults_testPassed(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+ def testGenerateTestResults_testSkipped_true(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'test_skipped': 'true',
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+ def testGenerateTestResults_testSkipped_false(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (0, {
+ 'test_skipped': 'false',
+ }),
+ (0, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+ def testGenerateTestResults_testFailed(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (-2, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+
+ def testGenerateTestResults_testUnknownException(self):
+ stacktrace = 'long\nstacktrace'
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (-1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ 'stack': stacktrace,
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+ self.assertEqual(stacktrace, results[0].GetLog())
+
+ def testGenerateJUnitTestResults_testSkipped_true(self):
+ statuses = [
+ (1, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ (-3, {
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
+ }),
+ ]
+ results = instrumentation_test_instance.GenerateTestResults(
+ None, None, statuses, 0, 1000, None, None)
+ self.assertEqual(1, len(results))
+ self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+ def testCommandLineParameterization(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'CommandLineParameter': {
+ 'value': ['', 'enable-features=abc']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {'SmallTest': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'SmallTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': [''],
+ 'is_junit4': True,
+ 'method': 'testMethod1'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': [''],
+ 'is_junit4': True,
+ 'method': 'testMethod2'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'SmallTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': ['--enable-features=abc'],
+ 'is_junit4': True,
+ 'method': 'testMethod1'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': ['--enable-features=abc'],
+ 'is_junit4': True,
+ 'method': 'testMethod2'}]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+ self.assertEquals(actual_tests, expected_tests)
+
+ def testCommandLineParameterization_skipped(self):
+ o = self.createTestInstance()
+ raw_tests = [
+ {
+ 'annotations': {'CommandLineParameter': {
+ 'value': ['', 'enable-features=abc']}},
+ 'class': 'org.chromium.test.SampleTest',
+ 'superclass': 'java.lang.Object',
+ 'methods': [
+ {
+ 'annotations': {
+ 'SmallTest': None,
+ 'SkipCommandLineParameterization': None},
+ 'method': 'testMethod1',
+ },
+ {
+ 'annotations': {'MediumTest': None},
+ 'method': 'testMethod2',
+ },
+ ],
+ }
+ ]
+
+ expected_tests = [
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'SkipCommandLineParameterization': None,
+ 'SmallTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'is_junit4': True,
+ 'method': 'testMethod1'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': [''],
+ 'is_junit4': True,
+ 'method': 'testMethod2'},
+ {
+ 'annotations': {
+ 'CommandLineParameter': {'value': ['', 'enable-features=abc']},
+ 'MediumTest': None},
+ 'class': 'org.chromium.test.SampleTest',
+ 'flags': ['--enable-features=abc'],
+ 'is_junit4': True,
+ 'method': 'testMethod2'}]
+
+ o._test_jar = 'path/to/test.jar'
+ o._junit4_runner_class = 'J4Runner'
+ actual_tests = o.ProcessRawTests(raw_tests)
+ self.assertEquals(actual_tests, expected_tests)
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/instrumentation/json_perf_parser.py b/deps/v8/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000000..c647890ba3
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+ """Summarizes TraceEvent JSON data for performance metrics.
+
+ Example JSON Inputs (More tags can be added but these are required):
+ Measuring Duration:
+ [
+ { "cat": "Java",
+ "ts": 10000000000,
+ "ph": "S",
+ "name": "TestTrace"
+ },
+ { "cat": "Java",
+ "ts": 10000004000,
+ "ph": "F",
+ "name": "TestTrace"
+ },
+ ...
+ ]
+
+ Measuring Call Frequency (FPS):
+ [
+ { "cat": "Java",
+ "ts": 10000000000,
+ "ph": "I",
+ "name": "TestTraceFPS"
+ },
+ { "cat": "Java",
+ "ts": 10000004000,
+ "ph": "I",
+ "name": "TestTraceFPS"
+ },
+ ...
+ ]
+
+ Args:
+    json_data: A list of dictionaries each representing a JSON object.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ A dictionary of result data with the following tags:
+ min: The minimum value tracked.
+ max: The maximum value tracked.
+ average: The average of all the values tracked.
+ count: The number of times the category/name pair was tracked.
+ type: The type of tracking ('Instant' for instant tags and 'Span' for
+      begin/end tags).
+ category: The passed in category filter.
+ name: The passed in name filter.
+ data_points: A list of all of the times used to generate this data.
+ units: The units for the values being reported.
+
+ Raises:
+ Exception: if entry contains invalid data.
+ """
+
+ def EntryFilter(entry):
+ return entry['cat'] == 'Java' and entry['name'] == name
+ filtered_entries = [j for j in json_data if EntryFilter(j)]
+
+ result = {}
+
+ result['min'] = -1
+ result['max'] = -1
+ result['average'] = 0
+ result['count'] = 0
+ result['type'] = 'Unknown'
+ result['category'] = 'Java'
+ result['name'] = name
+ result['data_points'] = []
+ result['units'] = ''
+
+ total_sum = 0
+
+ last_val = 0
+ val_type = None
+ for entry in filtered_entries:
+ if not val_type:
+ if 'mem' in entry:
+ val_type = 'mem'
+
+ def GetVal(entry):
+ return entry['mem']
+
+ result['units'] = 'kb'
+ elif 'ts' in entry:
+ val_type = 'ts'
+
+ def GetVal(entry):
+ return float(entry['ts']) / 1000.0
+
+ result['units'] = 'ms'
+ else:
+ raise Exception('Entry did not contain valid value info: %s' % entry)
+
+ if not val_type in entry:
+ raise Exception('Entry did not contain expected value type "%s" '
+ 'information: %s' % (val_type, entry))
+ val = GetVal(entry)
+ if (entry['ph'] == 'S' and
+ (result['type'] == 'Unknown' or result['type'] == 'Span')):
+ result['type'] = 'Span'
+ last_val = val
+ elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+ (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+ result['type'] == 'Instant'))):
+ if last_val > 0:
+ delta = val - last_val
+ if result['min'] == -1 or result['min'] > delta:
+ result['min'] = delta
+ if result['max'] == -1 or result['max'] < delta:
+ result['max'] = delta
+ total_sum += delta
+ result['count'] += 1
+ result['data_points'].append(delta)
+ if entry['ph'] == 'I':
+ result['type'] = 'Instant'
+ last_val = val
+ if result['count'] > 0:
+ result['average'] = total_sum / result['count']
+
+ return result
+
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+ """Returns the results from GetAverageRunInfo using a JSON string.
+
+ Args:
+ json_string: The string containing JSON.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ See GetAverageRunInfo Returns section.
+ """
+ return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+ """Returns the results from GetAverageRunInfo using a JSON file.
+
+ Args:
+ json_file: The path to a JSON file.
+ name: The 'name' tag to filter on in the JSON file.
+
+ Returns:
+ See GetAverageRunInfo Returns section.
+ """
+ with open(json_file, 'r') as f:
+ data = f.read()
+ perf = json.loads(data)
+
+ return GetAverageRunInfo(perf, name)
diff --git a/deps/v8/build/android/pylib/instrumentation/render_test.html.jinja b/deps/v8/build/android/pylib/instrumentation/render_test.html.jinja
new file mode 100644
index 0000000000..81b85b78e3
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/render_test.html.jinja
@@ -0,0 +1,40 @@
+<html>
+<head>
+ <title>{{ test_name }}</title>
+ <script>
+ function toggleZoom() {
+ for (const img of document.getElementsByTagName("img")) {
+ if (img.hasAttribute('style')) {
+ img.removeAttribute('style');
+ } else {
+ img.style.width = '100%';
+ }
+ }
+ }
+ </script>
+</head>
+<body>
+ <a href="https://cs.chromium.org/search/?q={{ test_name }}&m=100&type=cs">Link to Golden (in repo)</a><br />
+ <a download="{{ test_name }}" href="{{ failure_link }}">Download Failure Image (right click and 'Save link as')</a>
+ <table>
+ <thead>
+ <tr>
+ <th>Failure</th>
+ <th>Golden</th>
+ <th>Diff</th>
+ </tr>
+ </thead>
+ <tbody style="vertical-align: top">
+ <tr onclick="toggleZoom()">
+ <td><img src="{{ failure_link }}" style="width: 100%" /></td>
+ {% if golden_link %}
+ <td><img src="{{ golden_link }}" style="width: 100%" /></td>
+ <td><img src="{{ diff_link }}" style="width: 100%" /></td>
+ {% else %}
+ <td>No Golden Image.</td>
+ {% endif %}
+ </tr>
+ </tbody>
+ </table>
+</body>
+</html>
diff --git a/deps/v8/build/android/pylib/instrumentation/test_result.py b/deps/v8/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000000..24e80a8e5f
--- /dev/null
+++ b/deps/v8/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+ """Result information for a single instrumentation test."""
+
+ def __init__(self, full_name, test_type, start_date, dur, log=''):
+ """Construct an InstrumentationTestResult object.
+
+ Args:
+ full_name: Full name of the test.
+ test_type: Type of the test result as defined in ResultType.
+ start_date: Date in milliseconds when the test began running.
+ dur: Duration of the test run in milliseconds.
+ log: A string listing any errors.
+ """
+ super(InstrumentationTestResult, self).__init__(
+ full_name, test_type, dur, log)
+ name_pieces = full_name.rsplit('#')
+ if len(name_pieces) > 1:
+ self._test_name = name_pieces[1]
+ self._class_name = name_pieces[0]
+ else:
+ self._class_name = full_name
+ self._test_name = full_name
+ self._start_date = start_date
diff --git a/deps/v8/build/android/pylib/junit/__init__.py b/deps/v8/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/junit/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/junit/junit_test_instance.py b/deps/v8/build/android/pylib/junit/junit_test_instance.py
new file mode 100644
index 0000000000..f258cbd7bb
--- /dev/null
+++ b/deps/v8/build/android/pylib/junit/junit_test_instance.py
@@ -0,0 +1,80 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.utils import test_filter
+
+
+class JunitTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, _):
+ super(JunitTestInstance, self).__init__()
+
+ self._android_manifest_path = args.android_manifest_path
+ self._coverage_dir = args.coverage_dir
+ self._debug_socket = args.debug_socket
+ self._jacoco = args.jacoco
+ self._package_filter = args.package_filter
+ self._package_name = args.package_name
+ self._resource_zips = args.resource_zips
+ self._robolectric_runtime_deps_dir = args.robolectric_runtime_deps_dir
+ self._runner_filter = args.runner_filter
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+ self._test_suite = args.test_suite
+
+ #override
+ def TestType(self):
+ return 'junit'
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
+
+ @property
+ def android_manifest_path(self):
+ return self._android_manifest_path
+
+ @property
+ def coverage_dir(self):
+ return self._coverage_dir
+
+ @property
+ def jacoco(self):
+ return self._jacoco
+
+ @property
+ def debug_socket(self):
+ return self._debug_socket
+
+ @property
+ def package_filter(self):
+ return self._package_filter
+
+ @property
+ def package_name(self):
+ return self._package_name
+
+ @property
+ def resource_zips(self):
+ return self._resource_zips
+
+ @property
+ def robolectric_runtime_deps_dir(self):
+ return self._robolectric_runtime_deps_dir
+
+ @property
+ def runner_filter(self):
+ return self._runner_filter
+
+ @property
+ def test_filter(self):
+ return self._test_filter
+
+ @property
+ def suite(self):
+ return self._test_suite
diff --git a/deps/v8/build/android/pylib/linker/__init__.py b/deps/v8/build/android/pylib/linker/__init__.py
new file mode 100644
index 0000000000..9228df89b0
--- /dev/null
+++ b/deps/v8/build/android/pylib/linker/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/linker/linker_test_instance.py b/deps/v8/build/android/pylib/linker/linker_test_instance.py
new file mode 100644
index 0000000000..5f19db9678
--- /dev/null
+++ b/deps/v8/build/android/pylib/linker/linker_test_instance.py
@@ -0,0 +1,51 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.linker import test_case
+from pylib.utils import test_filter
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import unittest_util
+
+
+class LinkerTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args):
+ super(LinkerTestInstance, self).__init__()
+ self._test_apk = args.test_apk
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+
+ @property
+ def test_apk(self):
+ return self._test_apk
+
+ @property
+ def test_filter(self):
+ return self._test_filter
+
+ def GetTests(self):
+ tests = [
+ test_case.LinkerSharedRelroTest(is_low_memory=False),
+ test_case.LinkerSharedRelroTest(is_low_memory=True)
+ ]
+
+ if self._test_filter:
+ filtered_names = unittest_util.FilterTestNames(
+ (t.qualified_name for t in tests), self._test_filter)
+ tests = [
+ t for t in tests
+ if t.qualified_name in filtered_names]
+
+ return tests
+
+ def SetUp(self):
+ pass
+
+ def TearDown(self):
+ pass
+
+ def TestType(self):
+ return 'linker'
diff --git a/deps/v8/build/android/pylib/linker/test_case.py b/deps/v8/build/android/pylib/linker/test_case.py
new file mode 100644
index 0000000000..871da89add
--- /dev/null
+++ b/deps/v8/build/android/pylib/linker/test_case.py
@@ -0,0 +1,215 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for linker-specific test cases.
+
+ The custom dynamic linker can only be tested through a custom test case
+ for various technical reasons:
+
+ - It's an 'invisible feature', i.e. it doesn't expose a new API or
+ behaviour, all it does is save RAM when loading native libraries.
+
+ - Checking that it works correctly requires several things that do not
+ fit the existing GTest-based and instrumentation-based tests:
+
+ - Native test code needs to be run in both the browser and renderer
+ process at the same time just after loading native libraries, in
+ a completely asynchronous way.
+
+ - Each test case requires restarting a whole new application process
+ with a different command-line.
+
+ - Enabling test support in the Linker code requires building a special
+ APK with a flag to activate special test-only support code in the
+ Linker code itself.
+
+ Host-driven tests have also been tried, but since they're really
+ sub-classes of instrumentation tests, they didn't work well either.
+
+ To build and run the linker tests, do the following:
+
+ ninja -C out/Debug chromium_linker_test_apk
+ out/Debug/bin/run_chromium_linker_test_apk
+
+"""
+# pylint: disable=R0201
+
+import logging
+import re
+
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib.base import base_test_result
+
+
+ResultType = base_test_result.ResultType
+
+_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
+_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
+_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'
+
+# Logcat filters used during each test. Only the 'chromium' one is really
+# needed, but the logs are added to the TestResult in case of error, and
+# it is handy to have others as well when troubleshooting.
+_LOGCAT_FILTERS = ['*:s', 'chromium:v', 'cr_chromium:v',
+ 'cr_ChromiumAndroidLinker:v', 'cr_LibraryLoader:v',
+ 'cr_LinkerTest:v']
+#_LOGCAT_FILTERS = ['*:v'] ## DEBUG
+
+# Regular expression used to match status lines in logcat.
+_RE_BROWSER_STATUS_LINE = re.compile(r' BROWSER_LINKER_TEST: (FAIL|SUCCESS)$')
+_RE_RENDERER_STATUS_LINE = re.compile(r' RENDERER_LINKER_TEST: (FAIL|SUCCESS)$')
+
+def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
+ """Force-start an activity and wait up to |timeout| seconds until the full
+ linker test status lines appear in the logcat, recorded through |device|.
+ Args:
+ device: A DeviceUtils instance.
+ timeout: Timeout in seconds
+ Returns:
+ A (status, logs) tuple, where status is a ResultType constant, and logs
+    is the final logcat output as a string.
+ """
+
+ # 1. Start recording logcat with appropriate filters.
+ with device.GetLogcatMonitor(filter_specs=_LOGCAT_FILTERS) as logmon:
+
+ # 2. Force-start activity.
+ device.StartActivity(
+ intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
+ force_stop=True)
+
+ # 3. Wait up to |timeout| seconds until the test status is in the logcat.
+ result = ResultType.PASS
+ try:
+ browser_match = logmon.WaitFor(_RE_BROWSER_STATUS_LINE, timeout=timeout)
+ logging.debug('Found browser match: %s', browser_match.group(0))
+ renderer_match = logmon.WaitFor(_RE_RENDERER_STATUS_LINE,
+ timeout=timeout)
+ logging.debug('Found renderer match: %s', renderer_match.group(0))
+ if (browser_match.group(1) != 'SUCCESS'
+ or renderer_match.group(1) != 'SUCCESS'):
+ result = ResultType.FAIL
+ except device_errors.CommandTimeoutError:
+ result = ResultType.TIMEOUT
+
+ logcat = device.adb.Logcat(dump=True)
+
+ logmon.Close()
+ return result, '\n'.join(logcat)
+
+
+class LibraryLoadMap(dict):
+ """A helper class to pretty-print a map of library names to load addresses."""
+ def __str__(self):
+ items = ['\'%s\': 0x%x' % (name, address) for \
+ (name, address) in self.iteritems()]
+ return '{%s}' % (', '.join(items))
+
+ def __repr__(self):
+ return 'LibraryLoadMap(%s)' % self.__str__()
+
+
+class AddressList(list):
+ """A helper class to pretty-print a list of load addresses."""
+ def __str__(self):
+ items = ['0x%x' % address for address in self]
+ return '[%s]' % (', '.join(items))
+
+ def __repr__(self):
+ return 'AddressList(%s)' % self.__str__()
+
+
+class LinkerTestCaseBase(object):
+ """Base class for linker test cases."""
+
+ def __init__(self, is_low_memory=False):
+ """Create a test case.
+ Args:
+ is_low_memory: True to simulate a low-memory device, False otherwise.
+ """
+ test_suffix = 'ForLinker'
+ self.is_low_memory = is_low_memory
+ if is_low_memory:
+ test_suffix += 'LowMemoryDevice'
+ else:
+ test_suffix += 'RegularDevice'
+ class_name = self.__class__.__name__
+ self.qualified_name = '%s.%s' % (class_name, test_suffix)
+ self.tagged_name = self.qualified_name
+
+ def _RunTest(self, _device):
+    """Run the test, must be overridden.
+ Args:
+ _device: A DeviceUtils interface.
+ Returns:
+ A (status, log) tuple, where <status> is a ResultType constant, and <log>
+ is the logcat output captured during the test in case of error, or None
+ in case of success.
+ """
+ return ResultType.FAIL, 'Unimplemented _RunTest() method!'
+
+ def Run(self, device):
+ """Run the test on a given device.
+ Args:
+ device: Name of target device where to run the test.
+ Returns:
+ A base_test_result.TestRunResult() instance.
+ """
+ margin = 8
+ print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name)
+ logging.info('Running linker test: %s', self.tagged_name)
+
+ command_line_flags = ''
+ if self.is_low_memory:
+ command_line_flags += ' --low-memory-device'
+ device.WriteFile(_COMMAND_LINE_FILE, command_line_flags)
+
+ # Run the test.
+ status, logs = self._RunTest(device)
+
+ result_text = 'OK'
+ if status == ResultType.FAIL:
+ result_text = 'FAILED'
+ elif status == ResultType.TIMEOUT:
+ result_text = 'TIMEOUT'
+ print '[ %*s ] %s' % (margin, result_text, self.tagged_name)
+
+ return base_test_result.BaseTestResult(self.tagged_name, status, log=logs)
+
+
+ def __str__(self):
+ return self.tagged_name
+
+ def __repr__(self):
+ return self.tagged_name
+
+
+class LinkerSharedRelroTest(LinkerTestCaseBase):
+ """A linker test case to check the status of shared RELRO sections.
+
+ The core of the checks performed here are pretty simple:
+
+ - Clear the logcat and start recording with an appropriate set of filters.
+ - Create the command-line appropriate for the test-case.
+ - Start the activity (always forcing a cold start).
+ - Every second, look at the current content of the filtered logcat lines
+ and look for instances of the following:
+
+ BROWSER_LINKER_TEST: <status>
+ RENDERER_LINKER_TEST: <status>
+
+ where <status> can be either FAIL or SUCCESS. These lines can appear
+ in any order in the logcat. Once both browser and renderer status are
+ found, stop the loop. Otherwise timeout after 30 seconds.
+
+ Note that there can be other lines beginning with BROWSER_LINKER_TEST:
+ and RENDERER_LINKER_TEST:, but are not followed by a <status> code.
+
+ - The test case passes if the <status> for both the browser and renderer
+    process are SUCCESS. Otherwise it's a fail.
+ """
+ def _RunTest(self, device):
+ # Wait up to 30 seconds until the linker test status is in the logcat.
+ return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
diff --git a/deps/v8/build/android/pylib/local/__init__.py b/deps/v8/build/android/pylib/local/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/local/device/__init__.py b/deps/v8/build/android/pylib/local/device/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/local/device/local_device_environment.py b/deps/v8/build/android/pylib/local/device/local_device_environment.py
new file mode 100644
index 0000000000..4d7aa82ad0
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_environment.py
@@ -0,0 +1,300 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import datetime
+import functools
+import logging
+import os
+import shutil
+import tempfile
+import threading
+
+import devil_chromium
+from devil import base_error
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import logcat_monitor
+from devil.android.sdk import adb_wrapper
+from devil.utils import file_utils
+from devil.utils import parallelizer
+from pylib import constants
+from pylib.base import environment
+from pylib.utils import instrumentation_tracing
+from py_trace_event import trace_event
+
+
+LOGCAT_FILTERS = [
+ 'chromium:v',
+ 'cr_*:v',
+ 'DEBUG:I',
+ 'StrictMode:D',
+]
+
+
+def _DeviceCachePath(device):
+ file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
+ return os.path.join(constants.GetOutDirectory(), file_name)
+
+
+def handle_shard_failures(f):
+ """A decorator that handles device failures for per-device functions.
+
+ Args:
+ f: the function being decorated. The function must take at least one
+ argument, and that argument must be the device.
+ """
+ return handle_shard_failures_with(None)(f)
+
+
+# TODO(jbudorick): Refactor this to work as a decorator or context manager.
+def handle_shard_failures_with(on_failure):
+ """A decorator that handles device failures for per-device functions.
+
+ This calls on_failure in the event of a failure.
+
+ Args:
+ f: the function being decorated. The function must take at least one
+ argument, and that argument must be the device.
+ on_failure: A binary function to call on failure.
+ """
+ def decorator(f):
+ @functools.wraps(f)
+ def wrapper(dev, *args, **kwargs):
+ try:
+ return f(dev, *args, **kwargs)
+ except device_errors.CommandTimeoutError:
+ logging.exception('Shard timed out: %s(%s)', f.__name__, str(dev))
+ except device_errors.DeviceUnreachableError:
+ logging.exception('Shard died: %s(%s)', f.__name__, str(dev))
+ except base_error.BaseError:
+ logging.exception('Shard failed: %s(%s)', f.__name__, str(dev))
+ except SystemExit:
+ logging.exception('Shard killed: %s(%s)', f.__name__, str(dev))
+ raise
+ if on_failure:
+ on_failure(dev, f.__name__)
+ return None
+
+ return wrapper
+
+ return decorator
+
+
+class LocalDeviceEnvironment(environment.Environment):
+
+ def __init__(self, args, output_manager, _error_func):
+ super(LocalDeviceEnvironment, self).__init__(output_manager)
+ self._blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+ if args.blacklist_file
+ else None)
+ self._device_serials = args.test_devices
+ self._devices_lock = threading.Lock()
+ self._devices = None
+ self._concurrent_adb = args.enable_concurrent_adb
+ self._enable_device_cache = args.enable_device_cache
+ self._logcat_monitors = []
+ self._logcat_output_dir = args.logcat_output_dir
+ self._logcat_output_file = args.logcat_output_file
+ self._max_tries = 1 + args.num_retries
+ self._preferred_abis = None
+ self._recover_devices = args.recover_devices
+ self._skip_clear_data = args.skip_clear_data
+ self._tool_name = args.tool
+ self._trace_output = None
+ if hasattr(args, 'trace_output'):
+ self._trace_output = args.trace_output
+ self._trace_all = None
+ if hasattr(args, 'trace_all'):
+ self._trace_all = args.trace_all
+
+ devil_chromium.Initialize(
+ output_directory=constants.GetOutDirectory(),
+ adb_path=args.adb_path)
+
+ # Some things such as Forwarder require ADB to be in the environment path.
+ adb_dir = os.path.dirname(adb_wrapper.AdbWrapper.GetAdbPath())
+ if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+ os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH']
+
+ #override
+ def SetUp(self):
+ if self.trace_output and self._trace_all:
+ to_include = [r"pylib\..*", r"devil\..*", "__main__"]
+ to_exclude = ["logging"]
+ instrumentation_tracing.start_instrumenting(self.trace_output, to_include,
+ to_exclude)
+ elif self.trace_output:
+ self.EnableTracing()
+
+ # Must be called before accessing |devices|.
+ def SetPreferredAbis(self, abis):
+ assert self._devices is None
+ self._preferred_abis = abis
+
+ def _InitDevices(self):
+ device_arg = []
+ if self._device_serials:
+ device_arg = self._device_serials
+
+ self._devices = device_utils.DeviceUtils.HealthyDevices(
+ self._blacklist,
+ retries=5,
+ enable_usb_resets=True,
+ enable_device_files_cache=self._enable_device_cache,
+ default_retries=self._max_tries - 1,
+ device_arg=device_arg,
+ abis=self._preferred_abis)
+
+ if self._logcat_output_file:
+ self._logcat_output_dir = tempfile.mkdtemp()
+
+ @handle_shard_failures_with(on_failure=self.BlacklistDevice)
+ def prepare_device(d):
+ d.WaitUntilFullyBooted()
+
+ if self._enable_device_cache:
+ cache_path = _DeviceCachePath(d)
+ if os.path.exists(cache_path):
+ logging.info('Using device cache: %s', cache_path)
+ with open(cache_path) as f:
+ d.LoadCacheData(f.read())
+ # Delete cached file so that any exceptions cause it to be cleared.
+ os.unlink(cache_path)
+
+ if self._logcat_output_dir:
+ logcat_file = os.path.join(
+ self._logcat_output_dir,
+ '%s_%s' % (d.adb.GetDeviceSerial(),
+ datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S')))
+ monitor = logcat_monitor.LogcatMonitor(
+ d.adb, clear=True, output_file=logcat_file)
+ self._logcat_monitors.append(monitor)
+ monitor.Start()
+
+ self.parallel_devices.pMap(prepare_device)
+
+ @property
+ def blacklist(self):
+ return self._blacklist
+
+ @property
+ def concurrent_adb(self):
+ return self._concurrent_adb
+
+ @property
+ def devices(self):
+ # Initialize lazily so that host-only tests do not fail when no devices are
+ # attached.
+ if self._devices is None:
+ self._InitDevices()
+ return self._devices
+
+ @property
+ def max_tries(self):
+ return self._max_tries
+
+ @property
+ def parallel_devices(self):
+ return parallelizer.SyncParallelizer(self.devices)
+
+ @property
+ def recover_devices(self):
+ return self._recover_devices
+
+ @property
+ def skip_clear_data(self):
+ return self._skip_clear_data
+
+ @property
+ def tool(self):
+ return self._tool_name
+
+ @property
+ def trace_output(self):
+ return self._trace_output
+
+ #override
+ def TearDown(self):
+ if self.trace_output and self._trace_all:
+ instrumentation_tracing.stop_instrumenting()
+ elif self.trace_output:
+ self.DisableTracing()
+
+ # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+ # timeout, there's a high probability that ADB is non-responsive. In these
+ # cases, sending an ADB command will potentially take a long time to time
+ # out. Before this happens, the process will be hard-killed for not
+ # responding to SIGTERM fast enough.
+ if self._received_sigterm:
+ return
+
+ if not self._devices:
+ return
+
+ @handle_shard_failures_with(on_failure=self.BlacklistDevice)
+ def tear_down_device(d):
+ # Write the cache even when not using it so that it will be ready the
+ # first time that it is enabled. Writing it every time is also necessary
+ # so that an invalid cache can be flushed just by disabling it for one
+ # run.
+ cache_path = _DeviceCachePath(d)
+ if os.path.exists(os.path.dirname(cache_path)):
+ with open(cache_path, 'w') as f:
+ f.write(d.DumpCacheData())
+ logging.info('Wrote device cache: %s', cache_path)
+ else:
+ logging.warning(
+ 'Unable to write device cache as %s directory does not exist',
+ os.path.dirname(cache_path))
+
+ self.parallel_devices.pMap(tear_down_device)
+
+ for m in self._logcat_monitors:
+ try:
+ m.Stop()
+ m.Close()
+ _, temp_path = tempfile.mkstemp()
+ with open(m.output_file, 'r') as infile:
+ with open(temp_path, 'w') as outfile:
+ for line in infile:
+ outfile.write('Device(%s) %s' % (m.adb.GetDeviceSerial(), line))
+ shutil.move(temp_path, m.output_file)
+ except base_error.BaseError:
+ logging.exception('Failed to stop logcat monitor for %s',
+ m.adb.GetDeviceSerial())
+ except IOError:
+ logging.exception('Failed to locate logcat for device %s',
+ m.adb.GetDeviceSerial())
+
+ if self._logcat_output_file:
+ file_utils.MergeFiles(
+ self._logcat_output_file,
+ [m.output_file for m in self._logcat_monitors
+ if os.path.exists(m.output_file)])
+ shutil.rmtree(self._logcat_output_dir)
+
+ def BlacklistDevice(self, device, reason='local_device_failure'):
+ device_serial = device.adb.GetDeviceSerial()
+ if self._blacklist:
+ self._blacklist.Extend([device_serial], reason=reason)
+ with self._devices_lock:
+ self._devices = [d for d in self._devices if str(d) != device_serial]
+ logging.error('Device %s blacklisted: %s', device_serial, reason)
+ if not self._devices:
+ raise device_errors.NoDevicesError(
+ 'All devices were blacklisted due to errors')
+
+ @staticmethod
+ def DisableTracing():
+ if not trace_event.trace_is_enabled():
+ logging.warning('Tracing is not running.')
+ else:
+ trace_event.trace_disable()
+
+ def EnableTracing(self):
+ if trace_event.trace_is_enabled():
+ logging.warning('Tracing is already running.')
+ else:
+ trace_event.trace_enable(self._trace_output)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_gtest_run.py b/deps/v8/build/android/pylib/local/device/local_device_gtest_run.py
new file mode 100644
index 0000000000..76d3e1bb9b
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_gtest_run.py
@@ -0,0 +1,635 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import itertools
+import logging
+import os
+import posixpath
+import shutil
+import time
+
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import logcat_monitor
+from devil.android import ports
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.gtest import gtest_test_instance
+from pylib.local import local_test_server_spawner
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.utils import google_storage_helper
+from pylib.utils import logdog_helper
+from py_trace_event import trace_event
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+_MAX_INLINE_FLAGS_LENGTH = 50 # Arbitrarily chosen.
+_EXTRA_COMMAND_LINE_FILE = (
+ 'org.chromium.native_test.NativeTest.CommandLineFile')
+_EXTRA_COMMAND_LINE_FLAGS = (
+ 'org.chromium.native_test.NativeTest.CommandLineFlags')
+_EXTRA_STDOUT_FILE = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+ '.StdoutFile')
+_EXTRA_TEST = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+ '.Test')
+_EXTRA_TEST_LIST = (
+ 'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+ '.TestList')
+
+_MAX_SHARD_SIZE = 256
+_SECONDS_TO_NANOS = int(1e9)
+
+# The amount of time a test executable may run before it gets killed.
+_TEST_TIMEOUT_SECONDS = 30*60
+
+# Tests that use SpawnedTestServer must run the LocalTestServerSpawner on the
+# host machine.
+# TODO(jbudorick): Move this up to the test instance if the net test server is
+# handled outside of the APK for the remote_device environment.
+_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [
+ 'components_browsertests', 'content_unittests', 'content_browsertests',
+ 'net_unittests', 'services_unittests', 'unit_tests'
+]
+
+# No-op context manager. If we used Python 3, we could change this to
+# contextlib.ExitStack()
+class _NullContextManager(object):
+ def __enter__(self):
+ pass
+ def __exit__(self, *args):
+ pass
+
+
+def _GenerateSequentialFileNames(filename):
+ """Infinite generator of names: 'name.ext', 'name_1.ext', 'name_2.ext', ..."""
+ yield filename
+ base, ext = os.path.splitext(filename)
+ for i in itertools.count(1):
+ yield '%s_%d%s' % (base, i, ext)
+
+
+def _ExtractTestsFromFilter(gtest_filter):
+ """Returns the list of tests specified by the given filter.
+
+ Returns:
+ None if the device should be queried for the test list instead.
+ """
+ # Empty means all tests, - means exclude filter.
+ if not gtest_filter or '-' in gtest_filter:
+ return None
+
+ patterns = gtest_filter.split(':')
+ # For a single pattern, allow it even if it has a wildcard so long as the
+ # wildcard comes at the end and there is at least one . to prove the scope is
+ # not too large.
+ # This heuristic is not necessarily faster, but normally is.
+ if len(patterns) == 1 and patterns[0].endswith('*'):
+ no_suffix = patterns[0].rstrip('*')
+ if '*' not in no_suffix and '.' in no_suffix:
+ return patterns
+
+ if '*' in gtest_filter:
+ return None
+ return patterns
+
+
+class _ApkDelegate(object):
+ def __init__(self, test_instance, tool):
+ self._activity = test_instance.activity
+ self._apk_helper = test_instance.apk_helper
+ self._test_apk_incremental_install_json = (
+ test_instance.test_apk_incremental_install_json)
+ self._package = test_instance.package
+ self._runner = test_instance.runner
+ self._permissions = test_instance.permissions
+ self._suite = test_instance.suite
+ self._component = '%s/%s' % (self._package, self._runner)
+ self._extras = test_instance.extras
+ self._wait_for_java_debugger = test_instance.wait_for_java_debugger
+ self._tool = tool
+
+ def GetTestDataRoot(self, device):
+ # pylint: disable=no-self-use
+ return posixpath.join(device.GetExternalStoragePath(),
+ 'chromium_tests_root')
+
+ def Install(self, device):
+ if self._test_apk_incremental_install_json:
+ installer.Install(device, self._test_apk_incremental_install_json,
+ apk=self._apk_helper, permissions=self._permissions)
+ else:
+ device.Install(
+ self._apk_helper,
+ allow_downgrade=True,
+ reinstall=True,
+ permissions=self._permissions)
+
+ def ResultsDirectory(self, device):
+ return device.GetApplicationDataDirectory(self._package)
+
+ def Run(self, test, device, flags=None, **kwargs):
+ extras = dict(self._extras)
+
+ if ('timeout' in kwargs
+ and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras):
+ # Make sure the instrumentation doesn't kill the test before the
+ # scripts do. The provided timeout value is in seconds, but the
+ # instrumentation deals with nanoseconds because that's how Android
+ # handles time.
+ extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int(
+ kwargs['timeout'] * _SECONDS_TO_NANOS)
+
+ # pylint: disable=redefined-variable-type
+ command_line_file = _NullContextManager()
+ if flags:
+ if len(flags) > _MAX_INLINE_FLAGS_LENGTH:
+ command_line_file = device_temp_file.DeviceTempFile(device.adb)
+ device.WriteFile(command_line_file.name, '_ %s' % flags)
+ extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name
+ else:
+ extras[_EXTRA_COMMAND_LINE_FLAGS] = flags
+
+ test_list_file = _NullContextManager()
+ if test:
+ if len(test) > 1:
+ test_list_file = device_temp_file.DeviceTempFile(device.adb)
+ device.WriteFile(test_list_file.name, '\n'.join(test))
+ extras[_EXTRA_TEST_LIST] = test_list_file.name
+ else:
+ extras[_EXTRA_TEST] = test[0]
+ # pylint: enable=redefined-variable-type
+
+ stdout_file = device_temp_file.DeviceTempFile(
+ device.adb, dir=device.GetExternalStoragePath(), suffix='.gtest_out')
+ extras[_EXTRA_STDOUT_FILE] = stdout_file.name
+
+ if self._wait_for_java_debugger:
+ cmd = ['am', 'set-debug-app', '-w', self._package]
+ device.RunShellCommand(cmd, check_return=True)
+ logging.warning('*' * 80)
+ logging.warning('Waiting for debugger to attach to process: %s',
+ self._package)
+ logging.warning('*' * 80)
+
+ with command_line_file, test_list_file, stdout_file:
+ try:
+ device.StartInstrumentation(
+ self._component, extras=extras, raw=False, **kwargs)
+ except device_errors.CommandFailedError:
+ logging.exception('gtest shard failed.')
+ except device_errors.CommandTimeoutError:
+ logging.exception('gtest shard timed out.')
+ except device_errors.DeviceUnreachableError:
+ logging.exception('gtest shard device unreachable.')
+ except Exception:
+ device.ForceStop(self._package)
+ raise
+ # TODO(jbudorick): Remove this after resolving crbug.com/726880
+ logging.info(
+ '%s size on device: %s',
+ stdout_file.name, device.StatPath(stdout_file.name).get('st_size', 0))
+ return device.ReadFile(stdout_file.name).splitlines()
+
+ def PullAppFiles(self, device, files, directory):
+ device_dir = device.GetApplicationDataDirectory(self._package)
+ host_dir = os.path.join(directory, str(device))
+ for f in files:
+ device_file = posixpath.join(device_dir, f)
+ host_file = os.path.join(host_dir, *f.split(posixpath.sep))
+ for host_file in _GenerateSequentialFileNames(host_file):
+ if not os.path.exists(host_file):
+ break
+ device.PullFile(device_file, host_file)
+
+ def Clear(self, device):
+ device.ClearApplicationState(self._package, permissions=self._permissions)
+
+
+class _ExeDelegate(object):
+ def __init__(self, tr, dist_dir, tool):
+ self._host_dist_dir = dist_dir
+ self._exe_file_name = os.path.basename(dist_dir)[:-len('__dist')]
+ self._device_dist_dir = posixpath.join(
+ constants.TEST_EXECUTABLE_DIR, os.path.basename(dist_dir))
+ self._test_run = tr
+ self._tool = tool
+
+ def GetTestDataRoot(self, device):
+ # pylint: disable=no-self-use
+ # pylint: disable=unused-argument
+ return posixpath.join(constants.TEST_EXECUTABLE_DIR, 'chromium_tests_root')
+
+ def Install(self, device):
+ # TODO(jbudorick): Look into merging this with normal data deps pushing if
+ # executables become supported on nonlocal environments.
+ device.PushChangedFiles([(self._host_dist_dir, self._device_dist_dir)],
+ delete_device_stale=True)
+
+ def ResultsDirectory(self, device):
+ # pylint: disable=no-self-use
+ # pylint: disable=unused-argument
+ return constants.TEST_EXECUTABLE_DIR
+
+ def Run(self, test, device, flags=None, **kwargs):
+ tool = self._test_run.GetTool(device).GetTestWrapper()
+ if tool:
+ cmd = [tool]
+ else:
+ cmd = []
+ cmd.append(posixpath.join(self._device_dist_dir, self._exe_file_name))
+
+ if test:
+ cmd.append('--gtest_filter=%s' % ':'.join(test))
+ if flags:
+ # TODO(agrieve): This won't work if multiple flags are passed.
+ cmd.append(flags)
+ cwd = constants.TEST_EXECUTABLE_DIR
+
+ env = {
+ 'LD_LIBRARY_PATH': self._device_dist_dir
+ }
+
+ if self._tool != 'asan':
+ env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS
+
+ try:
+ gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+ external = device.GetExternalStoragePath()
+ env['GCOV_PREFIX'] = '%s/gcov' % external
+ env['GCOV_PREFIX_STRIP'] = gcov_strip_depth
+ except (device_errors.CommandFailedError, KeyError):
+ pass
+
+ # Executable tests return a nonzero exit code on test failure, which is
+ # fine from the test runner's perspective; thus check_return=False.
+ output = device.RunShellCommand(
+ cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs)
+ return output
+
+ def PullAppFiles(self, device, files, directory):
+ pass
+
+ def Clear(self, device):
+ device.KillAll(self._exe_file_name, blocking=True, timeout=30, quiet=True)
+
+
+class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
+  """Runs a gtest suite on locally attached devices.
+
+  APK- vs. executable-specific behavior (install, run, clear) is handled by
+  the delegate (_ApkDelegate or _ExeDelegate) chosen in __init__.
+  """
+
+  def __init__(self, env, test_instance):
+    assert isinstance(env, local_device_environment.LocalDeviceEnvironment)
+    assert isinstance(test_instance, gtest_test_instance.GtestTestInstance)
+    super(LocalDeviceGtestRun, self).__init__(env, test_instance)
+
+    # Pick the delegate that knows how to install/run this kind of suite.
+    # pylint: disable=redefined-variable-type
+    if self._test_instance.apk:
+      self._delegate = _ApkDelegate(self._test_instance, env.tool)
+    elif self._test_instance.exe_dist_dir:
+      self._delegate = _ExeDelegate(self, self._test_instance.exe_dist_dir,
+                                    self._env.tool)
+    # Either a generator of sequential perf-output filenames (one per test
+    # run) or an endless stream of None when perf output is disabled.
+    if self._test_instance.isolated_script_test_perf_output:
+      self._test_perf_output_filenames = _GenerateSequentialFileNames(
+          self._test_instance.isolated_script_test_perf_output)
+    else:
+      self._test_perf_output_filenames = itertools.repeat(None)
+    # pylint: enable=redefined-variable-type
+    # Names of tests that crashed; used by _CreateShards to isolate them.
+    self._crashes = set()
+    # Maps device serial -> list of test servers started for that device.
+    self._servers = collections.defaultdict(list)
+
+  #override
+  def TestPackage(self):
+    """Returns the suite name of the gtest being run."""
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    """Installs the test, pushes data deps, and starts servers per device."""
+    @local_device_environment.handle_shard_failures_with(
+        on_failure=self._env.BlacklistDevice)
+    @trace_event.traced
+    def individual_device_set_up(device, host_device_tuples):
+      def install_apk(dev):
+        # Install test APK.
+        self._delegate.Install(dev)
+
+      def push_test_data(dev):
+        # Push data dependencies.
+        device_root = self._delegate.GetTestDataRoot(dev)
+        host_device_tuples_substituted = [
+            (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+            for h, d in host_device_tuples]
+        dev.PushChangedFiles(
+            host_device_tuples_substituted,
+            delete_device_stale=True,
+            # Some gtest suites, e.g. unit_tests, have data dependencies that
+            # can take longer than the default timeout to push. See
+            # crbug.com/791632 for context.
+            timeout=600)
+        # With no deps to push, reset the data root to a clean empty dir.
+        if not host_device_tuples:
+          dev.RemovePath(device_root, force=True, recursive=True, rename=True)
+          dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+      def init_tool_and_start_servers(dev):
+        tool = self.GetTool(dev)
+        tool.CopyFiles(dev)
+        tool.SetupEnvironment()
+
+        self._servers[str(dev)] = []
+        if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER:
+          self._servers[str(dev)].append(
+              local_test_server_spawner.LocalTestServerSpawner(
+                  ports.AllocateTestServerPort(), dev, tool))
+
+        for s in self._servers[str(dev)]:
+          s.SetUp()
+
+      def bind_crash_handler(step, dev):
+        # Wrap each step so a system crash on the device triggers a retry.
+        return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+      steps = [
+          bind_crash_handler(s, device)
+          for s in (install_apk, push_test_data, init_tool_and_start_servers)]
+      if self._env.concurrent_adb:
+        reraiser_thread.RunAsync(steps)
+      else:
+        for step in steps:
+          step()
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+
+  #override
+  def _ShouldShard(self):
+    # gtest suites are always sharded across the attached devices.
+    return True
+
+  #override
+  def _CreateShards(self, tests):
+    """Splits |tests| into shards, isolating previously crashed tests."""
+    # _crashes are tests that might crash and make the tests in the same shard
+    # following the crashed testcase not run.
+    # Thus we need to create separate shards for each crashed testcase,
+    # so that other tests can be run.
+    device_count = len(self._env.devices)
+    shards = []
+
+    # Add shards with only one suspect testcase.
+    shards += [[crash] for crash in self._crashes if crash in tests]
+
+    # Delete suspect testcase from tests.
+    tests = [test for test in tests if not test in self._crashes]
+
+    # Round-robin the remaining tests across devices, then cap shard size.
+    for i in xrange(0, device_count):
+      unbounded_shard = tests[i::device_count]
+      shards += [unbounded_shard[j:j+_MAX_SHARD_SIZE]
+                 for j in xrange(0, len(unbounded_shard), _MAX_SHARD_SIZE)]
+    return shards
+
+  #override
+  def _GetTests(self):
+    """Returns the filtered, externally-sharded list of tests to run.
+
+    Raises:
+      device_errors.CommandFailedError: If no device could list tests.
+    """
+    if self._test_instance.extract_test_list_from_filter:
+      # When the exact list of tests to run is given via command-line (e.g. when
+      # locally iterating on a specific test), skip querying the device (which
+      # takes ~3 seconds).
+      tests = _ExtractTestsFromFilter(self._test_instance.gtest_filter)
+      if tests:
+        return tests
+
+    # Even when there's only one device, it still makes sense to retrieve the
+    # test list so that tests can be split up and run in batches rather than all
+    # at once (since test output is not streamed).
+    @local_device_environment.handle_shard_failures_with(
+        on_failure=self._env.BlacklistDevice)
+    def list_tests(dev):
+      timeout = 30
+      retries = 1
+      if self._test_instance.wait_for_java_debugger:
+        timeout = None
+
+      # Drop debugger flags: they would make listing block indefinitely.
+      flags = [
+          f for f in self._test_instance.flags
+          if f not in ['--wait-for-debugger', '--wait-for-java-debugger']
+      ]
+      flags.append('--gtest_list_tests')
+
+      # TODO(crbug.com/726880): Remove retries when no longer necessary.
+      for i in range(0, retries+1):
+        logging.info('flags:')
+        for f in flags:
+          logging.info('  %s', f)
+
+        raw_test_list = crash_handler.RetryOnSystemCrash(
+            lambda d: self._delegate.Run(
+                None, d, flags=' '.join(flags), timeout=timeout),
+            device=dev)
+        tests = gtest_test_instance.ParseGTestListTests(raw_test_list)
+        if not tests:
+          # Dump the raw output and logcat to aid diagnosing the failure.
+          logging.info('No tests found. Output:')
+          for l in raw_test_list:
+            logging.info('  %s', l)
+          logging.info('Logcat:')
+          for line in dev.adb.Logcat(dump=True):
+            logging.info(line)
+          dev.adb.Logcat(clear=True)
+          if i < retries:
+            logging.info('Retrying...')
+        else:
+          break
+      return tests
+
+    # Query all devices in case one fails.
+    test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+    # If all devices failed to list tests, raise an exception.
+    # Check that tl is not None and is not empty.
+    if all(not tl for tl in test_lists):
+      raise device_errors.CommandFailedError(
+          'Failed to list tests on any device')
+    # Union the per-device lists (devices should agree, but be tolerant).
+    tests = list(sorted(set().union(*[set(tl) for tl in test_lists if tl])))
+    tests = self._test_instance.FilterTests(tests)
+    tests = self._ApplyExternalSharding(
+        tests, self._test_instance.external_shard_index,
+        self._test_instance.total_external_shards)
+    return tests
+
+  def _UploadTestArtifacts(self, device, test_artifacts_dir):
+    """Zips and uploads the device's test artifacts to Google Storage.
+
+    Args:
+      device: The DeviceUtils instance to pull artifacts from.
+      test_artifacts_dir: Device temp dir holding artifacts, or falsy to skip.
+
+    Returns:
+      The upload link, or None if there was no artifacts dir.
+    """
+    # TODO(jbudorick): Reconcile this with the output manager once
+    # https://codereview.chromium.org/2933993002/ lands.
+    if test_artifacts_dir:
+      with tempfile_ext.NamedTemporaryDirectory() as test_artifacts_host_dir:
+        device.PullFile(test_artifacts_dir.name, test_artifacts_host_dir)
+        with tempfile_ext.NamedTemporaryDirectory() as temp_zip_dir:
+          zip_base_name = os.path.join(temp_zip_dir, 'test_artifacts')
+          test_artifacts_zip = shutil.make_archive(
+              zip_base_name, 'zip', test_artifacts_host_dir)
+          link = google_storage_helper.upload(
+              google_storage_helper.unique_name(
+                  'test_artifacts', device=device),
+              test_artifacts_zip,
+              bucket='%s/test_artifacts' % (
+                  self._test_instance.gs_test_artifacts_bucket))
+          logging.info('Uploading test artifacts to %s.', link)
+          return link
+    return None
+
+  #override
+  def _RunTest(self, device, test):
+    """Runs the shard |test| (a list of test names) on |device|.
+
+    Returns:
+      A (results, not_run_tests) tuple; not_run_tests is None when results
+      is empty.
+    """
+    # Run the test.
+    timeout = (self._test_instance.shard_timeout
+               * self.GetTool(device).GetTimeoutScale())
+    if self._test_instance.wait_for_java_debugger:
+      timeout = None
+    if self._test_instance.store_tombstones:
+      tombstones.ClearAllTombstones(device)
+    test_perf_output_filename = next(self._test_perf_output_filenames)
+
+    with device_temp_file.DeviceTempFile(
+        adb=device.adb,
+        dir=self._delegate.ResultsDirectory(device),
+        suffix='.xml') as device_tmp_results_file:
+      with contextlib_ext.Optional(
+          device_temp_file.NamedDeviceTemporaryDirectory(
+              adb=device.adb, dir='/sdcard/'),
+          self._test_instance.gs_test_artifacts_bucket) as test_artifacts_dir:
+        with (contextlib_ext.Optional(
+            device_temp_file.DeviceTempFile(
+                adb=device.adb, dir=self._delegate.ResultsDirectory(device)),
+            test_perf_output_filename)) as isolated_script_test_perf_output:
+
+          flags = list(self._test_instance.flags)
+          if self._test_instance.enable_xml_result_parsing:
+            flags.append('--gtest_output=xml:%s' % device_tmp_results_file.name)
+
+          if self._test_instance.gs_test_artifacts_bucket:
+            flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name)
+
+          if test_perf_output_filename:
+            flags.append('--isolated_script_test_perf_output=%s'
+                         % isolated_script_test_perf_output.name)
+
+          logging.info('flags:')
+          for f in flags:
+            logging.info('  %s', f)
+
+          stream_name = 'logcat_%s_%s_%s' % (
+              hash(tuple(test)),
+              time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+              device.serial)
+
+          # Capture logcat for the duration of the run and archive it.
+          with self._env.output_manager.ArchivedTempfile(
+              stream_name, 'logcat') as logcat_file:
+            with logcat_monitor.LogcatMonitor(
+                device.adb,
+                filter_specs=local_device_environment.LOGCAT_FILTERS,
+                output_file=logcat_file.name) as logmon:
+              with contextlib_ext.Optional(
+                  trace_event.trace(str(test)),
+                  self._env.trace_output):
+                output = self._delegate.Run(
+                    test, device, flags=' '.join(flags),
+                    timeout=timeout, retries=0)
+            logmon.Close()
+
+          if logcat_file.Link():
+            logging.info('Logcat saved to %s', logcat_file.Link())
+
+          if self._test_instance.enable_xml_result_parsing:
+            try:
+              gtest_xml = device.ReadFile(
+                  device_tmp_results_file.name,
+                  as_root=True)
+            except device_errors.CommandFailedError as e:
+              logging.warning(
+                  'Failed to pull gtest results XML file %s: %s',
+                  device_tmp_results_file.name,
+                  str(e))
+              gtest_xml = None
+
+          if test_perf_output_filename:
+            try:
+              device.PullFile(isolated_script_test_perf_output.name,
+                              test_perf_output_filename)
+            except device_errors.CommandFailedError as e:
+              logging.warning(
+                  'Failed to pull chartjson results %s: %s',
+                  isolated_script_test_perf_output.name, str(e))
+
+          test_artifacts_url = self._UploadTestArtifacts(device,
+                                                         test_artifacts_dir)
+
+    # Post-run per-device cleanup: servers, app files, app data.
+    for s in self._servers[str(device)]:
+      s.Reset()
+    if self._test_instance.app_files:
+      self._delegate.PullAppFiles(device, self._test_instance.app_files,
+                                  self._test_instance.app_file_dir)
+    if not self._env.skip_clear_data:
+      self._delegate.Clear(device)
+
+    for l in output:
+      logging.info(l)
+
+    # Parse the output.
+    # TODO(jbudorick): Transition test scripts away from parsing stdout.
+    if self._test_instance.enable_xml_result_parsing:
+      results = gtest_test_instance.ParseGTestXML(gtest_xml)
+    else:
+      results = gtest_test_instance.ParseGTestOutput(
+          output, self._test_instance.symbolizer, device.product_cpu_abi)
+
+    # Attach links (logcat, artifacts, tombstones) to individual results.
+    tombstones_url = None
+    for r in results:
+      if logcat_file:
+        r.SetLink('logcat', logcat_file.Link())
+
+      if self._test_instance.gs_test_artifacts_bucket:
+        r.SetLink('test_artifacts', test_artifacts_url)
+
+      if r.GetType() == base_test_result.ResultType.CRASH:
+        # Remember crashes so future shards isolate this test.
+        self._crashes.add(r.GetName())
+        if self._test_instance.store_tombstones:
+          if not tombstones_url:
+            resolved_tombstones = tombstones.ResolveTombstones(
+                device,
+                resolve_all_tombstones=True,
+                include_stack_symbols=False,
+                wipe_tombstones=True)
+            stream_name = 'tombstones_%s_%s' % (
+                time.strftime('%Y%m%dT%H%M%S', time.localtime()),
+                device.serial)
+            tombstones_url = logdog_helper.text(
+                stream_name, '\n'.join(resolved_tombstones))
+          r.SetLink('tombstones', tombstones_url)
+
+    # Compare requested tests (sans DISABLED_ prefix) against reported
+    # results to find tests that never ran (e.g. due to a crash).
+    tests_stripped_disabled_prefix = set()
+    for t in test:
+      tests_stripped_disabled_prefix.add(
+          gtest_test_instance.TestNameWithoutDisabledPrefix(t))
+    not_run_tests = tests_stripped_disabled_prefix.difference(
+        set(r.GetName() for r in results))
+    return results, list(not_run_tests) if results else None
+
+  #override
+  def TearDown(self):
+    """Tears down servers and tool environments on each device."""
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    @local_device_environment.handle_shard_failures
+    @trace_event.traced
+    def individual_device_tear_down(dev):
+      for s in self._servers.get(str(dev), []):
+        s.TearDown()
+
+      tool = self.GetTool(dev)
+      tool.CleanUpEnvironment()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py
new file mode 100644
index 0000000000..4332e74972
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -0,0 +1,965 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import copy
+import hashlib
+import json
+import logging
+import os
+import posixpath
+import re
+import sys
+import time
+
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android import device_temp_file
+from devil.android import flag_changer
+from devil.android.sdk import shared_prefs
+from devil.android import logcat_monitor
+from devil.android.tools import system_app
+from devil.android.tools import webview_app
+from devil.utils import reraiser_thread
+from incremental_install import installer
+from pylib import constants
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import output_manager
+from pylib.constants import host_paths
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.output import remote_output_manager
+from pylib.utils import instrumentation_tracing
+from pylib.utils import shared_preference_utils
+
+from py_trace_event import trace_event
+from py_trace_event import trace_time
+from py_utils import contextlib_ext
+from py_utils import tempfile_ext
+import tombstones
+
+
+with host_paths.SysPath(
+ os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0):
+ import jinja2 # pylint: disable=import-error
+ import markupsafe # pylint: disable=import-error,unused-import
+
+
+# Location of the jinja template used to render render-test result pages.
+_JINJA_TEMPLATE_DIR = os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation')
+_JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja'
+
+# Logcat tag used to bracket each test run (see _LogTestEndpoints).
+_TAG = 'test_runner_py'
+
+# (annotation name, timeout) pairs; timeouts are presumably in seconds
+# (10h for Manual down to 1m for SmallTest) -- confirm at the use site.
+TIMEOUT_ANNOTATIONS = [
+  ('Manual', 10 * 60 * 60),
+  ('IntegrationTest', 30 * 60),
+  ('External', 10 * 60),
+  ('EnormousTest', 10 * 60),
+  ('LargeTest', 5 * 60),
+  ('MediumTest', 3 * 60),
+  ('SmallTest', 1 * 60),
+]
+
+# Logcat filter specs, including this runner's own _TAG at info level.
+LOGCAT_FILTERS = ['*:e', 'chromium:v', 'cr_*:v', 'DEBUG:I',
+                  'StrictMode:D', '%s:I' % _TAG]
+
+# Instrumentation extras keys understood by the Java-side test runners.
+EXTRA_SCREENSHOT_FILE = (
+    'org.chromium.base.test.ScreenshotOnFailureStatement.ScreenshotFile')
+
+EXTRA_UI_CAPTURE_DIR = (
+    'org.chromium.base.test.util.Screenshooter.ScreenshotDir')
+
+EXTRA_TRACE_FILE = ('org.chromium.base.test.BaseJUnit4ClassRunner.TraceFile')
+
+_EXTRA_TEST_LIST = (
+    'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.TestList')
+
+FEATURE_ANNOTATION = 'Feature'
+RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest'
+
+# This needs to be kept in sync with formatting in |RenderUtils.imageName|
+RE_RENDER_IMAGE_NAME = re.compile(
+      r'(?P<test_class>\w+)\.'
+      r'(?P<description>[-\w]+)\.'
+      r'(?P<device_model_sdk>[-\w]+)\.png')
+
+@contextlib.contextmanager
+def _LogTestEndpoints(device, test_name):
+  """Logs START/END markers for |test_name| into the device's logcat.
+
+  The END marker is emitted even if the wrapped block raises.
+  """
+  device.RunShellCommand(
+      ['log', '-p', 'i', '-t', _TAG, 'START %s' % test_name],
+      check_return=True)
+  try:
+    yield
+  finally:
+    device.RunShellCommand(
+        ['log', '-p', 'i', '-t', _TAG, 'END %s' % test_name],
+        check_return=True)
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner
+# is deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+  """Returns True if a crash dialog attributable to |package_name| was seen.
+
+  Dismisses up to 10 crash dialogs as a side effect. Returns False if no
+  dialog was found or if dismissing dialogs failed.
+  """
+  # Dismiss any error dialogs. Limit the number in case we have an error
+  # loop or we are failing to dismiss.
+  try:
+    for _ in xrange(10):
+      package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1)
+      if not package:
+        return False
+      # Assume test package convention of ".test" suffix
+      if package in package_name:
+        return True
+  except device_errors.CommandFailedError:
+    logging.exception('Error while attempting to dismiss crash dialog.')
+  return False
+
+
+# Matches window-manager "mCurrentFocus" dumpsys lines that indicate an
+# Application Error / Not Responding dialog; group 2 captures the package.
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+    r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+class LocalDeviceInstrumentationTestRun(
+ local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    """Initializes per-run state (flag changers, context managers, prefs)."""
+    super(LocalDeviceInstrumentationTestRun, self).__init__(
+        env, test_instance)
+    # Maps str(device) -> FlagChanger for that device.
+    self._flag_changers = {}
+    # Entered manually in SetUp and exited in TearDown (see comments there).
+    self._replace_package_contextmanager = None
+    # SharedPrefs snapshots to restore in TearDown.
+    self._shared_prefs_to_restore = []
+    self._use_webview_contextmanager = None
+
+  #override
+  def TestPackage(self):
+    """Returns the suite name of the instrumentation tests being run."""
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+    """Prepares every device: installs APKs, pushes data, sets flags/prefs."""
+    @local_device_environment.handle_shard_failures_with(
+        self._env.BlacklistDevice)
+    @trace_event.traced
+    def individual_device_set_up(device, host_device_tuples):
+      # Setup steps are accumulated here, then wrapped with crash handling
+      # and executed (possibly concurrently) at the end.
+      steps = []
+
+      if self._test_instance.replace_system_package:
+        @trace_event.traced
+        def replace_package(dev):
+          # We need the context manager to be applied before modifying any
+          # shared preference files in case the replacement APK needs to be
+          # set up, and it needs to be applied while the test is running.
+          # Thus, it needs to be applied early during setup, but must still be
+          # applied during _RunTest, which isn't possible using 'with' without
+          # applying the context manager up in test_runner. Instead, we
+          # manually invoke its __enter__ and __exit__ methods in setup and
+          # teardown.
+          self._replace_package_contextmanager = system_app.ReplaceSystemApp(
+              dev, self._test_instance.replace_system_package.package,
+              self._test_instance.replace_system_package.replacement_apk)
+          # Pylint is not smart enough to realize that this field has
+          # an __enter__ method, and will complain loudly.
+          # pylint: disable=no-member
+          self._replace_package_contextmanager.__enter__()
+          # pylint: enable=no-member
+
+        steps.append(replace_package)
+
+      if self._test_instance.use_webview_provider:
+        @trace_event.traced
+        def use_webview_provider(dev):
+          # We need the context manager to be applied before modifying any
+          # shared preference files in case the replacement APK needs to be
+          # set up, and it needs to be applied while the test is running.
+          # Thus, it needs to be applied early during setup, but must still be
+          # applied during _RunTest, which isn't possible using 'with' without
+          # applying the context manager up in test_runner. Instead, we
+          # manually invoke its __enter__ and __exit__ methods in setup and
+          # teardown.
+          self._use_webview_contextmanager = webview_app.UseWebViewProvider(
+              dev, self._test_instance.use_webview_provider)
+          # Pylint is not smart enough to realize that this field has
+          # an __enter__ method, and will complain loudly.
+          # pylint: disable=no-member
+          self._use_webview_contextmanager.__enter__()
+          # pylint: enable=no-member
+
+        steps.append(use_webview_provider)
+
+      def install_helper(apk, permissions):
+        # Returns a step that installs |apk| with |permissions| granted.
+        @instrumentation_tracing.no_tracing
+        @trace_event.traced("apk_path")
+        def install_helper_internal(d, apk_path=apk.path):
+          # pylint: disable=unused-argument
+          d.Install(apk, permissions=permissions)
+        return install_helper_internal
+
+      def incremental_install_helper(apk, json_path, permissions):
+        # Returns a step that incrementally installs |apk| via |json_path|.
+        @trace_event.traced("apk_path")
+        def incremental_install_helper_internal(d, apk_path=apk.path):
+          # pylint: disable=unused-argument
+          installer.Install(d, json_path, apk=apk, permissions=permissions)
+        return incremental_install_helper_internal
+
+      if self._test_instance.apk_under_test:
+        permissions = self._test_instance.apk_under_test.GetPermissions()
+        if self._test_instance.apk_under_test_incremental_install_json:
+          steps.append(incremental_install_helper(
+                           self._test_instance.apk_under_test,
+                           self._test_instance.
+                               apk_under_test_incremental_install_json,
+                           permissions))
+        else:
+          steps.append(install_helper(self._test_instance.apk_under_test,
+                                      permissions))
+
+      permissions = self._test_instance.test_apk.GetPermissions()
+      if self._test_instance.test_apk_incremental_install_json:
+        steps.append(incremental_install_helper(
+                         self._test_instance.test_apk,
+                         self._test_instance.
+                             test_apk_incremental_install_json,
+                         permissions))
+      else:
+        steps.append(install_helper(self._test_instance.test_apk,
+                                    permissions))
+
+      # Additional APKs are installed without explicit permission grants.
+      steps.extend(install_helper(apk, None)
+                   for apk in self._test_instance.additional_apks)
+
+      @trace_event.traced
+      def set_debug_app(dev):
+        # Set debug app in order to enable reading command line flags on user
+        # builds
+        package_name = None
+        if self._test_instance.apk_under_test:
+          package_name = self._test_instance.apk_under_test.GetPackageName()
+        elif self._test_instance.test_apk:
+          package_name = self._test_instance.test_apk.GetPackageName()
+        else:
+          logging.error("Couldn't set debug app: no package name found")
+          return
+        cmd = ['am', 'set-debug-app', '--persistent']
+        if self._test_instance.wait_for_java_debugger:
+          cmd.append('-w')
+        cmd.append(package_name)
+        dev.RunShellCommand(cmd, check_return=True)
+
+      @trace_event.traced
+      def edit_shared_prefs(dev):
+        # Applies requested shared-pref edits, snapshotting originals first
+        # so TearDown can restore them.
+        for setting in self._test_instance.edit_shared_prefs:
+          shared_pref = shared_prefs.SharedPrefs(
+              dev, setting['package'], setting['filename'],
+              use_encrypted_path=setting.get('supports_encrypted_path', False))
+          pref_to_restore = copy.copy(shared_pref)
+          pref_to_restore.Load()
+          self._shared_prefs_to_restore.append(pref_to_restore)
+
+          shared_preference_utils.ApplySharedPreferenceSetting(
+              shared_pref, setting)
+
+      @trace_event.traced
+      def set_vega_permissions(dev):
+        # Normally, installation of VrCore automatically grants storage
+        # permissions. However, since VrCore is part of the system image on
+        # the Vega standalone headset, we don't install the APK as part of test
+        # setup. Instead, grant the permissions here so that it can take
+        # screenshots.
+        if dev.product_name == 'vega':
+          dev.GrantPermissions('com.google.vr.vrcore', [
+              'android.permission.WRITE_EXTERNAL_STORAGE',
+              'android.permission.READ_EXTERNAL_STORAGE'
+          ])
+
+      @instrumentation_tracing.no_tracing
+      def push_test_data(dev):
+        # Pushes data deps under <external storage>/chromium_tests_root.
+        device_root = posixpath.join(dev.GetExternalStoragePath(),
+                                     'chromium_tests_root')
+        host_device_tuples_substituted = [
+            (h, local_device_test_run.SubstituteDeviceRoot(d, device_root))
+            for h, d in host_device_tuples]
+        logging.info('instrumentation data deps:')
+        for h, d in host_device_tuples_substituted:
+          logging.info('%r -> %r', h, d)
+        dev.PushChangedFiles(host_device_tuples_substituted,
+                             delete_device_stale=True)
+        if not host_device_tuples_substituted:
+          dev.RunShellCommand(['rm', '-rf', device_root], check_return=True)
+          dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True)
+
+      @trace_event.traced
+      def create_flag_changer(dev):
+        if self._test_instance.flags:
+          self._CreateFlagChangerIfNeeded(dev)
+          logging.debug('Attempting to set flags: %r',
+                        self._test_instance.flags)
+          self._flag_changers[str(dev)].AddFlags(self._test_instance.flags)
+
+        valgrind_tools.SetChromeTimeoutScale(
+            dev, self._test_instance.timeout_scale)
+
+      steps += [
+          set_debug_app, edit_shared_prefs, push_test_data, create_flag_changer,
+          set_vega_permissions
+      ]
+
+      def bind_crash_handler(step, dev):
+        # Wrap each step so a system crash on the device triggers a retry.
+        return lambda: crash_handler.RetryOnSystemCrash(step, dev)
+
+      steps = [bind_crash_handler(s, device) for s in steps]
+
+      try:
+        if self._env.concurrent_adb:
+          reraiser_thread.RunAsync(steps)
+        else:
+          for step in steps:
+            step()
+        if self._test_instance.store_tombstones:
+          tombstones.ClearAllTombstones(device)
+      except device_errors.CommandFailedError:
+        # A bugreport can be large and take a while to generate, so only capture
+        # one if we're using a remote manager.
+        if isinstance(
+            self._env.output_manager,
+            remote_output_manager.RemoteOutputManager):
+          logging.error(
+              'Error when setting up device for tests. Taking a bugreport for '
+              'investigation. This may take a while...')
+          report_name = '%s.bugreport' % device.serial
+          with self._env.output_manager.ArchivedTempfile(
+              report_name, 'bug_reports') as report_file:
+            device.TakeBugReport(report_file.name)
+          logging.error('Bug report saved to %s', report_file.Link())
+        raise
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+    if self._test_instance.wait_for_java_debugger:
+      apk = self._test_instance.apk_under_test or self._test_instance.test_apk
+      logging.warning('*' * 80)
+      logging.warning('Waiting for debugger to attach to process: %s',
+                      apk.GetPackageName())
+      logging.warning('*' * 80)
+
+  #override
+  def TearDown(self):
+    """Restores each device's flags, prefs, and replaced packages."""
+    # By default, teardown will invoke ADB. When receiving SIGTERM due to a
+    # timeout, there's a high probability that ADB is non-responsive. In these
+    # cases, sending an ADB command will potentially take a long time to time
+    # out. Before this happens, the process will be hard-killed for not
+    # responding to SIGTERM fast enough.
+    if self._received_sigterm:
+      return
+
+    @local_device_environment.handle_shard_failures_with(
+        self._env.BlacklistDevice)
+    @trace_event.traced
+    def individual_device_tear_down(dev):
+      if str(dev) in self._flag_changers:
+        self._flag_changers[str(dev)].Restore()
+
+      # Remove package-specific configuration
+      dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True)
+
+      valgrind_tools.SetChromeTimeoutScale(dev, None)
+
+      # Restore any shared preference files that we stored during setup.
+      # This should be run sometime before the replace package contextmanager
+      # gets exited so we don't have to special case restoring files of
+      # replaced system apps.
+      for pref_to_restore in self._shared_prefs_to_restore:
+        pref_to_restore.Commit(force_commit=True)
+
+      # Context manager exit handlers are applied in reverse order
+      # of the enter handlers
+      if self._use_webview_contextmanager:
+        # See pylint-related comment above with __enter__()
+        # pylint: disable=no-member
+        self._use_webview_contextmanager.__exit__(*sys.exc_info())
+        # pylint: enable=no-member
+
+      if self._replace_package_contextmanager:
+        # See pylint-related comment above with __enter__()
+        # pylint: disable=no-member
+        self._replace_package_contextmanager.__exit__(*sys.exc_info())
+        # pylint: enable=no-member
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
+  def _CreateFlagChangerIfNeeded(self, device):
+    """Lazily creates a FlagChanger for |device| keyed by str(device).
+
+    Uses the apk-under-test's cmdline file when configured and known;
+    otherwise falls back to a generic 'test-cmdline-file'.
+    """
+    if str(device) not in self._flag_changers:
+      cmdline_file = 'test-cmdline-file'
+      if self._test_instance.use_apk_under_test_flags_file:
+        if self._test_instance.package_info:
+          cmdline_file = self._test_instance.package_info.cmdline_file
+        else:
+          logging.warning(
+              'No PackageInfo found, falling back to using flag file %s',
+              cmdline_file)
+      self._flag_changers[str(device)] = flag_changer.FlagChanger(
+          device, cmdline_file)
+
+  #override
+  def _CreateShards(self, tests):
+    # Instrumentation tests are not re-grouped; each entry is its own shard.
+    return tests
+
+  #override
+  def _GetTests(self):
+    """Returns the externally-sharded list of tests to run.
+
+    Prefers listing tests via the on-device JUnit4 runner when supported;
+    otherwise falls back to the test instance's own listing.
+    """
+    if self._test_instance.junit4_runner_supports_listing:
+      raw_tests = self._GetTestsFromRunner()
+      tests = self._test_instance.ProcessRawTests(raw_tests)
+    else:
+      tests = self._test_instance.GetTests()
+    tests = self._ApplyExternalSharding(
+        tests, self._test_instance.external_shard_index,
+        self._test_instance.total_external_shards)
+    return tests
+
+  #override
+  def _GetUniqueTestName(self, test):
+    """Returns a unique display name for |test| (delegated to the module)."""
+    return instrumentation_test_instance.GetUniqueTestName(test)
+
+ #override
+ def _RunTest(self, device, test):
+ extras = {}
+
+ flags_to_add = []
+ test_timeout_scale = None
+ if self._test_instance.coverage_directory:
+ coverage_basename = '%s.ec' % ('%s_group' % test[0]['method']
+ if isinstance(test, list) else test['method'])
+ extras['coverage'] = 'true'
+ coverage_directory = os.path.join(
+ device.GetExternalStoragePath(), 'chrome', 'test', 'coverage')
+ coverage_device_file = os.path.join(
+ coverage_directory, coverage_basename)
+ extras['coverageFile'] = coverage_device_file
+ # Save screenshot if screenshot dir is specified (save locally) or if
+ # a GS bucket is passed (save in cloud).
+ screenshot_device_file = device_temp_file.DeviceTempFile(
+ device.adb, suffix='.png', dir=device.GetExternalStoragePath())
+ extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name
+
+ # Set up the screenshot directory. This needs to be done for each test so
+ # that we only get screenshots created by that test. It has to be on
+ # external storage since the default location doesn't allow file creation
+ # from the instrumentation test app on Android L and M.
+ ui_capture_dir = device_temp_file.NamedDeviceTemporaryDirectory(
+ device.adb,
+ dir=device.GetExternalStoragePath())
+ extras[EXTRA_UI_CAPTURE_DIR] = ui_capture_dir.name
+
+ if self._env.trace_output:
+ trace_device_file = device_temp_file.DeviceTempFile(
+ device.adb, suffix='.json', dir=device.GetExternalStoragePath())
+ extras[EXTRA_TRACE_FILE] = trace_device_file.name
+
+ if isinstance(test, list):
+ if not self._test_instance.driver_apk:
+ raise Exception('driver_apk does not exist. '
+ 'Please build it and try again.')
+ if any(t.get('is_junit4') for t in test):
+ raise Exception('driver apk does not support JUnit4 tests')
+
+ def name_and_timeout(t):
+ n = instrumentation_test_instance.GetTestName(t)
+ i = self._GetTimeoutFromAnnotations(t['annotations'], n)
+ return (n, i)
+
+ test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
+
+ test_name = ','.join(test_names)
+ test_display_name = test_name
+ target = '%s/%s' % (
+ self._test_instance.driver_package,
+ self._test_instance.driver_name)
+ extras.update(
+ self._test_instance.GetDriverEnvironmentVars(
+ test_list=test_names))
+ timeout = sum(timeouts)
+ else:
+ test_name = instrumentation_test_instance.GetTestName(test)
+ test_display_name = self._GetUniqueTestName(test)
+ if test['is_junit4']:
+ target = '%s/%s' % (
+ self._test_instance.test_package,
+ self._test_instance.junit4_runner_class)
+ else:
+ target = '%s/%s' % (
+ self._test_instance.test_package,
+ self._test_instance.junit3_runner_class)
+ extras['class'] = test_name
+ if 'flags' in test and test['flags']:
+ flags_to_add.extend(test['flags'])
+ timeout = self._GetTimeoutFromAnnotations(
+ test['annotations'], test_display_name)
+
+ test_timeout_scale = self._GetTimeoutScaleFromAnnotations(
+ test['annotations'])
+ if test_timeout_scale and test_timeout_scale != 1:
+ valgrind_tools.SetChromeTimeoutScale(
+ device, test_timeout_scale * self._test_instance.timeout_scale)
+
+ if self._test_instance.wait_for_java_debugger:
+ timeout = None
+ logging.info('preparing to run %s: %s', test_display_name, test)
+
+ render_tests_device_output_dir = None
+ if _IsRenderTest(test):
+ # TODO(mikecase): Add DeviceTempDirectory class and use that instead.
+ render_tests_device_output_dir = posixpath.join(
+ device.GetExternalStoragePath(),
+ 'render_test_output_dir')
+ flags_to_add.append('--render-test-output-dir=%s' %
+ render_tests_device_output_dir)
+
+ if flags_to_add:
+ self._CreateFlagChangerIfNeeded(device)
+ self._flag_changers[str(device)].PushFlags(add=flags_to_add)
+
+ time_ms = lambda: int(time.time() * 1e3)
+ start_ms = time_ms()
+
+ stream_name = 'logcat_%s_%s_%s' % (
+ test_name.replace('#', '.'),
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+ device.serial)
+
+ with ui_capture_dir:
+ with self._env.output_manager.ArchivedTempfile(
+ stream_name, 'logcat') as logcat_file:
+ try:
+ with logcat_monitor.LogcatMonitor(
+ device.adb,
+ filter_specs=local_device_environment.LOGCAT_FILTERS,
+ output_file=logcat_file.name,
+ transform_func=self._test_instance.MaybeDeobfuscateLines
+ ) as logmon:
+ with _LogTestEndpoints(device, test_name):
+ with contextlib_ext.Optional(
+ trace_event.trace(test_name),
+ self._env.trace_output):
+ output = device.StartInstrumentation(
+ target, raw=True, extras=extras, timeout=timeout, retries=0)
+ finally:
+ logmon.Close()
+
+ if logcat_file.Link():
+ logging.info('Logcat saved to %s', logcat_file.Link())
+
+ duration_ms = time_ms() - start_ms
+
+ with contextlib_ext.Optional(
+ trace_event.trace('ProcessResults'),
+ self._env.trace_output):
+ output = self._test_instance.MaybeDeobfuscateLines(output)
+ # TODO(jbudorick): Make instrumentation tests output a JSON so this
+ # doesn't have to parse the output.
+ result_code, result_bundle, statuses = (
+ self._test_instance.ParseAmInstrumentRawOutput(output))
+ results = self._test_instance.GenerateTestResults(
+ result_code, result_bundle, statuses, start_ms, duration_ms,
+ device.product_cpu_abi, self._test_instance.symbolizer)
+
+ if self._env.trace_output:
+ self._SaveTraceData(trace_device_file, device, test['class'])
+
+ def restore_flags():
+ if flags_to_add:
+ self._flag_changers[str(device)].Restore()
+
+ def restore_timeout_scale():
+ if test_timeout_scale:
+ valgrind_tools.SetChromeTimeoutScale(
+ device, self._test_instance.timeout_scale)
+
+ def handle_coverage_data():
+ if self._test_instance.coverage_directory:
+ device.PullFile(coverage_directory,
+ self._test_instance.coverage_directory)
+ device.RunShellCommand(
+ 'rm -f %s' % posixpath.join(coverage_directory, '*'),
+ check_return=True, shell=True)
+
+ def handle_render_test_data():
+ if _IsRenderTest(test):
+ # Render tests do not cause test failure by default. So we have to
+ # check to see if any failure images were generated even if the test
+ # does not fail.
+ try:
+ self._ProcessRenderTestResults(
+ device, render_tests_device_output_dir, results)
+ finally:
+ device.RemovePath(render_tests_device_output_dir,
+ recursive=True, force=True)
+
+ def pull_ui_screen_captures():
+ screenshots = []
+ for filename in device.ListDirectory(ui_capture_dir.name):
+ if filename.endswith('.json'):
+ screenshots.append(pull_ui_screenshot(filename))
+ if screenshots:
+ json_archive_name = 'ui_capture_%s_%s.json' % (
+ test_name.replace('#', '.'),
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+ with self._env.output_manager.ArchivedTempfile(
+ json_archive_name, 'ui_capture', output_manager.Datatype.JSON
+ ) as json_archive:
+ json.dump(screenshots, json_archive)
+ for result in results:
+ result.SetLink('ui screenshot', json_archive.Link())
+
+ def pull_ui_screenshot(filename):
+ source_dir = ui_capture_dir.name
+ json_path = posixpath.join(source_dir, filename)
+ json_data = json.loads(device.ReadFile(json_path))
+ image_file_path = posixpath.join(source_dir, json_data['location'])
+ with self._env.output_manager.ArchivedTempfile(
+ json_data['location'], 'ui_capture', output_manager.Datatype.PNG
+ ) as image_archive:
+ device.PullFile(image_file_path, image_archive.name)
+ json_data['image_link'] = image_archive.Link()
+ return json_data
+
+ # While constructing the TestResult objects, we can parallelize several
+ # steps that involve ADB. These steps should NOT depend on any info in
+ # the results! Things such as whether the test CRASHED have not yet been
+ # determined.
+ post_test_steps = [restore_flags, restore_timeout_scale,
+ handle_coverage_data, handle_render_test_data,
+ pull_ui_screen_captures]
+ if self._env.concurrent_adb:
+ post_test_step_thread_group = reraiser_thread.ReraiserThreadGroup(
+ reraiser_thread.ReraiserThread(f) for f in post_test_steps)
+ post_test_step_thread_group.StartAll(will_block=True)
+ else:
+ for step in post_test_steps:
+ step()
+
+ for result in results:
+ if logcat_file:
+ result.SetLink('logcat', logcat_file.Link())
+
+ # Update the result name if the test used flags.
+ if flags_to_add:
+ for r in results:
+ if r.GetName() == test_name:
+ r.SetName(test_display_name)
+
+ # Add UNKNOWN results for any missing tests.
+ iterable_test = test if isinstance(test, list) else [test]
+ test_names = set(self._GetUniqueTestName(t) for t in iterable_test)
+ results_names = set(r.GetName() for r in results)
+ results.extend(
+ base_test_result.BaseTestResult(u, base_test_result.ResultType.UNKNOWN)
+ for u in test_names.difference(results_names))
+
+ # Update the result type if we detect a crash.
+ try:
+ if DidPackageCrashOnDevice(self._test_instance.test_package, device):
+ for r in results:
+ if r.GetType() == base_test_result.ResultType.UNKNOWN:
+ r.SetType(base_test_result.ResultType.CRASH)
+ except device_errors.CommandTimeoutError:
+ logging.warning('timed out when detecting/dismissing error dialogs')
+ # Attach screenshot to the test to help with debugging the dialog boxes.
+ self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+ results, 'dialog_box_screenshot')
+
+ # Handle failures by:
+ # - optionally taking a screenshot
+ # - logging the raw output at INFO level
+ # - clearing the application state while persisting permissions
+ if any(r.GetType() not in (base_test_result.ResultType.PASS,
+ base_test_result.ResultType.SKIP)
+ for r in results):
+ self._SaveScreenshot(device, screenshot_device_file, test_display_name,
+ results, 'post_test_screenshot')
+
+ logging.info('detected failure in %s. raw output:', test_display_name)
+ for l in output:
+ logging.info(' %s', l)
+ if (not self._env.skip_clear_data
+ and self._test_instance.package_info):
+ permissions = (
+ self._test_instance.apk_under_test.GetPermissions()
+ if self._test_instance.apk_under_test
+ else None)
+ device.ClearApplicationState(self._test_instance.package_info.package,
+ permissions=permissions)
+ else:
+ logging.debug('raw output from %s:', test_display_name)
+ for l in output:
+ logging.debug(' %s', l)
+ if self._test_instance.store_tombstones:
+ tombstones_url = None
+ for result in results:
+ if result.GetType() == base_test_result.ResultType.CRASH:
+ if not tombstones_url:
+ resolved_tombstones = tombstones.ResolveTombstones(
+ device,
+ resolve_all_tombstones=True,
+ include_stack_symbols=False,
+ wipe_tombstones=True,
+ tombstone_symbolizer=self._test_instance.symbolizer)
+ tombstone_filename = 'tombstones_%s_%s' % (
+ time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()),
+ device.serial)
+ with self._env.output_manager.ArchivedTempfile(
+ tombstone_filename, 'tombstones') as tombstone_file:
+ tombstone_file.write('\n'.join(resolved_tombstones))
+ result.SetLink('tombstones', tombstone_file.Link())
+ if self._env.concurrent_adb:
+ post_test_step_thread_group.JoinAll()
+ return results, None
+
+ def _GetTestsFromRunner(self):
+ test_apk_path = self._test_instance.test_apk.path
+ pickle_path = '%s-runner.pickle' % test_apk_path
+ # For incremental APKs, the code doesn't live in the apk, so instead check
+ # the timestamp of the target's .stamp file.
+ if self._test_instance.test_apk_incremental_install_json:
+ with open(self._test_instance.test_apk_incremental_install_json) as f:
+ data = json.load(f)
+ out_dir = constants.GetOutDirectory()
+ test_mtime = max(
+ os.path.getmtime(os.path.join(out_dir, p)) for p in data['dex_files'])
+ else:
+ test_mtime = os.path.getmtime(test_apk_path)
+
+ try:
+ return instrumentation_test_instance.GetTestsFromPickle(
+ pickle_path, test_mtime)
+ except instrumentation_test_instance.TestListPickleException as e:
+ logging.info('Could not get tests from pickle: %s', e)
+ logging.info('Getting tests by having %s list them.',
+ self._test_instance.junit4_runner_class)
+ def list_tests(d):
+ def _run(dev):
+ with device_temp_file.DeviceTempFile(
+ dev.adb, suffix='.json',
+ dir=dev.GetExternalStoragePath()) as dev_test_list_json:
+ junit4_runner_class = self._test_instance.junit4_runner_class
+ test_package = self._test_instance.test_package
+ extras = {
+ 'log': 'true',
+ # Workaround for https://github.com/mockito/mockito/issues/922
+ 'notPackage': 'net.bytebuddy',
+ }
+ extras[_EXTRA_TEST_LIST] = dev_test_list_json.name
+ target = '%s/%s' % (test_package, junit4_runner_class)
+ timeout = 120
+ if self._test_instance.wait_for_java_debugger:
+ timeout = None
+ test_list_run_output = dev.StartInstrumentation(
+ target, extras=extras, retries=0, timeout=timeout)
+ if any(test_list_run_output):
+ logging.error('Unexpected output while listing tests:')
+ for line in test_list_run_output:
+ logging.error(' %s', line)
+ with tempfile_ext.NamedTemporaryDirectory() as host_dir:
+ host_file = os.path.join(host_dir, 'list_tests.json')
+ dev.PullFile(dev_test_list_json.name, host_file)
+ with open(host_file, 'r') as host_file:
+ return json.load(host_file)
+
+ return crash_handler.RetryOnSystemCrash(_run, d)
+
+ raw_test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None)
+
+ # If all devices failed to list tests, raise an exception.
+ # Check that tl is not None and is not empty.
+ if all(not tl for tl in raw_test_lists):
+ raise device_errors.CommandFailedError(
+ 'Failed to list tests on any device')
+
+ # Get the first viable list of raw tests
+ raw_tests = [tl for tl in raw_test_lists if tl][0]
+
+ instrumentation_test_instance.SaveTestsToPickle(pickle_path, raw_tests)
+ return raw_tests
+
+ def _SaveTraceData(self, trace_device_file, device, test_class):
+ trace_host_file = self._env.trace_output
+
+ if device.FileExists(trace_device_file.name):
+ try:
+ java_trace_json = device.ReadFile(trace_device_file.name)
+ except IOError:
+ raise Exception('error pulling trace file from device')
+ finally:
+ trace_device_file.close()
+
+ process_name = '%s (device %s)' % (test_class, device.serial)
+ process_hash = int(hashlib.md5(process_name).hexdigest()[:6], 16)
+
+ java_trace = json.loads(java_trace_json)
+ java_trace.sort(key=lambda event: event['ts'])
+
+ get_date_command = 'echo $EPOCHREALTIME'
+ device_time = device.RunShellCommand(get_date_command, single_line=True)
+ device_time = float(device_time) * 1e6
+ system_time = trace_time.Now()
+ time_difference = system_time - device_time
+
+ threads_to_add = set()
+ for event in java_trace:
+ # Ensure thread ID and thread name will be linked in the metadata.
+ threads_to_add.add((event['tid'], event['name']))
+
+ event['pid'] = process_hash
+
+ # Adjust time stamp to align with Python trace times (from
+ # trace_time.Now()).
+ event['ts'] += time_difference
+
+ for tid, thread_name in threads_to_add:
+ thread_name_metadata = {'pid': process_hash, 'tid': tid,
+ 'ts': 0, 'ph': 'M', 'cat': '__metadata',
+ 'name': 'thread_name',
+ 'args': {'name': thread_name}}
+ java_trace.append(thread_name_metadata)
+
+ process_name_metadata = {'pid': process_hash, 'tid': 0, 'ts': 0,
+ 'ph': 'M', 'cat': '__metadata',
+ 'name': 'process_name',
+ 'args': {'name': process_name}}
+ java_trace.append(process_name_metadata)
+
+ java_trace_json = json.dumps(java_trace)
+ java_trace_json = java_trace_json.rstrip(' ]')
+
+ with open(trace_host_file, 'r') as host_handle:
+ host_contents = host_handle.readline()
+
+ if host_contents:
+ java_trace_json = ',%s' % java_trace_json.lstrip(' [')
+
+ with open(trace_host_file, 'a') as host_handle:
+ host_handle.write(java_trace_json)
+
+ def _SaveScreenshot(self, device, screenshot_device_file, test_name, results,
+ link_name):
+ screenshot_filename = '%s-%s.png' % (
+ test_name, time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()))
+ if device.FileExists(screenshot_device_file.name):
+ with self._env.output_manager.ArchivedTempfile(
+ screenshot_filename, 'screenshot',
+ output_manager.Datatype.PNG) as screenshot_host_file:
+ try:
+ device.PullFile(screenshot_device_file.name,
+ screenshot_host_file.name)
+ finally:
+ screenshot_device_file.close()
+ for result in results:
+ result.SetLink(link_name, screenshot_host_file.Link())
+
+ def _ProcessRenderTestResults(
+ self, device, render_tests_device_output_dir, results):
+
+ failure_images_device_dir = posixpath.join(
+ render_tests_device_output_dir, 'failures')
+ if not device.FileExists(failure_images_device_dir):
+ return
+
+ diff_images_device_dir = posixpath.join(
+ render_tests_device_output_dir, 'diffs')
+
+ golden_images_device_dir = posixpath.join(
+ render_tests_device_output_dir, 'goldens')
+
+ for failure_filename in device.ListDirectory(failure_images_device_dir):
+
+ with self._env.output_manager.ArchivedTempfile(
+ 'fail_%s' % failure_filename, 'render_tests',
+ output_manager.Datatype.PNG) as failure_image_host_file:
+ device.PullFile(
+ posixpath.join(failure_images_device_dir, failure_filename),
+ failure_image_host_file.name)
+ failure_link = failure_image_host_file.Link()
+
+ golden_image_device_file = posixpath.join(
+ golden_images_device_dir, failure_filename)
+ if device.PathExists(golden_image_device_file):
+ with self._env.output_manager.ArchivedTempfile(
+ 'golden_%s' % failure_filename, 'render_tests',
+ output_manager.Datatype.PNG) as golden_image_host_file:
+ device.PullFile(
+ golden_image_device_file, golden_image_host_file.name)
+ golden_link = golden_image_host_file.Link()
+ else:
+ golden_link = ''
+
+ diff_image_device_file = posixpath.join(
+ diff_images_device_dir, failure_filename)
+ if device.PathExists(diff_image_device_file):
+ with self._env.output_manager.ArchivedTempfile(
+ 'diff_%s' % failure_filename, 'render_tests',
+ output_manager.Datatype.PNG) as diff_image_host_file:
+ device.PullFile(
+ diff_image_device_file, diff_image_host_file.name)
+ diff_link = diff_image_host_file.Link()
+ else:
+ diff_link = ''
+
+ jinja2_env = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR),
+ trim_blocks=True)
+ template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME)
+ # pylint: disable=no-member
+ processed_template_output = template.render(
+ test_name=failure_filename,
+ failure_link=failure_link,
+ golden_link=golden_link,
+ diff_link=diff_link)
+
+ with self._env.output_manager.ArchivedTempfile(
+ '%s.html' % failure_filename, 'render_tests',
+ output_manager.Datatype.HTML) as html_results:
+ html_results.write(processed_template_output)
+ html_results.flush()
+ for result in results:
+ result.SetLink(failure_filename, html_results.Link())
+
+ #override
+ def _ShouldRetry(self, test, result):
+ # We've tried to disable retries in the past with mixed results.
+ # See crbug.com/619055 for historical context and crbug.com/797002
+ # for ongoing efforts.
+ del test, result
+ return True
+
+ #override
+ def _ShouldShard(self):
+ return True
+
+ @classmethod
+ def _GetTimeoutScaleFromAnnotations(cls, annotations):
+ try:
+ return int(annotations.get('TimeoutScale', {}).get('value', 1))
+ except ValueError as e:
+ logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e))
+ return 1
+
+ @classmethod
+ def _GetTimeoutFromAnnotations(cls, annotations, test_name):
+ for k, v in TIMEOUT_ANNOTATIONS:
+ if k in annotations:
+ timeout = v
+ break
+ else:
+ logging.warning('Using default 1 minute timeout for %s', test_name)
+ timeout = 60
+
+ timeout *= cls._GetTimeoutScaleFromAnnotations(annotations)
+
+ return timeout
+
+
+def _IsRenderTest(test):
+ """Determines if a test or list of tests has a RenderTest amongst them."""
+ if not isinstance(test, list):
+ test = [test]
+ return any([RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get(
+ FEATURE_ANNOTATION, {}).get('value', ()) for t in test])
diff --git a/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
new file mode 100755
index 0000000000..fb96ee6bbd
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_instrumentation_test_run_test.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env vpython
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for local_device_instrumentation_test_run."""
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.base import mock_environment
+from pylib.base import mock_test_instance
+from pylib.local.device import local_device_instrumentation_test_run
+
+class LocalDeviceInstrumentationTestRunTest(unittest.TestCase):
+
+ # TODO(crbug.com/797002): Decide whether the _ShouldRetry hook is worth
+ # retaining and remove these tests if not.
+
+ def testShouldRetry_failure(self):
+ env = mock_environment.MockEnvironment()
+ ti = mock_test_instance.MockTestInstance()
+ obj = (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, ti))
+ test = {
+ 'annotations': {},
+ 'class': 'SadTest',
+ 'method': 'testFailure',
+ 'is_junit4': True,
+ }
+ result = base_test_result.BaseTestResult(
+ 'SadTest.testFailure', base_test_result.ResultType.FAIL)
+ self.assertTrue(obj._ShouldRetry(test, result))
+
+ def testShouldRetry_retryOnFailure(self):
+ env = mock_environment.MockEnvironment()
+ ti = mock_test_instance.MockTestInstance()
+ obj = (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, ti))
+ test = {
+ 'annotations': {'RetryOnFailure': None},
+ 'class': 'SadTest',
+ 'method': 'testRetryOnFailure',
+ 'is_junit4': True,
+ }
+ result = base_test_result.BaseTestResult(
+ 'SadTest.testRetryOnFailure', base_test_result.ResultType.FAIL)
+ self.assertTrue(obj._ShouldRetry(test, result))
+
+ def testShouldRetry_notRun(self):
+ env = mock_environment.MockEnvironment()
+ ti = mock_test_instance.MockTestInstance()
+ obj = (local_device_instrumentation_test_run
+ .LocalDeviceInstrumentationTestRun(env, ti))
+ test = {
+ 'annotations': {},
+ 'class': 'SadTest',
+ 'method': 'testNotRun',
+ 'is_junit4': True,
+ }
+ result = base_test_result.BaseTestResult(
+ 'SadTest.testNotRun', base_test_result.ResultType.NOTRUN)
+ self.assertTrue(obj._ShouldRetry(test, result))
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py
new file mode 100644
index 0000000000..2a1520e003
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_linker_test_run.py
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import sys
+import traceback
+
+from pylib.base import base_test_result
+from pylib.linker import test_case
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+
+
+class LinkerExceptionTestResult(base_test_result.BaseTestResult):
+ """Test result corresponding to a python exception in a host-custom test."""
+
+ def __init__(self, test_name, exc_info):
+ """Constructs a LinkerExceptionTestResult object.
+
+ Args:
+ test_name: name of the test which raised an exception.
+ exc_info: exception info, ostensibly from sys.exc_info().
+ """
+ exc_type, exc_value, exc_traceback = exc_info
+ trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+ exc_traceback))
+ log_msg = 'Exception:\n' + trace_info
+
+ super(LinkerExceptionTestResult, self).__init__(
+ test_name,
+ base_test_result.ResultType.FAIL,
+ log="%s %s" % (exc_type, log_msg))
+
+
+class LocalDeviceLinkerTestRun(local_device_test_run.LocalDeviceTestRun):
+
+ def _CreateShards(self, tests):
+ return tests
+
+ def _GetTests(self):
+ return self._test_instance.GetTests()
+
+ def _GetUniqueTestName(self, test):
+ return test.qualified_name
+
+ def _RunTest(self, device, test):
+ assert isinstance(test, test_case.LinkerTestCaseBase)
+
+ try:
+ result = test.Run(device)
+ except Exception: # pylint: disable=broad-except
+ logging.exception('Caught exception while trying to run test: ' +
+ test.tagged_name)
+ exc_info = sys.exc_info()
+ result = LinkerExceptionTestResult(test.tagged_name, exc_info)
+
+ return result, None
+
+ def SetUp(self):
+ @local_device_environment.handle_shard_failures_with(
+ on_failure=self._env.BlacklistDevice)
+ def individual_device_set_up(dev):
+ dev.Install(self._test_instance.test_apk)
+
+ self._env.parallel_devices.pMap(individual_device_set_up)
+
+ def _ShouldShard(self):
+ return True
+
+ def TearDown(self):
+ pass
+
+ def TestPackage(self):
+ pass
diff --git a/deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py
new file mode 100644
index 0000000000..fe178c8fdb
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_monkey_test_run.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from devil.android import device_errors
+from devil.android.sdk import intent
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.local.device import local_device_test_run
+
+
+_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
+
+class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun):
+ def __init__(self, env, test_instance):
+ super(LocalDeviceMonkeyTestRun, self).__init__(env, test_instance)
+
+ def TestPackage(self):
+ return 'monkey'
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def _RunTest(self, device, test):
+ device.ClearApplicationState(self._test_instance.package)
+
+ # Chrome crashes are not always caught by Monkey test runner.
+ # Launch Chrome and verify Chrome has the same PID before and after
+ # the test.
+ device.StartActivity(
+ intent.Intent(package=self._test_instance.package,
+ activity=self._test_instance.activity,
+ action='android.intent.action.MAIN'),
+ blocking=True, force_stop=True)
+ before_pids = device.GetPids(self._test_instance.package)
+
+ output = ''
+ if before_pids:
+ if len(before_pids.get(self._test_instance.package, [])) > 1:
+ raise Exception(
+ 'At most one instance of process %s expected but found pids: '
+ '%s' % (self._test_instance.package, before_pids))
+ output = '\n'.join(self._LaunchMonkeyTest(device))
+ after_pids = device.GetPids(self._test_instance.package)
+
+ crashed = True
+ if not self._test_instance.package in before_pids:
+ logging.error('Failed to start the process.')
+ elif not self._test_instance.package in after_pids:
+ logging.error('Process %s has died.',
+ before_pids[self._test_instance.package])
+ elif (before_pids[self._test_instance.package] !=
+ after_pids[self._test_instance.package]):
+ logging.error('Detected process restart %s -> %s',
+ before_pids[self._test_instance.package],
+ after_pids[self._test_instance.package])
+ else:
+ crashed = False
+
+ success_pattern = 'Events injected: %d' % self._test_instance.event_count
+ if success_pattern in output and not crashed:
+ result = base_test_result.BaseTestResult(
+ test, base_test_result.ResultType.PASS, log=output)
+ else:
+ result = base_test_result.BaseTestResult(
+ test, base_test_result.ResultType.FAIL, log=output)
+ if 'chrome' in self._test_instance.package:
+ logging.warning('Starting MinidumpUploadService...')
+ # TODO(jbudorick): Update this after upstreaming.
+ minidump_intent = intent.Intent(
+ action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+ package=self._test_instance.package,
+ activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+ try:
+ device.RunShellCommand(
+ ['am', 'startservice'] + minidump_intent.am_args,
+ as_root=True, check_return=True)
+ except device_errors.CommandFailedError:
+ logging.exception('Failed to start MinidumpUploadService')
+
+ return result, None
+
+ #override
+ def TearDown(self):
+ pass
+
+ #override
+ def _CreateShards(self, tests):
+ return tests
+
+ #override
+ def _ShouldShard(self):
+ # TODO(mikecase): Run Monkey test concurrently on each attached device.
+ return False
+
+ #override
+ def _GetTests(self):
+ return ['MonkeyTest']
+
+ def _LaunchMonkeyTest(self, device):
+ try:
+ cmd = ['monkey',
+ '-p', self._test_instance.package,
+ '--throttle', str(self._test_instance.throttle),
+ '-s', str(self._test_instance.seed),
+ '--monitor-native-crashes',
+ '--kill-process-after-error']
+ for category in self._test_instance.categories:
+ cmd.extend(['-c', category])
+ for _ in range(self._test_instance.verbose_count):
+ cmd.append('-v')
+ cmd.append(str(self._test_instance.event_count))
+ return device.RunShellCommand(
+ cmd, timeout=self._test_instance.timeout, check_return=True)
+ finally:
+ try:
+ # Kill the monkey test process on the device. If you manually
+ # interrupt the test run, this will prevent the monkey test from
+ # continuing to run.
+ device.KillAll('com.android.commands.monkey')
+ except device_errors.CommandFailedError:
+ pass
diff --git a/deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py
new file mode 100644
index 0000000000..bc828408a0
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_perf_test_run.py
@@ -0,0 +1,538 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import io
+import json
+import logging
+import os
+import pickle
+import shutil
+import tempfile
+import threading
+import time
+import zipfile
+
+from devil.android import battery_utils
+from devil.android import device_errors
+from devil.android import device_list
+from devil.android import device_utils
+from devil.android import forwarder
+from devil.android.tools import device_recovery
+from devil.android.tools import device_status
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+from devil.utils import reraiser_thread
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.constants import host_paths
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from py_trace_event import trace_event
+from py_utils import contextlib_ext
+
+
+class HeartBeat(object):
+
+ def __init__(self, shard, wait_time=60*10):
+ """ HeartBeat Logger constructor.
+
+ Args:
+ shard: A perf test runner device shard.
+ wait_time: time to wait between heartbeat messages.
+ """
+ self._shard = shard
+ self._running = False
+ self._timer = None
+ self._wait_time = wait_time
+
+ def Start(self):
+ if not self._running:
+ self._timer = threading.Timer(self._wait_time, self._LogMessage)
+ self._timer.start()
+ self._running = True
+
+ def Stop(self):
+ if self._running:
+ self._timer.cancel()
+ self._running = False
+
+ def _LogMessage(self):
+ logging.info('Currently working on test %s', self._shard.current_test)
+ self._timer = threading.Timer(self._wait_time, self._LogMessage)
+ self._timer.start()
+
+
+class TestShard(object):
+ def __init__(self, env, test_instance, tests, retries=3, timeout=None):
+ logging.info('Create shard for the following tests:')
+ for t in tests:
+ logging.info(' %s', t)
+ self._current_test = None
+ self._env = env
+ self._heart_beat = HeartBeat(self)
+ self._index = None
+ self._output_dir = None
+ self._retries = retries
+ self._test_instance = test_instance
+ self._tests = tests
+ self._timeout = timeout
+
+ def _TestSetUp(self, test):
+ if (self._test_instance.collect_chartjson_data
+ or self._tests[test].get('archive_output_dir')):
+ self._output_dir = tempfile.mkdtemp()
+
+ self._current_test = test
+ self._heart_beat.Start()
+
+ def _RunSingleTest(self, test):
+ self._test_instance.WriteBuildBotJson(self._output_dir)
+
+ timeout = self._tests[test].get('timeout', self._timeout)
+ cmd = self._CreateCmd(test)
+ cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT)
+
+ self._LogTest(test, cmd, timeout)
+
+ try:
+ start_time = time.time()
+
+ with contextlib_ext.Optional(
+ trace_event.trace(test),
+ self._env.trace_output):
+ exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+ cmd, timeout, cwd=cwd, shell=True)
+ end_time = time.time()
+ chart_json_output = self._test_instance.ReadChartjsonOutput(
+ self._output_dir)
+ if exit_code == 0:
+ result_type = base_test_result.ResultType.PASS
+ else:
+ result_type = base_test_result.ResultType.FAIL
+ except cmd_helper.TimeoutError as e:
+ end_time = time.time()
+ exit_code = -1
+ output = e.output
+ chart_json_output = ''
+ result_type = base_test_result.ResultType.TIMEOUT
+ return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code,
+ output, chart_json_output, result_type)
+
+ def _CreateCmd(self, test):
+ cmd = []
+ if self._test_instance.dry_run:
+ cmd.append('echo')
+ cmd.append(self._tests[test]['cmd'])
+ if self._output_dir:
+ cmd.append('--output-dir=%s' % self._output_dir)
+ return ' '.join(self._ExtendCmd(cmd))
+
+ def _ExtendCmd(self, cmd): # pylint: disable=no-self-use
+ return cmd
+
+ def _LogTest(self, _test, _cmd, _timeout):
+ raise NotImplementedError
+
+ def _LogTestExit(self, test, exit_code, duration):
+ # pylint: disable=no-self-use
+ logging.info('%s : exit_code=%d in %d secs.', test, exit_code, duration)
+
+ def _ExtendPersistedResult(self, persisted_result):
+ raise NotImplementedError
+
+ def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code,
+ output, chart_json_output, result_type):
+ if exit_code is None:
+ exit_code = -1
+
+ self._LogTestExit(test, exit_code, end_time - start_time)
+
+ archive_bytes = (self._ArchiveOutputDir()
+ if self._tests[test].get('archive_output_dir')
+ else None)
+ persisted_result = {
+ 'name': test,
+ 'output': [output],
+ 'chartjson': chart_json_output,
+ 'archive_bytes': archive_bytes,
+ 'exit_code': exit_code,
+ 'result_type': result_type,
+ 'start_time': start_time,
+ 'end_time': end_time,
+ 'total_time': end_time - start_time,
+ 'cmd': cmd,
+ }
+ self._ExtendPersistedResult(persisted_result)
+ self._SaveResult(persisted_result)
+ return result_type
+
+ def _ArchiveOutputDir(self):
+ """Archive all files in the output dir, and return as compressed bytes."""
+ with io.BytesIO() as archive:
+ with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as contents:
+ num_files = 0
+ for absdir, _, files in os.walk(self._output_dir):
+ reldir = os.path.relpath(absdir, self._output_dir)
+ for filename in files:
+ src_path = os.path.join(absdir, filename)
+ # We use normpath to turn './file.txt' into just 'file.txt'.
+ dst_path = os.path.normpath(os.path.join(reldir, filename))
+ contents.write(src_path, dst_path)
+ num_files += 1
+ if num_files:
+ logging.info('%d files in the output dir were archived.', num_files)
+ else:
+ logging.warning('No files in the output dir. Archive is empty.')
+ return archive.getvalue()
+
+ @staticmethod
+ def _SaveResult(result):
+ pickled = os.path.join(constants.PERF_OUTPUT_DIR, result['name'])
+ if os.path.exists(pickled):
+ with file(pickled, 'r') as f:
+ previous = pickle.load(f)
+ result['output'] = previous['output'] + result['output']
+ with file(pickled, 'w') as f:
+ pickle.dump(result, f)
+
+ def _TestTearDown(self):
+ if self._output_dir:
+ shutil.rmtree(self._output_dir, ignore_errors=True)
+ self._output_dir = None
+ self._heart_beat.Stop()
+ self._current_test = None
+
+ @property
+ def current_test(self):
+ return self._current_test
+
+
class DeviceTestShard(TestShard):
  """A shard of perf tests that runs against a single attached device."""

  def __init__(
      self, env, test_instance, device, index, tests, retries=3, timeout=None):
    super(DeviceTestShard, self).__init__(
        env, test_instance, tests, retries, timeout)
    self._battery = battery_utils.BatteryUtils(device) if device else None
    self._device = device
    self._index = index

  @local_device_environment.handle_shard_failures
  def RunTestsOnShard(self):
    """Runs each test in the shard, retrying and recovering on failure.

    Returns:
      A TestRunResults with one result per test.
    """
    results = base_test_result.TestRunResults()
    for test in self._tests:
      tries_left = self._retries
      result_type = None
      while (result_type != base_test_result.ResultType.PASS
             and tries_left > 0):
        try:
          self._TestSetUp(test)
          result_type = self._RunSingleTest(test)
        except device_errors.CommandTimeoutError:
          result_type = base_test_result.ResultType.TIMEOUT
        except (device_errors.CommandFailedError,
                device_errors.DeviceUnreachableError):
          logging.exception('Exception when executing %s.', test)
          result_type = base_test_result.ResultType.FAIL
        finally:
          # Tear down even when setup/run raised or timed out.
          self._TestTearDown()
        if result_type != base_test_result.ResultType.PASS:
          # Attempt device recovery before consuming a retry.
          try:
            device_recovery.RecoverDevice(self._device, self._env.blacklist)
          except device_errors.CommandTimeoutError:
            logging.exception(
                'Device failed to recover after failing %s.', test)
          tries_left -= 1

      results.AddResult(base_test_result.BaseTestResult(test, result_type))
    return results

  def _LogTestExit(self, test, exit_code, duration):
    logging.info('%s : exit_code=%d in %d secs on device %s',
                 test, exit_code, duration, str(self._device))

  @trace_event.traced
  def _TestSetUp(self, test):
    """Verifies device health (online, battery, temperature, screen)."""
    if not self._device.IsOnline():
      msg = 'Device %s is unresponsive.' % str(self._device)
      raise device_errors.DeviceUnreachableError(msg)

    logging.info('Charge level: %s%%',
                 str(self._battery.GetBatteryInfo().get('level')))
    if self._test_instance.min_battery_level:
      self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level)

    logging.info('temperature: %s (0.1 C)',
                 str(self._battery.GetBatteryInfo().get('temperature')))
    if self._test_instance.max_battery_temp:
      self._battery.LetBatteryCoolToTemperature(
          self._test_instance.max_battery_temp)

    if not self._device.IsScreenOn():
      self._device.SetScreen(True)

    super(DeviceTestShard, self)._TestSetUp(test)

  def _LogTest(self, test, cmd, timeout):
    logging.debug("Running %s with command '%s' on shard %s with timeout %d",
                  test, cmd, str(self._index), timeout)

  def _ExtendCmd(self, cmd):
    # Pin the test command to this shard's device.
    cmd.extend(['--device=%s' % str(self._device)])
    return cmd

  def _ExtendPersistedResult(self, persisted_result):
    persisted_result['host_test'] = False
    persisted_result['device'] = str(self._device)

  @trace_event.traced
  def _TestTearDown(self):
    """Unmaps forwarded ports, then runs the base teardown."""
    try:
      logging.info('Unmapping device ports for %s.', self._device)
      forwarder.Forwarder.UnmapAllDevicePorts(self._device)
    except Exception: # pylint: disable=broad-except
      logging.exception('Exception when resetting ports.')
    finally:
      super(DeviceTestShard, self)._TestTearDown()
+
class HostTestShard(TestShard):
  """A shard of perf tests that runs on the host machine, not on a device."""

  def __init__(self, env, test_instance, tests, retries=3, timeout=None):
    super(HostTestShard, self).__init__(
        env, test_instance, tests, retries, timeout)

  @local_device_environment.handle_shard_failures
  def RunTestsOnShard(self):
    """Runs every test in this shard, retrying failures; returns the results."""
    shard_results = base_test_result.TestRunResults()
    for test in self._tests:
      # One initial attempt plus self._retries retry attempts.
      attempts_remaining = self._retries + 1
      result_type = None
      while (attempts_remaining > 0
             and result_type != base_test_result.ResultType.PASS):
        try:
          self._TestSetUp(test)
          result_type = self._RunSingleTest(test)
        finally:
          # Always tear down; any exception propagates to the
          # handle_shard_failures decorator.
          self._TestTearDown()
        attempts_remaining -= 1
      shard_results.AddResult(
          base_test_result.BaseTestResult(test, result_type))
    return shard_results

  def _LogTest(self, test, cmd, timeout):
    logging.debug("Running %s with command '%s' on host shard with timeout %d",
                  test, cmd, timeout)

  def _ExtendPersistedResult(self, persisted_result):
    # Flag the persisted result as having been produced on the host.
    persisted_result['host_test'] = True
+
+
class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun):
  """Runs perf test steps locally, sharded by per-step device affinity."""

  _DEFAULT_TIMEOUT = 5 * 60 * 60 # 5 hours.
  _CONFIG_VERSION = 1

  def __init__(self, env, test_instance):
    super(LocalDevicePerfTestRun, self).__init__(env, test_instance)
    self._devices = None
    self._env = env
    # Steps with no 'device_affinity' entry run on the host instead.
    self._no_device_tests = {}
    # _test_buckets[i] holds the steps pinned to device index i.
    self._test_buckets = []
    self._test_instance = test_instance
    self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT

  #override
  def SetUp(self):
    """Recreates the perf output directory from scratch."""
    if os.path.exists(constants.PERF_OUTPUT_DIR):
      shutil.rmtree(constants.PERF_OUTPUT_DIR)
    os.makedirs(constants.PERF_OUTPUT_DIR)

  #override
  def TearDown(self):
    pass

  def _GetStepsFromDict(self):
    """Returns the steps config, synthesized from single_step or loaded
    from the steps JSON file.

    Raises:
      TestDictVersionError: If the file's version != _CONFIG_VERSION.
      PerfTestRunGetStepsError: If neither single_step nor steps is set.
    """
    # From where this is called one of these two must be set.
    if self._test_instance.single_step:
      return {
        'version': self._CONFIG_VERSION,
        'steps': {
          'single_step': {
            'device_affinity': 0,
            'cmd': self._test_instance.single_step
          },
        }
      }
    if self._test_instance.steps:
      # Use open() rather than the deprecated file() constructor, and a
      # context manager so the handle is closed even if json.load raises.
      with open(self._test_instance.steps, 'r') as f:
        steps = json.load(f)
      if steps['version'] != self._CONFIG_VERSION:
        raise TestDictVersionError(
            'Version is expected to be %d but was %d' % (self._CONFIG_VERSION,
                                                         steps['version']))
      return steps
    raise PerfTestRunGetStepsError(
        'Neither single_step or steps set in test_instance.')

  def _SplitTestsByAffinity(self):
    """Buckets steps into _test_buckets / _no_device_tests by affinity."""
    # This splits tests by their device affinity so that the same tests always
    # run on the same devices. This is important for perf tests since different
    # devices might yield slightly different performance results.
    test_dict = self._GetStepsFromDict()
    for test, test_config in sorted(test_dict['steps'].iteritems()):
      try:
        affinity = test_config.get('device_affinity')
        if affinity is None:
          self._no_device_tests[test] = test_config
        else:
          # Grow the bucket list until index |affinity| exists.
          if len(self._test_buckets) < affinity + 1:
            while len(self._test_buckets) != affinity + 1:
              self._test_buckets.append(collections.OrderedDict())
          self._test_buckets[affinity][test] = test_config
      except KeyError:
        logging.exception(
            'Test config for %s is bad.\n Config:%s', test, str(test_config))

  @staticmethod
  def _GetAllDevices(active_devices, devices_path):
    """Returns the sorted device list, preferring the persisted known-devices
    file so device affinity stays stable across runs."""
    try:
      if devices_path:
        devices = [device_utils.DeviceUtils(s)
                   for s in device_list.GetPersistentDeviceList(devices_path)]
        if not devices and active_devices:
          logging.warning('%s is empty. Falling back to active devices.',
                          devices_path)
          devices = active_devices
      else:
        logging.warning('Known devices file path not being passed. For device '
                        'affinity to work properly, it must be passed.')
        devices = active_devices
    except IOError as e:
      logging.error('Unable to find %s [%s]', devices_path, e)
      devices = active_devices
    return sorted(devices)

  #override
  def RunTests(self, results):
    """Runs host-only and per-device shards concurrently; extends |results|."""
    def run_no_devices_tests():
      # Steps without device affinity run in a single host shard.
      if not self._no_device_tests:
        return []
      s = HostTestShard(self._env, self._test_instance, self._no_device_tests,
                        retries=3, timeout=self._timeout)
      return [s.RunTestsOnShard()]

    def device_shard_helper(shard_id):
      # Skip blacklisted devices; their bucket is simply not run.
      if device_status.IsBlacklisted(
          str(self._devices[shard_id]), self._env.blacklist):
        logging.warning('Device %s is not active. Will not create shard %s.',
                        str(self._devices[shard_id]), shard_id)
        return None
      s = DeviceTestShard(self._env, self._test_instance,
                          self._devices[shard_id], shard_id,
                          self._test_buckets[shard_id],
                          retries=self._env.max_tries, timeout=self._timeout)
      return s.RunTestsOnShard()

    def run_devices_tests():
      if not self._test_buckets:
        return []
      if self._devices is None:
        self._devices = self._GetAllDevices(
            self._env.devices, self._test_instance.known_devices_file)

      device_indices = range(min(len(self._devices), len(self._test_buckets)))
      shards = parallelizer.Parallelizer(device_indices).pMap(
          device_shard_helper)
      return [x for x in shards.pGet(self._timeout) if x is not None]

    # Affinitize the tests.
    self._SplitTestsByAffinity()
    if not self._test_buckets and not self._no_device_tests:
      raise local_device_test_run.NoTestsError()
    host_test_results, device_test_results = reraiser_thread.RunAsync(
        [run_no_devices_tests, run_devices_tests])

    # Ideally, results would be populated as early as possible, so that in the
    # event of an exception or timeout, the caller will still have partially
    # populated results. This looks like it can be done prior to dispatching
    # tests, but will hold off on making this change unless it looks like it
    # might provide utility.
    results.extend(host_test_results + device_test_results)

  # override
  def TestPackage(self):
    return 'perf'

  # override
  def _CreateShards(self, _tests):
    # Sharding is handled by affinity buckets, not by the base class.
    raise NotImplementedError

  # override
  def _GetTests(self):
    return self._test_buckets

  # override
  def _RunTest(self, _device, _test):
    raise NotImplementedError

  # override
  def _ShouldShard(self):
    return False
+
+
class OutputJsonList(LocalDevicePerfTestRun):
  """Run mode that only emits the JSON list of tests, without running them."""

  # override
  def SetUp(self):
    pass

  # override
  def RunTests(self, results):
    run_results = base_test_result.TestRunResults()
    run_results.AddResult(
        base_test_result.BaseTestResult(
            'OutputJsonList', self._test_instance.OutputJsonList()))
    # Populate |results| as soon as they are known so an exception or timeout
    # still leaves the caller with partial results.
    results.append(run_results)

  # override
  def _CreateShards(self, _tests):
    raise NotImplementedError

  # override
  def _RunTest(self, _device, _test):
    raise NotImplementedError
+
+
class PrintStep(LocalDevicePerfTestRun):
  """Run mode that only prints the output of a single perf step."""

  # override
  def SetUp(self):
    pass

  # override
  def RunTests(self, results):
    run_results = base_test_result.TestRunResults()
    run_results.AddResult(
        base_test_result.BaseTestResult(
            'PrintStep', self._test_instance.PrintTestOutput()))
    # Populate |results| as soon as they are known so an exception or timeout
    # still leaves the caller with partial results.
    results.append(run_results)

  # override
  def _CreateShards(self, _tests):
    raise NotImplementedError

  # override
  def _RunTest(self, _device, _test):
    raise NotImplementedError
+
+
class TestDictVersionError(Exception):
  """Raised when a steps config file declares an unexpected version."""
+
class PerfTestRunGetStepsError(Exception):
  """Raised when neither single_step nor a steps file is configured."""
diff --git a/deps/v8/build/android/pylib/local/device/local_device_test_run.py b/deps/v8/build/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 0000000000..62adfabfad
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,251 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import logging
+import posixpath
+import signal
+import thread
+import threading
+
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android.sdk import version_codes
+from devil.android.tools import device_recovery
+from devil.utils import signal_handler
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+from pylib.local.device import local_device_environment
+
+
# Log text attached to results for tests interrupted by SIGTERM.
_SIGTERM_TEST_LOG = (
  ' Suite execution terminated, probably due to swarming timeout.\n'
  ' Your test may not have run.')
+
+
def SubstituteDeviceRoot(device_path, device_root):
  """Resolves a device path spec against the device's root directory.

  Args:
    device_path: None/empty (meaning the root itself), a string path used
      verbatim, or a list of components where falsy entries are replaced by
      |device_root| before joining.
    device_root: The device root directory to substitute.

  Returns:
    The resolved POSIX path string.
  """
  if not device_path:
    return device_root
  if isinstance(device_path, list):
    components = [p or device_root for p in device_path]
    return posixpath.join(*components)
  return device_path
+
+
class TestsTerminated(Exception):
  """Raised to abort test execution after a SIGTERM is received."""
+
+
class InvalidShardingSettings(Exception):
  """Raised when external sharding parameters are out of range."""

  def __init__(self, shard_index, total_shards):
    message = ('Invalid sharding settings. shard_index: %d total_shards: %d'
               % (shard_index, total_shards))
    super(InvalidShardingSettings, self).__init__(message)
+
+
class LocalDeviceTestRun(test_run.TestRun):
  """Base class for test runs that execute tests on locally attached devices.

  Subclasses supply the actual tests (_GetTests), per-device execution
  (_RunTest), and sharding policy (_ShouldShard / _CreateShards).
  """

  def __init__(self, env, test_instance):
    super(LocalDeviceTestRun, self).__init__(env, test_instance)
    # Maps device serial -> valgrind tool; populated lazily by GetTool().
    self._tools = {}
    env.SetPreferredAbis(test_instance.GetPreferredAbis())

  #override
  def RunTests(self, results):
    """Runs all tests across the devices, retrying up to env.max_tries.

    Appends one TestRunResults object to |results| per try.
    """
    tests = self._GetTests()

    # Set by the SIGTERM handler so worker threads stop picking up tests.
    exit_now = threading.Event()

    @local_device_environment.handle_shard_failures
    def run_tests_on_device(dev, tests, results):
      # Worker body: runs on one device; |tests| may be a plain list or a
      # shared TestCollection when sharding is enabled.
      for test in tests:
        if exit_now.isSet():
          thread.exit()

        result = None
        rerun = None
        try:
          result, rerun = crash_handler.RetryOnSystemCrash(
              lambda d, t=test: self._RunTest(d, t),
              device=dev)
          if isinstance(result, base_test_result.BaseTestResult):
            results.AddResult(result)
          elif isinstance(result, list):
            results.AddResults(result)
          else:
            raise Exception(
                'Unexpected result type: %s' % type(result).__name__)
        except device_errors.CommandTimeoutError:
          # A timed-out |test| may itself be a batch (list) of tests.
          if isinstance(test, list):
            results.AddResults(
                base_test_result.BaseTestResult(
                    self._GetUniqueTestName(t),
                    base_test_result.ResultType.TIMEOUT)
                for t in test)
          else:
            results.AddResult(
                base_test_result.BaseTestResult(
                    self._GetUniqueTestName(test),
                    base_test_result.ResultType.TIMEOUT))
        except Exception as e: # pylint: disable=broad-except
          if isinstance(tests, test_collection.TestCollection):
            rerun = test
          if (isinstance(e, device_errors.DeviceUnreachableError)
              or not isinstance(e, base_error.BaseError)):
            # If we get a device error but believe the device is still
            # reachable, attempt to continue using it. Otherwise, raise
            # the exception and terminate this run_tests_on_device call.
            raise
        finally:
          if isinstance(tests, test_collection.TestCollection):
            # Put the test back for another device to pick up if needed.
            if rerun:
              tests.add(rerun)
            tests.test_completed()

      logging.info('Finished running tests on this device.')

    def stop_tests(_signum, _frame):
      # SIGTERM handler: flag workers to stop, then unwind via exception.
      logging.critical('Received SIGTERM. Stopping test execution.')
      exit_now.set()
      raise TestsTerminated()

    try:
      with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests):
        tries = 0
        while tries < self._env.max_tries and tests:
          logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries)
          if tries > 0 and self._env.recover_devices:
            if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1
                   for d in self._env.devices):
              logging.info(
                  'Attempting to recover devices due to known issue on L MR1. '
                  'See crbug.com/787056 for details.')
              self._env.parallel_devices.pMap(
                  device_recovery.RecoverDevice, None)
            elif tries + 1 == self._env.max_tries:
              logging.info(
                  'Attempting to recover devices prior to last test attempt.')
              self._env.parallel_devices.pMap(
                  device_recovery.RecoverDevice, None)
          logging.info('Will run %d tests on %d devices: %s',
                       len(tests), len(self._env.devices),
                       ', '.join(str(d) for d in self._env.devices))
          for t in tests:
            logging.debug(' %s', t)

          try_results = base_test_result.TestRunResults()
          test_names = (self._GetUniqueTestName(t) for t in tests)
          # Wildcard names can't be pre-registered; they expand at run time.
          try_results.AddResults(
              base_test_result.BaseTestResult(
                  t, base_test_result.ResultType.NOTRUN)
              for t in test_names if not t.endswith('*'))

          # As soon as we know the names of the tests, we populate |results|.
          # The tests in try_results will have their results updated by
          # try_results.AddResult() as they are run.
          results.append(try_results)

          try:
            if self._ShouldShard():
              tc = test_collection.TestCollection(self._CreateShards(tests))
              self._env.parallel_devices.pMap(
                  run_tests_on_device, tc, try_results).pGet(None)
            else:
              self._env.parallel_devices.pMap(
                  run_tests_on_device, tests, try_results).pGet(None)
          except TestsTerminated:
            # Mark everything that never produced a result as TIMEOUT.
            for unknown_result in try_results.GetUnknown():
              try_results.AddResult(
                  base_test_result.BaseTestResult(
                      unknown_result.GetName(),
                      base_test_result.ResultType.TIMEOUT,
                      log=_SIGTERM_TEST_LOG))
            raise

          tries += 1
          tests = self._GetTestsToRetry(tests, try_results)

          logging.info('FINISHED TRY #%d/%d', tries, self._env.max_tries)
          if tests:
            logging.info('%d failed tests remain.', len(tests))
          else:
            logging.info('All tests completed.')
    except TestsTerminated:
      pass

  def _GetTestsToRetry(self, tests, try_results):
    """Returns the subset of |tests| that failed in |try_results| and should
    be retried. Handles wildcard test names and list-valued tests."""

    def is_failure_result(test_result):
      if isinstance(test_result, list):
        return any(is_failure_result(r) for r in test_result)
      # A missing result (None) also counts as a failure.
      return (
          test_result is None
          or test_result.GetType() not in (
              base_test_result.ResultType.PASS,
              base_test_result.ResultType.SKIP))

    all_test_results = {r.GetName(): r for r in try_results.GetAll()}

    tests_and_names = ((t, self._GetUniqueTestName(t)) for t in tests)

    tests_and_results = {}
    for test, name in tests_and_names:
      if name.endswith('*'):
        # Wildcard: gather every result whose name matches the pattern.
        tests_and_results[name] = (
            test,
            [r for n, r in all_test_results.iteritems()
             if fnmatch.fnmatch(n, name)])
      else:
        tests_and_results[name] = (test, all_test_results.get(name))

    failed_tests_and_results = (
        (test, result) for test, result in tests_and_results.itervalues()
        if is_failure_result(result)
    )

    return [t for t, r in failed_tests_and_results if self._ShouldRetry(t, r)]

  def _ApplyExternalSharding(self, tests, shard_index, total_shards):
    """Returns the deterministic subset of |tests| owned by this shard."""
    logging.info('Using external sharding settings. This is shard %d/%d',
                 shard_index, total_shards)

    if total_shards < 0 or shard_index < 0 or total_shards <= shard_index:
      raise InvalidShardingSettings(shard_index, total_shards)

    return [
        t for t in tests
        if hash(self._GetUniqueTestName(t)) % total_shards == shard_index]

  def GetTool(self, device):
    """Returns (creating and caching if needed) the tool for |device|."""
    if str(device) not in self._tools:
      self._tools[str(device)] = valgrind_tools.CreateTool(
          self._env.tool, device)
    return self._tools[str(device)]

  def _CreateShards(self, tests):
    raise NotImplementedError

  def _GetUniqueTestName(self, test):
    # pylint: disable=no-self-use
    return test

  def _ShouldRetry(self, test, result):
    # pylint: disable=no-self-use,unused-argument
    return True

  def _GetTests(self):
    raise NotImplementedError

  def _RunTest(self, device, test):
    raise NotImplementedError

  def _ShouldShard(self):
    raise NotImplementedError
+
+
class NoTestsError(Exception):
  """Raised when a test run discovers no tests to execute."""
diff --git a/deps/v8/build/android/pylib/local/device/local_device_test_run_test.py b/deps/v8/build/android/pylib/local/device/local_device_test_run_test.py
new file mode 100755
index 0000000000..525bf25200
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/device/local_device_test_run_test.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env vpython
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.constants import host_paths
+from pylib.local.device import local_device_test_run
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
class SubstituteDeviceRootTest(unittest.TestCase):
  """Tests for local_device_test_run.SubstituteDeviceRoot."""

  def testNoneDevicePath(self):
    # None resolves to the device root itself.
    self.assertEquals(
        '/fake/device/root',
        local_device_test_run.SubstituteDeviceRoot(
            None, '/fake/device/root'))

  def testStringDevicePath(self):
    # A string path is returned verbatim.
    self.assertEquals(
        '/another/fake/device/path',
        local_device_test_run.SubstituteDeviceRoot(
            '/another/fake/device/path', '/fake/device/root'))

  def testListWithNoneDevicePath(self):
    # None entries in a list are replaced by the device root before joining.
    self.assertEquals(
        '/fake/device/root/subpath',
        local_device_test_run.SubstituteDeviceRoot(
            [None, 'subpath'], '/fake/device/root'))

  def testListWithoutNoneDevicePath(self):
    # A list with no falsy entries is simply joined.
    self.assertEquals(
        '/another/fake/device/path',
        local_device_test_run.SubstituteDeviceRoot(
            ['/', 'another', 'fake', 'device', 'path'],
            '/fake/device/root'))
+
+
class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun):
  """Concrete LocalDeviceTestRun wired to mocked env and test instance."""

  # pylint: disable=abstract-method

  def __init__(self):
    mock_env = mock.MagicMock()
    mock_test_instance = mock.MagicMock()
    super(TestLocalDeviceTestRun, self).__init__(mock_env, mock_test_instance)
+
+
class TestLocalDeviceNonStringTestRun(
    local_device_test_run.LocalDeviceTestRun):
  """Variant whose tests are dicts; unique names come from the 'name' key."""

  # pylint: disable=abstract-method

  def __init__(self):
    super(TestLocalDeviceNonStringTestRun, self).__init__(
        mock.MagicMock(), mock.MagicMock())

  def _GetUniqueTestName(self, test):
    # Tests here are dicts rather than strings.
    return test['name']
+
+
class LocalDeviceTestRunTest(unittest.TestCase):
  """Tests for LocalDeviceTestRun._GetTestsToRetry."""

  def testGetTestsToRetry_allTestsPassed(self):
    # Nothing to retry when every test passed.
    results = [
        base_test_result.BaseTestResult(
            'Test1', base_test_result.ResultType.PASS),
        base_test_result.BaseTestResult(
            'Test2', base_test_result.ResultType.PASS),
    ]

    tests = [r.GetName() for r in results]
    try_results = base_test_result.TestRunResults()
    try_results.AddResults(results)

    test_run = TestLocalDeviceTestRun()
    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
    self.assertEquals(0, len(tests_to_retry))

  def testGetTestsToRetry_testFailed(self):
    # A FAIL result puts the test on the retry list.
    results = [
        base_test_result.BaseTestResult(
            'Test1', base_test_result.ResultType.FAIL),
        base_test_result.BaseTestResult(
            'Test2', base_test_result.ResultType.PASS),
    ]

    tests = [r.GetName() for r in results]
    try_results = base_test_result.TestRunResults()
    try_results.AddResults(results)

    test_run = TestLocalDeviceTestRun()
    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
    self.assertEquals(1, len(tests_to_retry))
    self.assertIn('Test1', tests_to_retry)

  def testGetTestsToRetry_testUnknown(self):
    # A test with no recorded result at all is also retried.
    results = [
        base_test_result.BaseTestResult(
            'Test2', base_test_result.ResultType.PASS),
    ]

    tests = ['Test1'] + [r.GetName() for r in results]
    try_results = base_test_result.TestRunResults()
    try_results.AddResults(results)

    test_run = TestLocalDeviceTestRun()
    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
    self.assertEquals(1, len(tests_to_retry))
    self.assertIn('Test1', tests_to_retry)

  def testGetTestsToRetry_wildcardFilter_allPass(self):
    # A wildcard test is not retried when all matching results passed.
    results = [
        base_test_result.BaseTestResult(
            'TestCase.Test1', base_test_result.ResultType.PASS),
        base_test_result.BaseTestResult(
            'TestCase.Test2', base_test_result.ResultType.PASS),
    ]

    tests = ['TestCase.*']
    try_results = base_test_result.TestRunResults()
    try_results.AddResults(results)

    test_run = TestLocalDeviceTestRun()
    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
    self.assertEquals(0, len(tests_to_retry))

  def testGetTestsToRetry_wildcardFilter_oneFails(self):
    # One failing match is enough to retry the whole wildcard.
    results = [
        base_test_result.BaseTestResult(
            'TestCase.Test1', base_test_result.ResultType.PASS),
        base_test_result.BaseTestResult(
            'TestCase.Test2', base_test_result.ResultType.FAIL),
    ]

    tests = ['TestCase.*']
    try_results = base_test_result.TestRunResults()
    try_results.AddResults(results)

    test_run = TestLocalDeviceTestRun()
    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
    self.assertEquals(1, len(tests_to_retry))
    self.assertIn('TestCase.*', tests_to_retry)

  def testGetTestsToRetry_nonStringTests(self):
    # Dict-valued tests are returned as the original dict objects.
    results = [
        base_test_result.BaseTestResult(
            'TestCase.Test1', base_test_result.ResultType.PASS),
        base_test_result.BaseTestResult(
            'TestCase.Test2', base_test_result.ResultType.FAIL),
    ]

    tests = [
        {'name': 'TestCase.Test1'},
        {'name': 'TestCase.Test2'},
    ]
    try_results = base_test_result.TestRunResults()
    try_results.AddResults(results)

    test_run = TestLocalDeviceNonStringTestRun()
    tests_to_retry = test_run._GetTestsToRetry(tests, try_results)
    self.assertEquals(1, len(tests_to_retry))
    self.assertIsInstance(tests_to_retry[0], dict)
    self.assertEquals(tests[1], tests_to_retry[0])
+
+
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/local/local_test_server_spawner.py b/deps/v8/build/android/pylib/local/local_test_server_spawner.py
new file mode 100644
index 0000000000..6cd282e3a4
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/local_test_server_spawner.py
@@ -0,0 +1,100 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import time
+
+from devil.android import forwarder
+from devil.android import ports
+from pylib.base import test_server
+from pylib.constants import host_paths
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import chrome_test_server_spawner
+
+
+# The tests should not need more than one test server instance.
+MAX_TEST_SERVER_INSTANCES = 1
+
+
+def _WaitUntil(predicate, max_attempts=5):
+ """Blocks until the provided predicate (function) is true.
+
+ Returns:
+ Whether the provided predicate was satisfied once (before the timeout).
+ """
+ sleep_time_sec = 0.025
+ for _ in xrange(1, max_attempts):
+ if predicate():
+ return True
+ time.sleep(sleep_time_sec)
+ sleep_time_sec = min(1, sleep_time_sec * 2) # Don't wait more than 1 sec.
+ return False
+
+
class PortForwarderAndroid(chrome_test_server_spawner.PortForwarder):
  """Android port-forwarding hooks used by the test server spawner."""

  def __init__(self, device, tool):
    self.device = device
    self.tool = tool

  def Map(self, port_pairs):
    # Forward the given (host, device) port pairs through devil's Forwarder.
    forwarder.Forwarder.Map(port_pairs, self.device, self.tool)

  def GetDevicePortForHostPort(self, host_port):
    return forwarder.Forwarder.DevicePortForHostPort(host_port)

  def WaitHostPortAvailable(self, port):
    def host_port_free():
      return ports.IsHostPortAvailable(port)
    return _WaitUntil(host_port_free)

  def WaitPortNotAvailable(self, port):
    def host_port_taken():
      return not ports.IsHostPortAvailable(port)
    return _WaitUntil(host_port_taken)

  def WaitDevicePortReady(self, port):
    def device_port_in_use():
      return ports.IsDevicePortUsed(self.device, port)
    return _WaitUntil(device_port_in_use)

  def Unmap(self, device_port):
    forwarder.Forwarder.UnmapDevicePort(device_port, self.device)
+
+
class LocalTestServerSpawner(test_server.TestServer):
  """Manages a local spawning server that launches test servers for a device."""

  def __init__(self, port, device, tool):
    super(LocalTestServerSpawner, self).__init__()
    self._device = device
    self._spawning_server = chrome_test_server_spawner.SpawningServer(
        port, PortForwarderAndroid(device, tool), MAX_TEST_SERVER_INSTANCES)
    self._tool = tool

  @property
  def server_address(self):
    # Address tuple the spawning server is listening on.
    return self._spawning_server.server.server_address

  @property
  def port(self):
    # Port component of the spawning server's address.
    return self.server_address[1]

  #override
  def SetUp(self):
    """Writes the device-side config, maps the port, and starts the server."""
    # See net/test/spawned_test_server/test_server_config.h for description of
    # the fields in the config file.
    config = {
        'address': '127.0.0.1',
        'spawner_url_base': 'http://localhost:%d' % self.port
    }
    device_config_path = (
        '%s/net-test-server-config' % self._device.GetExternalStoragePath())
    self._device.WriteFile(device_config_path, json.dumps(config))
    forwarder.Forwarder.Map(
        [(self.port, self.port)], self._device, self._tool)
    self._spawning_server.Start()

  #override
  def Reset(self):
    self._spawning_server.CleanupState()

  #override
  def TearDown(self):
    """Resets state, stops the server, and unmaps the forwarded port."""
    self.Reset()
    self._spawning_server.Stop()
    forwarder.Forwarder.UnmapDevicePort(self.port, self._device)
diff --git a/deps/v8/build/android/pylib/local/machine/__init__.py b/deps/v8/build/android/pylib/local/machine/__init__.py
new file mode 100644
index 0000000000..ca3e206fdd
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/machine/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/local/machine/local_machine_environment.py b/deps/v8/build/android/pylib/local/machine/local_machine_environment.py
new file mode 100644
index 0000000000..3752a8afbd
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/machine/local_machine_environment.py
@@ -0,0 +1,24 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import devil_chromium
+from pylib import constants
+from pylib.base import environment
+
+
class LocalMachineEnvironment(environment.Environment):
  """Environment for tests that run directly on the host machine."""

  def __init__(self, _args, output_manager, _error_func):
    super(LocalMachineEnvironment, self).__init__(output_manager)
    # devil needs the output directory to locate host-built tools.
    devil_chromium.Initialize(output_directory=constants.GetOutDirectory())

  #override
  def SetUp(self):
    pass

  #override
  def TearDown(self):
    pass
diff --git a/deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py b/deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py
new file mode 100644
index 0000000000..dbfc505d81
--- /dev/null
+++ b/deps/v8/build/android/pylib/local/machine/local_machine_junit_test_run.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import zipfile
+
+from devil.utils import cmd_helper
+from devil.utils import reraiser_thread
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.constants import host_paths
+from pylib.results import json_results
+from py_utils import tempfile_ext
+
+
class LocalMachineJunitTestRun(test_run.TestRun):
  """Runs JUnit/Robolectric tests on the host machine via a wrapper script."""

  def __init__(self, env, test_instance):
    super(LocalMachineJunitTestRun, self).__init__(env, test_instance)

  #override
  def TestPackage(self):
    return self._test_instance.suite

  #override
  def SetUp(self):
    pass

  #override
  def RunTests(self, results):
    """Extracts resources, builds and runs the JUnit command, parses results.

    Appends one TestRunResults object to |results|.
    """
    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
      json_file_path = os.path.join(temp_dir, 'results.json')

      # Extract resources needed for test.
      # TODO(mikecase): Investigate saving md5sums of zipfiles, and only
      # extract zipfiles when they change.
      def extract_resource_zip(resource_zip, filename):
        # Returns a zero-argument callable suitable for RunAsync.
        def helper():
          extract_dest = os.path.join(temp_dir, filename)
          with zipfile.ZipFile(resource_zip, 'r') as zf:
            zf.extractall(extract_dest)
          return extract_dest
        return helper

      # Extract all resource zips in parallel.
      resource_dirs = reraiser_thread.RunAsync(
          extract_resource_zip(resource_zip, 'resources_%d' % index) for index,
          resource_zip in enumerate(self._test_instance.resource_zips))

      java_script = os.path.join(
          constants.GetOutDirectory(), 'bin', 'helper',
          self._test_instance.suite)
      command = [java_script]

      # Add Jar arguments.
      # NOTE(review): jar args are passed as one quoted string; presumably the
      # wrapper script re-splits them — verify against the script.
      jar_args = ['-test-jars', self._test_instance.suite + '.jar',
                  '-json-results-file', json_file_path]
      if self._test_instance.test_filter:
        jar_args.extend(['-gtest-filter', self._test_instance.test_filter])
      if self._test_instance.package_filter:
        jar_args.extend(['-package-filter',
                         self._test_instance.package_filter])
      if self._test_instance.runner_filter:
        jar_args.extend(['-runner-filter', self._test_instance.runner_filter])
      command.extend(['--jar-args', '"%s"' % ' '.join(jar_args)])

      # Add JVM arguments.
      jvm_args = ['-Drobolectric.dependency.dir=%s' %
                  self._test_instance.robolectric_runtime_deps_dir,
                  '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,]

      if self._test_instance.android_manifest_path:
        jvm_args += ['-Dchromium.robolectric.manifest=%s' %
                     self._test_instance.android_manifest_path]

      if self._test_instance.package_name:
        jvm_args += ['-Dchromium.robolectric.package.name=%s' %
                     self._test_instance.package_name]

      if resource_dirs:
        jvm_args += ['-Dchromium.robolectric.resource.dirs=%s' %
                     ':'.join(resource_dirs)]

      if logging.getLogger().isEnabledFor(logging.INFO):
        jvm_args += ['-Drobolectric.logging=stdout']

      if self._test_instance.debug_socket:
        # Suspend the JVM until a debugger attaches to the given socket.
        jvm_args += ['-agentlib:jdwp=transport=dt_socket'
                     ',server=y,suspend=y,address=%s' %
                     self._test_instance.debug_socket]

      if self._test_instance.coverage_dir:
        if not os.path.exists(self._test_instance.coverage_dir):
          os.makedirs(self._test_instance.coverage_dir)
        elif not os.path.isdir(self._test_instance.coverage_dir):
          raise Exception('--coverage-dir takes a directory, not file path.')
        if self._test_instance.jacoco:
          # Attach the JaCoCo agent for coverage of org.chromium.* classes.
          jacoco_coverage_file = os.path.join(
              self._test_instance.coverage_dir,
              '%s.exec' % self._test_instance.suite)
          jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
                                           'third_party', 'jacoco', 'lib',
                                           'jacocoagent.jar')
          jacoco_args = '-javaagent:{}=destfile={},includes=org.chromium.*'
          jvm_args.append(
              jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
        else:
          jvm_args.append('-Demma.coverage.out.file=%s' % os.path.join(
              self._test_instance.coverage_dir,
              '%s.ec' % self._test_instance.suite))

      if jvm_args:
        command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])

      cmd_helper.RunCmd(command)
      try:
        with open(json_file_path, 'r') as f:
          results_list = json_results.ParseResultsFromJson(
              json.loads(f.read()))
      except IOError:
        # In the case of a failure in the JUnit or Robolectric test runner
        # the output json file may never be written.
        results_list = [
          base_test_result.BaseTestResult(
              'Test Runner Failure', base_test_result.ResultType.UNKNOWN)
        ]

      test_run_results = base_test_result.TestRunResults()
      test_run_results.AddResults(results_list)
      results.append(test_run_results)

  #override
  def TearDown(self):
    pass
diff --git a/deps/v8/build/android/pylib/monkey/__init__.py b/deps/v8/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/monkey/__init__.py
diff --git a/deps/v8/build/android/pylib/monkey/monkey_test_instance.py b/deps/v8/build/android/pylib/monkey/monkey_test_instance.py
new file mode 100644
index 0000000000..10b11315bc
--- /dev/null
+++ b/deps/v8/build/android/pylib/monkey/monkey_test_instance.py
@@ -0,0 +1,72 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import random
+
+from pylib import constants
+from pylib.base import test_instance
+
+
+_SINGLE_EVENT_TIMEOUT = 100 # Milliseconds
+
+class MonkeyTestInstance(test_instance.TestInstance):
+
+ def __init__(self, args, _):
+ super(MonkeyTestInstance, self).__init__()
+
+ self._categories = args.categories
+ self._event_count = args.event_count
+ self._seed = args.seed or random.randint(1, 100)
+ self._throttle = args.throttle
+ self._verbose_count = args.verbose_count
+
+ self._package = constants.PACKAGE_INFO[args.browser].package
+ self._activity = constants.PACKAGE_INFO[args.browser].activity
+
+ self._timeout_s = (
+ self.event_count * (self.throttle + _SINGLE_EVENT_TIMEOUT)) / 1000
+
+ #override
+ def TestType(self):
+ return 'monkey'
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
+
+ @property
+ def activity(self):
+ return self._activity
+
+ @property
+ def categories(self):
+ return self._categories
+
+ @property
+ def event_count(self):
+ return self._event_count
+
+ @property
+ def package(self):
+ return self._package
+
+ @property
+ def seed(self):
+ return self._seed
+
+ @property
+ def throttle(self):
+ return self._throttle
+
+ @property
+ def timeout(self):
+ return self._timeout_s
+
+ @property
+ def verbose_count(self):
+ return self._verbose_count
diff --git a/deps/v8/build/android/pylib/output/__init__.py b/deps/v8/build/android/pylib/output/__init__.py
new file mode 100644
index 0000000000..a22a6ee39a
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/output/local_output_manager.py b/deps/v8/build/android/pylib/output/local_output_manager.py
new file mode 100644
index 0000000000..89becd7f71
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/local_output_manager.py
@@ -0,0 +1,45 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import time
+import os
+import shutil
+import urllib
+
+from pylib.base import output_manager
+
+
+class LocalOutputManager(output_manager.OutputManager):
+ """Saves and manages test output files locally in output directory.
+
+  Output files will be saved in {output_dir}/TEST_RESULTS_{timestamp}.
+ """
+
+ def __init__(self, output_dir):
+ super(LocalOutputManager, self).__init__()
+ timestamp = time.strftime(
+ '%Y_%m_%dT%H_%M_%S', time.localtime())
+ self._output_root = os.path.abspath(os.path.join(
+ output_dir, 'TEST_RESULTS_%s' % timestamp))
+
+ #override
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ return LocalArchivedFile(
+ out_filename, out_subdir, datatype, self._output_root)
+
+
+class LocalArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self, out_filename, out_subdir, datatype, out_root):
+ super(LocalArchivedFile, self).__init__(
+ out_filename, out_subdir, datatype)
+ self._output_path = os.path.join(out_root, out_subdir, out_filename)
+
+ def _Link(self):
+ return 'file://%s' % urllib.quote(self._output_path)
+
+ def _Archive(self):
+ if not os.path.exists(os.path.dirname(self._output_path)):
+ os.makedirs(os.path.dirname(self._output_path))
+ shutil.copy(self.name, self._output_path)
diff --git a/deps/v8/build/android/pylib/output/local_output_manager_test.py b/deps/v8/build/android/pylib/output/local_output_manager_test.py
new file mode 100755
index 0000000000..12452a6616
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/local_output_manager_test.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import tempfile
+import shutil
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import local_output_manager
+
+
+class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+ def setUp(self):
+ self._output_dir = tempfile.mkdtemp()
+ self._output_manager = local_output_manager.LocalOutputManager(
+ self._output_dir)
+
+ def testUsableTempFile(self):
+ self.assertUsableTempFile(
+ self._output_manager._CreateArchivedFile(
+ 'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+ def tearDown(self):
+ shutil.rmtree(self._output_dir)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/output/noop_output_manager.py b/deps/v8/build/android/pylib/output/noop_output_manager.py
new file mode 100644
index 0000000000..d29a7432f9
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/noop_output_manager.py
@@ -0,0 +1,42 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import output_manager
+
+# TODO(jbudorick): This class is currently mostly unused.
+# Add a --bot-mode argument that all bots pass. If --bot-mode and
+# --local-output args are both not passed to test runner then use this
+# as the output manager impl.
+
+# pylint: disable=no-self-use
+
+class NoopOutputManager(output_manager.OutputManager):
+
+ def __init__(self):
+ super(NoopOutputManager, self).__init__()
+
+ #override
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ del out_filename, out_subdir, datatype
+ return NoopArchivedFile()
+
+
+class NoopArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self):
+ super(NoopArchivedFile, self).__init__(None, None, None)
+
+ def Link(self):
+ """NoopArchivedFiles are not retained."""
+ return ''
+
+ def _Link(self):
+ pass
+
+ def Archive(self):
+ """NoopArchivedFiles are not retained."""
+ pass
+
+ def _Archive(self):
+ pass
diff --git a/deps/v8/build/android/pylib/output/noop_output_manager_test.py b/deps/v8/build/android/pylib/output/noop_output_manager_test.py
new file mode 100755
index 0000000000..c735a0469a
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/noop_output_manager_test.py
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.output import noop_output_manager
+
+
+class NoopOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+ def setUp(self):
+ self._output_manager = noop_output_manager.NoopOutputManager()
+
+ def testUsableTempFile(self):
+ self.assertUsableTempFile(
+ self._output_manager._CreateArchivedFile(
+ 'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/output/remote_output_manager.py b/deps/v8/build/android/pylib/output/remote_output_manager.py
new file mode 100644
index 0000000000..9fdb4bf65f
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/remote_output_manager.py
@@ -0,0 +1,89 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+from pylib.base import output_manager
+from pylib.output import noop_output_manager
+from pylib.utils import logdog_helper
+from pylib.utils import google_storage_helper
+
+
+class RemoteOutputManager(output_manager.OutputManager):
+
+ def __init__(self, bucket):
+ """Uploads output files to Google Storage or LogDog.
+
+ Files will either be uploaded directly to Google Storage or LogDog
+ depending on the datatype.
+
+    Args:
+ bucket: Bucket to use when saving to Google Storage.
+ """
+ super(RemoteOutputManager, self).__init__()
+ self._bucket = bucket
+
+ #override
+ def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
+ if datatype == output_manager.Datatype.TEXT:
+ try:
+ logdog_helper.get_logdog_client()
+ return LogdogArchivedFile(out_filename, out_subdir, datatype)
+ except RuntimeError:
+ return noop_output_manager.NoopArchivedFile()
+ else:
+ if self._bucket is None:
+ return noop_output_manager.NoopArchivedFile()
+ return GoogleStorageArchivedFile(
+ out_filename, out_subdir, datatype, self._bucket)
+
+
+class LogdogArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self, out_filename, out_subdir, datatype):
+ super(LogdogArchivedFile, self).__init__(out_filename, out_subdir, datatype)
+ self._stream_name = '%s_%s' % (out_subdir, out_filename)
+
+ def _Link(self):
+ return logdog_helper.get_viewer_url(self._stream_name)
+
+ def _Archive(self):
+ with open(self.name, 'r') as f:
+ logdog_helper.text(self._stream_name, f.read())
+
+
+class GoogleStorageArchivedFile(output_manager.ArchivedFile):
+
+ def __init__(self, out_filename, out_subdir, datatype, bucket):
+ super(GoogleStorageArchivedFile, self).__init__(
+ out_filename, out_subdir, datatype)
+ self._bucket = bucket
+ self._upload_path = None
+ self._content_addressed = None
+
+ def _PrepareArchive(self):
+ self._content_addressed = (self._datatype in (
+ output_manager.Datatype.HTML,
+ output_manager.Datatype.PNG,
+ output_manager.Datatype.JSON))
+ if self._content_addressed:
+ sha1 = hashlib.sha1()
+ with open(self.name, 'rb') as f:
+ sha1.update(f.read())
+ self._upload_path = sha1.hexdigest()
+ else:
+ self._upload_path = os.path.join(self._out_subdir, self._out_filename)
+
+ def _Link(self):
+ return google_storage_helper.get_url_link(
+ self._upload_path, self._bucket)
+
+ def _Archive(self):
+ if (self._content_addressed and
+ google_storage_helper.exists(self._upload_path, self._bucket)):
+ return
+
+ google_storage_helper.upload(
+ self._upload_path, self.name, self._bucket, content_type=self._datatype)
diff --git a/deps/v8/build/android/pylib/output/remote_output_manager_test.py b/deps/v8/build/android/pylib/output/remote_output_manager_test.py
new file mode 100755
index 0000000000..6917260dd7
--- /dev/null
+++ b/deps/v8/build/android/pylib/output/remote_output_manager_test.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import unittest
+
+from pylib.base import output_manager
+from pylib.base import output_manager_test_case
+from pylib.constants import host_paths
+from pylib.output import remote_output_manager
+
+with host_paths.SysPath(host_paths.PYMOCK_PATH):
+ import mock # pylint: disable=import-error
+
+
+@mock.patch('pylib.utils.google_storage_helper')
+class RemoteOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
+
+ def setUp(self):
+ self._output_manager = remote_output_manager.RemoteOutputManager(
+ 'this-is-a-fake-bucket')
+
+ def testUsableTempFile(self, google_storage_helper_mock):
+ del google_storage_helper_mock
+ self.assertUsableTempFile(
+ self._output_manager._CreateArchivedFile(
+ 'test_file', 'test_subdir', output_manager.Datatype.TEXT))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/perf/__init__.py b/deps/v8/build/android/pylib/perf/__init__.py
new file mode 100644
index 0000000000..9228df89b0
--- /dev/null
+++ b/deps/v8/build/android/pylib/perf/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/perf/perf_test_instance.py b/deps/v8/build/android/pylib/perf/perf_test_instance.py
new file mode 100644
index 0000000000..49d75e48e3
--- /dev/null
+++ b/deps/v8/build/android/pylib/perf/perf_test_instance.py
@@ -0,0 +1,239 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import pickle
+import re
+
+from devil import base_error
+from devil.utils import cmd_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.constants import host_paths
+from pylib.utils import test_filter
+
+
+_GIT_CR_POS_RE = re.compile(r'^Cr-Commit-Position: refs/heads/master@{#(\d+)}$')
+
+
+def _GetPersistedResult(test_name):
+ file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+ if not os.path.exists(file_name):
+ logging.error('File not found %s', file_name)
+ return None
+
+ with file(file_name, 'r') as f:
+ return pickle.load(f)
+
+
+def _GetChromiumRevision():
+ # pylint: disable=line-too-long
+ """Get the git hash and commit position of the chromium master branch.
+
+ See:
+ https://chromium.googlesource.com/chromium/tools/build/+/387e3cf3/scripts/slave/runtest.py#211
+
+ Returns:
+ A dictionary with 'revision' and 'commit_pos' keys.
+ """
+ # pylint: enable=line-too-long
+ status, output = cmd_helper.GetCmdStatusAndOutput(
+ ['git', 'log', '-n', '1', '--pretty=format:%H%n%B', 'HEAD'],
+ cwd=host_paths.DIR_SOURCE_ROOT)
+ revision = None
+ commit_pos = None
+ if not status:
+ lines = output.splitlines()
+ revision = lines[0]
+ for line in reversed(lines):
+ m = _GIT_CR_POS_RE.match(line.strip())
+ if m:
+ commit_pos = int(m.group(1))
+ break
+ return {'revision': revision, 'commit_pos': commit_pos}
+
+
+class PerfTestInstance(test_instance.TestInstance):
+ def __init__(self, args, _):
+ super(PerfTestInstance, self).__init__()
+
+ self._collect_chartjson_data = args.collect_chartjson_data
+ self._dry_run = args.dry_run
+ self._output_dir_archive_path = args.output_dir_archive_path
+ # TODO(rnephew): Get rid of this when everything uses
+ # --output-dir-archive-path
+ if self._output_dir_archive_path is None and args.get_output_dir_archive:
+ self._output_dir_archive_path = args.get_output_dir_archive
+ self._known_devices_file = args.known_devices_file
+ self._max_battery_temp = args.max_battery_temp
+ self._min_battery_level = args.min_battery_level
+ self._no_timeout = args.no_timeout
+ self._output_chartjson_data = args.output_chartjson_data
+ self._output_json_list = args.output_json_list
+ self._print_step = args.print_step
+ self._single_step = (
+ ' '.join(args.single_step_command) if args.single_step else None)
+ self._steps = args.steps
+ self._test_filter = test_filter.InitializeFilterFromArgs(args)
+ self._write_buildbot_json = args.write_buildbot_json
+
+ #override
+ def SetUp(self):
+ pass
+
+ #override
+ def TearDown(self):
+ pass
+
+ def OutputJsonList(self):
+ try:
+ with file(self._steps, 'r') as i:
+ all_steps = json.load(i)
+
+ step_values = []
+ for k, v in all_steps['steps'].iteritems():
+ data = {'test': k, 'device_affinity': v['device_affinity']}
+
+ persisted_result = _GetPersistedResult(k)
+ if persisted_result:
+ data['start_time'] = persisted_result['start_time']
+ data['end_time'] = persisted_result['end_time']
+ data['total_time'] = persisted_result['total_time']
+ data['has_archive'] = persisted_result['archive_bytes'] is not None
+ step_values.append(data)
+
+ with file(self.output_json_list, 'w') as o:
+ o.write(json.dumps(step_values))
+ return base_test_result.ResultType.PASS
+ except KeyError:
+ logging.exception('Persistent results file missing key.')
+ return base_test_result.ResultType.FAIL
+
+ def PrintTestOutput(self):
+ """Helper method to print the output of previously executed test_name.
+
+    The test name is passed from the command line as print_step.
+
+ Returns:
+ exit code generated by the test step.
+ """
+ persisted_result = _GetPersistedResult(self._print_step)
+ if not persisted_result:
+ raise PersistentDataError('No data for test %s found.' % self._print_step)
+ logging.info('*' * 80)
+ logging.info('Output from:')
+ logging.info(persisted_result['cmd'])
+ logging.info('*' * 80)
+
+ output_formatted = ''
+ persisted_outputs = persisted_result['output']
+ for i in xrange(len(persisted_outputs)):
+ output_formatted += '\n\nOutput from run #%d:\n\n%s' % (
+ i, persisted_outputs[i])
+ print output_formatted
+
+ if self.output_chartjson_data:
+ with file(self.output_chartjson_data, 'w') as f:
+ f.write(persisted_result['chartjson'])
+
+ if self.output_dir_archive_path:
+ if persisted_result['archive_bytes'] is not None:
+ with file(self.output_dir_archive_path, 'wb') as f:
+ f.write(persisted_result['archive_bytes'])
+ else:
+ logging.error('The output dir was not archived.')
+ if persisted_result['exit_code'] == 0:
+ return base_test_result.ResultType.PASS
+ return base_test_result.ResultType.FAIL
+
+ #override
+ def TestType(self):
+ return 'perf'
+
+ @staticmethod
+ def ReadChartjsonOutput(output_dir):
+ if not output_dir:
+ return ''
+ json_output_path = os.path.join(output_dir, 'results-chart.json')
+ try:
+ with open(json_output_path) as f:
+ return f.read()
+ except IOError:
+ logging.exception('Exception when reading chartjson.')
+ logging.error('This usually means that telemetry did not run, so it could'
+ ' not generate the file. Please check the device running'
+ ' the test.')
+ return ''
+
+ def WriteBuildBotJson(self, output_dir):
+ """Write metadata about the buildbot environment to the output dir."""
+ if not output_dir or not self._write_buildbot_json:
+ return
+ data = {
+ 'chromium': _GetChromiumRevision(),
+ 'environment': dict(os.environ)
+ }
+ with open(os.path.join(output_dir, 'buildbot.json'), 'w') as f:
+ json.dump(data, f, sort_keys=True, separators=(',', ': '))
+
+ @property
+ def collect_chartjson_data(self):
+ return self._collect_chartjson_data
+
+ @property
+ def dry_run(self):
+ return self._dry_run
+
+ @property
+ def known_devices_file(self):
+ return self._known_devices_file
+
+ @property
+ def max_battery_temp(self):
+ return self._max_battery_temp
+
+ @property
+ def min_battery_level(self):
+ return self._min_battery_level
+
+ @property
+ def no_timeout(self):
+ return self._no_timeout
+
+ @property
+ def output_chartjson_data(self):
+ return self._output_chartjson_data
+
+ @property
+ def output_dir_archive_path(self):
+ return self._output_dir_archive_path
+
+ @property
+ def output_json_list(self):
+ return self._output_json_list
+
+ @property
+ def print_step(self):
+ return self._print_step
+
+ @property
+ def single_step(self):
+ return self._single_step
+
+ @property
+ def steps(self):
+ return self._steps
+
+ @property
+ def test_filter(self):
+ return self._test_filter
+
+
+class PersistentDataError(base_error.BaseError):
+ def __init__(self, message):
+ super(PersistentDataError, self).__init__(message)
+ self._is_infra_error = True
diff --git a/deps/v8/build/android/pylib/pexpect.py b/deps/v8/build/android/pylib/pexpect.py
new file mode 100644
index 0000000000..cf59fb0f6d
--- /dev/null
+++ b/deps/v8/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+ os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+ sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+ from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+ pass
diff --git a/deps/v8/build/android/pylib/restart_adbd.sh b/deps/v8/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000000..393b2ebac0
--- /dev/null
+++ b/deps/v8/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+ stop adbd
+ start adbd
+}
+
+restart &
diff --git a/deps/v8/build/android/pylib/results/__init__.py b/deps/v8/build/android/pylib/results/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py
new file mode 100644
index 0000000000..4d6aabb953
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
new file mode 100644
index 0000000000..5e5f83f2a2
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -0,0 +1,699 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# tools/blinkpy/web_tests/layout_package/json_results_generator.py
+# tools/blinkpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+ return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+ # FIXME: Kill this code once the server returns json instead of jsonp.
+ if HasJSONWrapper(json_content):
+ return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+ return json_content
+
+
+def WriteJSON(json_object, file_path, callback=None):
+ # Specify separators in order to get compact encoding.
+ json_string = json.dumps(json_object, separators=(',', ':'))
+ if callback:
+ json_string = callback + '(' + json_string + ');'
+ with open(file_path, 'w') as fp:
+ fp.write(json_string)
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+ """Flattens the trie of paths, prepending a prefix to each."""
+ result = {}
+ for name, data in trie.iteritems():
+ if prefix:
+ name = prefix + '/' + name
+
+ if len(data) and not 'results' in data:
+ result.update(ConvertTrieToFlatPaths(data, name))
+ else:
+ result[name] = data
+
+ return result
+
+
+def AddPathToTrie(path, value, trie):
+ """Inserts a single path and value into a directory trie structure."""
+ if not '/' in path:
+ trie[path] = value
+ return
+
+ directory, _, rest = path.partition('/')
+ if not directory in trie:
+ trie[directory] = {}
+ AddPathToTrie(rest, value, trie[directory])
+
+
+def TestTimingsTrie(individual_test_timings):
+ """Breaks a test name into dicts by directory
+
+ foo/bar/baz.html: 1ms
+ foo/bar/baz1.html: 3ms
+
+ becomes
+ foo: {
+ bar: {
+ baz.html: 1,
+ baz1.html: 3
+ }
+ }
+ """
+ trie = {}
+ for test_result in individual_test_timings:
+ test = test_result.test_name
+
+ AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+ return trie
+
+
+class TestResult(object):
+ """A simple class that represents a single test result."""
+
+ # Test modifier constants.
+ (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+ def __init__(self, test, failed=False, elapsed_time=0):
+ self.test_name = test
+ self.failed = failed
+ self.test_run_time = elapsed_time
+
+ test_name = test
+ try:
+ test_name = test.split('.')[1]
+ except IndexError:
+ _log.warn('Invalid test name: %s.', test)
+
+ if test_name.startswith('FAILS_'):
+ self.modifier = self.FAILS
+ elif test_name.startswith('FLAKY_'):
+ self.modifier = self.FLAKY
+ elif test_name.startswith('DISABLED_'):
+ self.modifier = self.DISABLED
+ else:
+ self.modifier = self.NONE
+
+ def Fixable(self):
+ return self.failed or self.modifier == self.DISABLED
+
+
+class JSONResultsGeneratorBase(object):
+ """A JSON results generator for generic tests."""
+
+ MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+ # Min time (seconds) that will be added to the JSON.
+ MIN_TIME = 1
+
+ # Note that in non-chromium tests those chars are used to indicate
+ # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+ PASS_RESULT = 'P'
+ SKIP_RESULT = 'X'
+ FAIL_RESULT = 'F'
+ FLAKY_RESULT = 'L'
+ NO_DATA_RESULT = 'N'
+
+ MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+ TestResult.DISABLED: SKIP_RESULT,
+ TestResult.FAILS: FAIL_RESULT,
+ TestResult.FLAKY: FLAKY_RESULT}
+
+ VERSION = 4
+ VERSION_KEY = 'version'
+ RESULTS = 'results'
+ TIMES = 'times'
+ BUILD_NUMBERS = 'buildNumbers'
+ TIME = 'secondsSinceEpoch'
+ TESTS = 'tests'
+
+ FIXABLE_COUNT = 'fixableCount'
+ FIXABLE = 'fixableCounts'
+ ALL_FIXABLE_COUNT = 'allFixableCount'
+
+ RESULTS_FILENAME = 'results.json'
+ TIMES_MS_FILENAME = 'times_ms.json'
+ INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+ # line too long pylint: disable=line-too-long
+ URL_FOR_TEST_LIST_JSON = (
+ 'https://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&'
+ 'master=%s')
+ # pylint: enable=line-too-long
+
+ def __init__(self, builder_name, build_name, build_number,
+ results_file_base_path, builder_base_url,
+ test_results_map, svn_repositories=None,
+ test_results_server=None,
+ test_type='',
+ master_name=''):
+ """Modifies the results.json file. Grabs it off the archive directory
+ if it is not found locally.
+
+    Args:
+ builder_name: the builder name (e.g. Webkit).
+ build_name: the build name (e.g. webkit-rel).
+ build_number: the build number.
+ results_file_base_path: Absolute path to the directory containing the
+ results json file.
+ builder_base_url: the URL where we have the archived test results.
+ If this is None no archived results will be retrieved.
+ test_results_map: A dictionary that maps test_name to TestResult.
+ svn_repositories: A (json_field_name, svn_path) pair for SVN
+ repositories that tests rely on. The SVN revision will be
+ included in the JSON with the given json_field_name.
+ test_results_server: server that hosts test results json.
+ test_type: test type string (e.g. 'layout-tests').
+ master_name: the name of the buildbot master.
+ """
+ self._builder_name = builder_name
+ self._build_name = build_name
+ self._build_number = build_number
+ self._builder_base_url = builder_base_url
+ self._results_directory = results_file_base_path
+
+ self._test_results_map = test_results_map
+ self._test_results = test_results_map.values()
+
+ self._svn_repositories = svn_repositories
+ if not self._svn_repositories:
+ self._svn_repositories = {}
+
+ self._test_results_server = test_results_server
+ self._test_type = test_type
+ self._master_name = master_name
+
+ self._archived_results = None
+
+ def GenerateJSONOutput(self):
+ json_object = self.GetJSON()
+ if json_object:
+ file_path = (
+ os.path.join(
+ self._results_directory,
+ self.INCREMENTAL_RESULTS_FILENAME))
+ WriteJSON(json_object, file_path)
+
+ def GenerateTimesMSFile(self):
+ times = TestTimingsTrie(self._test_results_map.values())
+ file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+ WriteJSON(times, file_path)
+
+ def GetJSON(self):
+ """Gets the results for the results.json file."""
+ results_json = {}
+
+ if not results_json:
+ results_json, error = self._GetArchivedJSONResults()
+ if error:
+ # If there was an error don't write a results.json
+ # file at all as it would lose all the information on the
+ # bot.
+ _log.error('Archive directory is inaccessible. Not '
+ 'modifying or clobbering the results.json '
+ 'file: ' + str(error))
+ return None
+
+ builder_name = self._builder_name
+ if results_json and builder_name not in results_json:
+ _log.debug('Builder name (%s) is not in the results.json file.',
+ builder_name)
+
+ self._ConvertJSONToCurrentVersion(results_json)
+
+ if builder_name not in results_json:
+ results_json[builder_name] = (
+ self._CreateResultsForBuilderJSON())
+
+ results_for_builder = results_json[builder_name]
+
+ if builder_name:
+ self._InsertGenericMetaData(results_for_builder)
+
+ self._InsertFailureSummaries(results_for_builder)
+
+ # Update the all failing tests with result type and time.
+ tests = results_for_builder[self.TESTS]
+ all_failing_tests = self._GetFailedTestNames()
+ all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+ for test in all_failing_tests:
+ self._InsertTestTimeAndResult(test, tests)
+
+ return results_json
+
+ def SetArchivedResults(self, archived_results):
+ self._archived_results = archived_results
+
+ def UploadJSONFiles(self, json_files):
+ """Uploads the given json_files to the test_results_server (if the
+ test_results_server is given)."""
+ if not self._test_results_server:
+ return
+
+ if not self._master_name:
+ _log.error(
+ '--test-results-server was set, but --master-name was not. Not '
+ 'uploading JSON files.')
+ return
+
+ _log.info('Uploading JSON files for builder: %s', self._builder_name)
+ attrs = [('builder', self._builder_name),
+ ('testtype', self._test_type),
+ ('master', self._master_name)]
+
+ files = [(json_file, os.path.join(self._results_directory, json_file))
+ for json_file in json_files]
+
+ url = 'https://%s/testfile/upload' % self._test_results_server
+ # Set uploading timeout in case appengine server is having problems.
+ # 120 seconds are more than enough to upload test results.
+ uploader = _FileUploader(url, 120)
+ try:
+ response = uploader.UploadAsMultipartFormData(files, attrs)
+ if response:
+ if response.code == 200:
+ _log.info('JSON uploaded.')
+ else:
+ _log.debug(
+ "JSON upload failed, %d: '%s'", response.code, response.read())
+ else:
+ _log.error('JSON upload failed; no response returned')
+ except Exception, err: # pylint: disable=broad-except
+ _log.error('Upload failed: %s', err)
+ return
+
+ def _GetTestTiming(self, test_name):
+ """Returns test timing data (elapsed time) in second
+ for the given test_name."""
+ if test_name in self._test_results_map:
+ # Floor for now to get time in seconds.
+ return int(self._test_results_map[test_name].test_run_time)
+ return 0
+
+ def _GetFailedTestNames(self):
+ """Returns a set of failed test names."""
+ return set([r.test_name for r in self._test_results if r.failed])
+
+ def _GetModifierChar(self, test_name):
+ """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+ PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+ for the given test_name.
+ """
+ if test_name not in self._test_results_map:
+ return self.__class__.NO_DATA_RESULT
+
+ test_result = self._test_results_map[test_name]
+ if test_result.modifier in self.MODIFIER_TO_CHAR.keys():
+ return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+ return self.__class__.PASS_RESULT
+
+ def _get_result_char(self, test_name):
+ """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+ PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+ for the given test_name.
+ """
+ if test_name not in self._test_results_map:
+ return self.__class__.NO_DATA_RESULT
+
+ test_result = self._test_results_map[test_name]
+ if test_result.modifier == TestResult.DISABLED:
+ return self.__class__.SKIP_RESULT
+
+ if test_result.failed:
+ return self.__class__.FAIL_RESULT
+
+ return self.__class__.PASS_RESULT
+
def _GetSVNRevision(self, in_directory):
  """Returns the svn revision for the given directory.

  Args:
    in_directory: The directory where svn is to be run.

  Raises:
    NotImplementedError: always; subclasses must override this.
  """
  # This is overridden in flakiness_dashboard_results_uploader.py.
  raise NotImplementedError()
+
+ def _GetArchivedJSONResults(self):
+ """Download JSON file that only contains test
+ name list from test-results server. This is for generating incremental
+ JSON so the file generated has info for tests that failed before but
+ pass or are skipped from current run.
+
+ Returns (archived_results, error) tuple where error is None if results
+ were successfully read.
+ """
+ results_json = {}
+ old_results = None
+ error = None
+
+ if not self._test_results_server:
+ return {}, None
+
+ results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+ (urllib2.quote(self._test_results_server),
+ urllib2.quote(self._builder_name),
+ self.RESULTS_FILENAME,
+ urllib2.quote(self._test_type),
+ urllib2.quote(self._master_name)))
+
+ # pylint: disable=redefined-variable-type
+ try:
+ # FIXME: We should talk to the network via a Host object.
+ results_file = urllib2.urlopen(results_file_url)
+ old_results = results_file.read()
+ except urllib2.HTTPError, http_error:
+ # A non-4xx status code means the bot is hosed for some reason
+ # and we can't grab the results.json file off of it.
+ if http_error.code < 400 and http_error.code >= 500:
+ error = http_error
+ except urllib2.URLError, url_error:
+ error = url_error
+ # pylint: enable=redefined-variable-type
+
+ if old_results:
+ # Strip the prefix and suffix so we can get the actual JSON object.
+ old_results = StripJSONWrapper(old_results)
+
+ try:
+ results_json = json.loads(old_results)
+ except Exception: # pylint: disable=broad-except
+ _log.debug('results.json was not valid JSON. Clobbering.')
+ # The JSON file is not valid JSON. Just clobber the results.
+ results_json = {}
+ else:
+ _log.debug('Old JSON results do not exist. Starting fresh.')
+ results_json = {}
+
+ return results_json, error
+
+ def _InsertFailureSummaries(self, results_for_builder):
+ """Inserts aggregate pass/failure statistics into the JSON.
+ This method reads self._test_results and generates
+ FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+ Args:
+ results_for_builder: Dictionary containing the test results for a
+ single builder.
+ """
+ # Insert the number of tests that failed or skipped.
+ fixable_count = len([r for r in self._test_results if r.Fixable()])
+ self._InsertItemIntoRawList(results_for_builder,
+ fixable_count, self.FIXABLE_COUNT)
+
+ # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+ entry = {}
+ for test_name in self._test_results_map.iterkeys():
+ result_char = self._GetModifierChar(test_name)
+ entry[result_char] = entry.get(result_char, 0) + 1
+
+ # Insert the pass/skip/failure summary dictionary.
+ self._InsertItemIntoRawList(results_for_builder, entry,
+ self.FIXABLE)
+
+ # Insert the number of all the tests that are supposed to pass.
+ all_test_count = len(self._test_results)
+ self._InsertItemIntoRawList(results_for_builder,
+ all_test_count, self.ALL_FIXABLE_COUNT)
+
+ def _InsertItemIntoRawList(self, results_for_builder, item, key):
+ """Inserts the item into the list with the given key in the results for
+ this builder. Creates the list if no such list exists.
+
+ Args:
+ results_for_builder: Dictionary containing the test results for a
+ single builder.
+ item: Number or string to insert into the list.
+ key: Key in results_for_builder for the list to insert into.
+ """
+ if key in results_for_builder:
+ raw_list = results_for_builder[key]
+ else:
+ raw_list = []
+
+ raw_list.insert(0, item)
+ raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+ results_for_builder[key] = raw_list
+
+ def _InsertItemRunLengthEncoded(self, item, encoded_results):
+ """Inserts the item into the run-length encoded results.
+
+ Args:
+ item: String or number to insert.
+ encoded_results: run-length encoded results. An array of arrays, e.g.
+ [[3,'A'],[1,'Q']] encodes AAAQ.
+ """
+ if len(encoded_results) and item == encoded_results[0][1]:
+ num_results = encoded_results[0][0]
+ if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+ encoded_results[0][0] = num_results + 1
+ else:
+ # Use a list instead of a class for the run-length encoding since
+ # we want the serialized form to be concise.
+ encoded_results.insert(0, [1, item])
+
+ def _InsertGenericMetaData(self, results_for_builder):
+ """ Inserts generic metadata (such as version number, current time etc)
+ into the JSON.
+
+ Args:
+ results_for_builder: Dictionary containing the test results for
+ a single builder.
+ """
+ self._InsertItemIntoRawList(results_for_builder,
+ self._build_number, self.BUILD_NUMBERS)
+
+ # Include SVN revisions for the given repositories.
+ for (name, path) in self._svn_repositories:
+ # Note: for JSON file's backward-compatibility we use 'chrome' rather
+ # than 'chromium' here.
+ lowercase_name = name.lower()
+ if lowercase_name == 'chromium':
+ lowercase_name = 'chrome'
+ self._InsertItemIntoRawList(results_for_builder,
+ self._GetSVNRevision(path),
+ lowercase_name + 'Revision')
+
+ self._InsertItemIntoRawList(results_for_builder,
+ int(time.time()),
+ self.TIME)
+
+ def _InsertTestTimeAndResult(self, test_name, tests):
+ """ Insert a test item with its results to the given tests dictionary.
+
+ Args:
+ tests: Dictionary containing test result entries.
+ """
+
+ result = self._get_result_char(test_name)
+ test_time = self._GetTestTiming(test_name)
+
+ this_test = tests
+ for segment in test_name.split('/'):
+ if segment not in this_test:
+ this_test[segment] = {}
+ this_test = this_test[segment]
+
+ if not len(this_test):
+ self._PopulateResultsAndTimesJSON(this_test)
+
+ if self.RESULTS in this_test:
+ self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+ else:
+ this_test[self.RESULTS] = [[1, result]]
+
+ if self.TIMES in this_test:
+ self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+ else:
+ this_test[self.TIMES] = [[1, test_time]]
+
+ def _ConvertJSONToCurrentVersion(self, results_json):
+ """If the JSON does not match the current version, converts it to the
+ current version and adds in the new version number.
+ """
+ if self.VERSION_KEY in results_json:
+ archive_version = results_json[self.VERSION_KEY]
+ if archive_version == self.VERSION:
+ return
+ else:
+ archive_version = 3
+
+ # version 3->4
+ if archive_version == 3:
+ for results in results_json.values():
+ self._ConvertTestsToTrie(results)
+
+ results_json[self.VERSION_KEY] = self.VERSION
+
+ def _ConvertTestsToTrie(self, results):
+ if not self.TESTS in results:
+ return
+
+ test_results = results[self.TESTS]
+ test_results_trie = {}
+ for test in test_results.iterkeys():
+ single_test_result = test_results[test]
+ AddPathToTrie(test, single_test_result, test_results_trie)
+
+ results[self.TESTS] = test_results_trie
+
+ def _PopulateResultsAndTimesJSON(self, results_and_times):
+ results_and_times[self.RESULTS] = []
+ results_and_times[self.TIMES] = []
+ return results_and_times
+
+ def _CreateResultsForBuilderJSON(self):
+ results_for_builder = {}
+ results_for_builder[self.TESTS] = {}
+ return results_for_builder
+
+ def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+ """Removes items from the run-length encoded list after the final
+ item that exceeds the max number of builds to track.
+
+ Args:
+ encoded_results: run-length encoded results. An array of arrays, e.g.
+ [[3,'A'],[1,'Q']] encodes AAAQ.
+ """
+ num_builds = 0
+ index = 0
+ for result in encoded_list:
+ num_builds = num_builds + result[0]
+ index = index + 1
+ if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+ return encoded_list[:index]
+ return encoded_list
+
+ def _NormalizeResultsJSON(self, test, test_name, tests):
+ """ Prune tests where all runs pass or tests that no longer exist and
+ truncate all results to maxNumberOfBuilds.
+
+ Args:
+ test: ResultsAndTimes object for this test.
+ test_name: Name of the test.
+ tests: The JSON object with all the test results for this builder.
+ """
+ test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+ test[self.RESULTS])
+ test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+ test[self.TIMES])
+
+ is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+ self.PASS_RESULT)
+ is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+ self.NO_DATA_RESULT)
+ max_time = max([test_time[1] for test_time in test[self.TIMES]])
+
+ # Remove all passes/no-data from the results to reduce noise and
+ # filesize. If a test passes every run, but takes > MIN_TIME to run,
+ # don't throw away the data.
+ if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+ del tests[test_name]
+
+ # method could be a function pylint: disable=R0201
+ def _IsResultsAllOfType(self, results, result_type):
+ """Returns whether all the results are of the given type
+ (e.g. all passes)."""
+ return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+ def __init__(self, url, timeout_seconds):
+ self._url = url
+ self._timeout_seconds = timeout_seconds
+
+ def UploadAsMultipartFormData(self, files, attrs):
+ file_objs = []
+ for filename, path in files:
+ with file(path, 'rb') as fp:
+ file_objs.append(('file', filename, fp.read()))
+
+ # FIXME: We should use the same variable names for the formal and actual
+ # parameters.
+ content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+ return self._UploadData(content_type, data)
+
+ def _UploadData(self, content_type, data):
+ start = time.time()
+ end = start + self._timeout_seconds
+ while time.time() < end:
+ try:
+ request = urllib2.Request(self._url, data,
+ {'Content-Type': content_type})
+ return urllib2.urlopen(request)
+ except urllib2.HTTPError as e:
+ _log.warn("Received HTTP status %s loading \"%s\". "
+ 'Retrying in 10 seconds...', e.code, e.filename)
+ time.sleep(10)
+
+
+def _GetMIMEType(filename):
+ return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
# FIXME: Rather than taking tuples, this function should take more
# structured data.
def _EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.

  Source:
    http://code.google.com/p/rietveld/source/browse/trunk/upload.py
  """
  # Fixed boundary: fine for this trusted, internally-generated payload,
  # but it would break if a value ever contained the boundary string.
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  lines = []

  for key, value in fields:
    lines.append('--' + BOUNDARY)
    lines.append('Content-Disposition: form-data; name="%s"' % key)
    lines.append('')
    # NOTE(review): `unicode` is Python-2-only; this function needs
    # attention before any Python 3 migration.
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    lines.append(value)

  for key, filename, value in files:
    lines.append('--' + BOUNDARY)
    lines.append('Content-Disposition: form-data; name="%s"; '
                 'filename="%s"' % (key, filename))
    lines.append('Content-Type: %s' % _GetMIMEType(filename))
    lines.append('')
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    lines.append(value)

  lines.append('--' + BOUNDARY + '--')
  lines.append('')
  body = CRLF.join(lines)
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, body
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
new file mode 100644
index 0000000000..d6aee057bf
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.results.flakiness_dashboard import json_results_generator
+
+
class JSONGeneratorTest(unittest.TestCase):
  """Tests for json_results_generator.JSONResultsGeneratorBase.

  Test names encode their expected classification via prefixes:
  DISABLED_, FLAKY_ and FAILS_; everything else counts as a pass.
  """

  def setUp(self):
    self.builder_name = 'DUMMY_BUILDER_NAME'
    self.build_name = 'DUMMY_BUILD_NAME'
    self.build_number = 'DUMMY_BUILDER_NUMBER'

    # For archived results.
    self._json = None
    self._num_runs = 0
    self._tests_set = set([])
    self._test_timings = {}
    self._failed_count_map = {}

    self._PASS_count = 0
    self._DISABLED_count = 0
    self._FLAKY_count = 0
    self._FAILS_count = 0
    self._fixable_count = 0

    # Stub out WriteJSON so no test touches the filesystem; the
    # original is restored in tearDown.
    self._orig_write_json = json_results_generator.WriteJSON

    # unused arguments ... pylint: disable=W0613
    def _WriteJSONStub(json_object, file_path, callback=None):
      pass

    json_results_generator.WriteJSON = _WriteJSONStub

  def tearDown(self):
    json_results_generator.WriteJSON = self._orig_write_json

  def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
    # Classify the given test names by their prefix, build a generator
    # from them, and verify the JSON it produces.
    tests_set = set(passed_tests_list) | set(failed_tests_list)

    DISABLED_tests = set([t for t in tests_set
                          if t.startswith('DISABLED_')])
    FLAKY_tests = set([t for t in tests_set
                       if t.startswith('FLAKY_')])
    FAILS_tests = set([t for t in tests_set
                       if t.startswith('FAILS_')])
    PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)

    # DISABLED tests never count as failures, even when listed failed.
    failed_tests = set(failed_tests_list) - DISABLED_tests
    failed_count_map = dict([(t, 1) for t in failed_tests])

    # Give each test a distinct, deterministic timing.
    test_timings = {}
    i = 0
    for test in tests_set:
      test_timings[test] = float(self._num_runs * 100 + i)
      i += 1

    test_results_map = dict()
    for test in tests_set:
      test_results_map[test] = json_results_generator.TestResult(
          test, failed=(test in failed_tests),
          elapsed_time=test_timings[test])

    generator = json_results_generator.JSONResultsGeneratorBase(
        self.builder_name, self.build_name, self.build_number,
        '',
        None,  # don't fetch past json results archive
        test_results_map)

    # NOTE(review): duplicate of the identical assignment above;
    # harmless but redundant.
    failed_count_map = dict([(t, 1) for t in failed_tests])

    # Test incremental json results
    incremental_json = generator.GetJSON()
    self._VerifyJSONResults(
        tests_set,
        test_timings,
        failed_count_map,
        len(PASS_tests),
        len(DISABLED_tests),
        len(FLAKY_tests),
        len(DISABLED_tests | failed_tests),
        incremental_json,
        1)

    # We don't verify the results here, but at least we make sure the code
    # runs without errors.
    generator.GenerateJSONOutput()
    generator.GenerateTimesMSFile()

  def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
                         PASS_count, DISABLED_count, FLAKY_count,
                         fixable_count, json_obj, num_runs):
    # Aliasing to a short name for better access to its constants.
    JRG = json_results_generator.JSONResultsGeneratorBase

    self.assertIn(JRG.VERSION_KEY, json_obj)
    self.assertIn(self.builder_name, json_obj)

    buildinfo = json_obj[self.builder_name]
    self.assertIn(JRG.FIXABLE, buildinfo)
    self.assertIn(JRG.TESTS, buildinfo)
    self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
    self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)

    if tests_set or DISABLED_count:
      # Aggregate the per-build modifier summaries into one count map.
      fixable = {}
      # NOTE(review): .iteritems() is Python-2-only.
      for fixable_items in buildinfo[JRG.FIXABLE]:
        for (result_type, count) in fixable_items.iteritems():
          if result_type in fixable:
            fixable[result_type] = fixable[result_type] + count
          else:
            fixable[result_type] = count

      if PASS_count:
        self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
      else:
        self.assertTrue(JRG.PASS_RESULT not in fixable or
                        fixable[JRG.PASS_RESULT] == 0)
      if DISABLED_count:
        self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
      else:
        self.assertTrue(JRG.SKIP_RESULT not in fixable or
                        fixable[JRG.SKIP_RESULT] == 0)
      if FLAKY_count:
        self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
      else:
        self.assertTrue(JRG.FLAKY_RESULT not in fixable or
                        fixable[JRG.FLAKY_RESULT] == 0)

    if failed_count_map:
      tests = buildinfo[JRG.TESTS]
      # NOTE(review): .iterkeys() is Python-2-only.
      for test_name in failed_count_map.iterkeys():
        test = self._FindTestInTrie(test_name, tests)

        failed = 0
        for result in test[JRG.RESULTS]:
          if result[1] == JRG.FAIL_RESULT:
            failed += result[0]
        self.assertEqual(failed_count_map[test_name], failed)

        timing_count = 0
        for timings in test[JRG.TIMES]:
          if timings[1] == test_timings[test_name]:
            timing_count = timings[0]
        self.assertEqual(1, timing_count)

    if fixable_count:
      self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)

  def _FindTestInTrie(self, path, trie):
    # Descend the trie along '/'-separated path segments, asserting
    # each level exists.
    nodes = path.split('/')
    sub_trie = trie
    for node in nodes:
      self.assertIn(node, sub_trie)
      sub_trie = sub_trie[node]
    return sub_trie

  def testJSONGeneration(self):
    self._TestJSONGeneration([], [])
    self._TestJSONGeneration(['A1', 'B1'], [])
    self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
    self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
    self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
    self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
    self._TestJSONGeneration(
        ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
        ['FAILS_D6'])

    # Generate JSON with the same test sets. (Both incremental results and
    # archived results must be updated appropriately.)
    self._TestJSONGeneration(
        ['A', 'FLAKY_B', 'DISABLED_C'],
        ['FAILS_D', 'FLAKY_E'])
    self._TestJSONGeneration(
        ['A', 'DISABLED_C', 'FLAKY_E'],
        ['FLAKY_B', 'FAILS_D'])
    self._TestJSONGeneration(
        ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
        ['A', 'FLAKY_E'])

  # NOTE(review): method name typo ("JSN" for "JSON") kept — renaming
  # would change the published test id.
  def testHierarchicalJSNGeneration(self):
    # FIXME: Re-work tests to be more comprehensible and comprehensive.
    self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])

  def testTestTimingsTrie(self):
    individual_test_timings = []
    individual_test_timings.append(
        json_results_generator.TestResult(
            'foo/bar/baz.html',
            elapsed_time=1.2))
    individual_test_timings.append(
        json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
    trie = json_results_generator.TestTimingsTrie(individual_test_timings)

    # Times come back floored to integer milliseconds.
    expected_trie = {
        'bar.html': 0,
        'foo': {
            'bar': {
                'baz.html': 1200,
            }
        }
    }

    self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py
new file mode 100644
index 0000000000..b68a898b7d
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/flakiness_dashboard/results_uploader.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml
+
+
+from devil.utils import cmd_helper
+from pylib.constants import host_paths
+from pylib.results.flakiness_dashboard import json_results_generator
+from pylib.utils import repo_utils
+
+
+
class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
  """Writes test results to a JSON file and handles uploading that file to
  the test results server.
  """

  def __init__(self, builder_name, build_name, build_number, tmp_folder,
               test_results_map, test_results_server, test_type, master_name):
    super(JSONResultsGenerator, self).__init__(
        builder_name=builder_name,
        build_name=build_name,
        build_number=build_number,
        results_file_base_path=tmp_folder,
        builder_base_url=None,
        test_results_map=test_results_map,
        svn_repositories=(('webkit', 'third_party/WebKit'),
                          ('chrome', '.')),
        test_results_server=test_results_server,
        test_type=test_type,
        master_name=master_name)

  #override
  def _GetModifierChar(self, test_name):
    """Returns the stored modifier directly; unlike the base class,
    AddResults records the result char itself as the modifier."""
    if test_name not in self._test_results_map:
      return self.__class__.NO_DATA_RESULT

    return self._test_results_map[test_name].modifier

  #override
  def _GetSVNRevision(self, in_directory):
    """Returns the git/svn revision for the given directory.

    Args:
      in_directory: The directory relative to src.
    """
    def _is_git_directory(in_directory):
      """Returns true if the given directory is in a git repository.

      Args:
        in_directory: The directory path to be tested.
      """
      if os.path.exists(os.path.join(in_directory, '.git')):
        return True
      parent = os.path.dirname(in_directory)
      if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory:
        return False
      return _is_git_directory(parent)

    in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory)

    if not os.path.exists(os.path.join(in_directory, '.svn')):
      if _is_git_directory(in_directory):
        return repo_utils.GetGitHeadSHA1(in_directory)
      return ''

    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'],
                                     cwd=in_directory)
    try:
      dom = xml.dom.minidom.parseString(output)
      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
    except xml.parsers.expat.ExpatError:
      # Malformed svn output: report the revision as unknown.
      return ''
    # Bug fix: removed the unreachable trailing `return ''` — both the
    # try and except branches already return.
+
+
class ResultsUploader(object):
  """Handles uploading buildbot tests results to the flakiness dashboard."""

  def __init__(self, tests_type):
    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
    self._master_name = os.environ.get('BUILDBOT_MASTERNAME')
    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
    self._tests_type = tests_type
    self._build_name = None

    if not self._build_number or not self._builder_name:
      # Bug fix: the two literals previously concatenated without a
      # separating space ("...serverfrom your local machine").
      raise Exception('You should not be uploading tests results to the '
                      'server from your local machine.')

    upstream = (tests_type != 'Chromium_Android_Instrumentation')
    if not upstream:
      self._build_name = 'chromium-android'
      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
      if not buildbot_branch:
        buildbot_branch = 'master'
      else:
        # Ensure there's no leading "origin/"
        buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)

    self._test_results_map = {}

  def AddResults(self, test_results):
    """Convert |test_results| into TestResult objects keyed by name."""
    # TODO(frankf): Differentiate between fail/crash/timeouts.
    conversion_map = [
        (test_results.GetPass(), False,
         json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
        (test_results.GetFail(), True,
         json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        (test_results.GetCrash(), True,
         json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        (test_results.GetTimeout(), True,
         json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        (test_results.GetUnknown(), True,
         json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
    ]

    for results_list, failed, modifier in conversion_map:
      for single_test_result in results_list:
        test_result = json_results_generator.TestResult(
            test=single_test_result.GetName(),
            failed=failed,
            elapsed_time=single_test_result.GetDuration() / 1000)
        # The WebKit TestResult object sets the modifier based on the
        # test name. Since we don't use the same test naming convention
        # as WebKit the modifier will be wrong, so we need to overwrite
        # it.
        test_result.modifier = modifier

        self._test_results_map[single_test_result.GetName()] = test_result

  def Upload(self, test_results_server):
    """Generate the JSON files in a temp dir and upload them."""
    if not self._test_results_map:
      return

    tmp_folder = tempfile.mkdtemp()

    try:
      results_generator = JSONResultsGenerator(
          builder_name=self._builder_name,
          build_name=self._build_name,
          build_number=self._build_number,
          tmp_folder=tmp_folder,
          test_results_map=self._test_results_map,
          test_results_server=test_results_server,
          test_type=self._tests_type,
          master_name=self._master_name)

      json_files = ["incremental_results.json", "times_ms.json"]
      results_generator.GenerateJSONOutput()
      results_generator.GenerateTimesMSFile()
      results_generator.UploadJSONFiles(json_files)
    except Exception as e:  # pylint: disable=broad-except
      logging.error("Uploading results to test server failed: %s.", e)
    finally:
      # Always clean up the temp dir, even when upload fails.
      shutil.rmtree(tmp_folder)
+
+
def Upload(results, flakiness_dashboard_server, test_type):
  """Reports test results to the flakiness dashboard for Chrome for Android.

  Args:
    results: test results.
    flakiness_dashboard_server: the server to upload the results to.
    test_type: the type of the tests (as displayed by the flakiness
      dashboard).
  """
  results_uploader = ResultsUploader(test_type)
  results_uploader.AddResults(results)
  results_uploader.Upload(flakiness_dashboard_server)
diff --git a/deps/v8/build/android/pylib/results/json_results.py b/deps/v8/build/android/pylib/results/json_results.py
new file mode 100644
index 0000000000..6a10ba4bc9
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/json_results.py
@@ -0,0 +1,154 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import itertools
+import json
+import logging
+
+from pylib.base import base_test_result
+
def GenerateResultsDict(test_run_results, global_tags=None):
  """Create a results dict from |test_run_results| suitable for writing to JSON.

  Args:
    test_run_results: a list of base_test_result.TestRunResults objects,
      or a list of lists of them (one inner list per sharded iteration).
    global_tags: optional list recorded verbatim under 'global_tags'.

  Returns:
    A results dict that mirrors the one generated by
    base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
  """
  # Example json output.
  # {
  #   "global_tags": [],
  #   "all_tests": [
  #     "test1",
  #     "test2",
  #    ],
  #   "disabled_tests": [],
  #   "per_iteration_data": [
  #     {
  #       "test1": [
  #         {
  #           "status": "SUCCESS",
  #           "elapsed_time_ms": 1,
  #           "output_snippet": "",
  #           "output_snippet_base64": "",
  #           "losless_snippet": "",
  #         },
  #         ...
  #       ],
  #       "test2": [
  #         {
  #           "status": "FAILURE",
  #           "elapsed_time_ms": 12,
  #           "output_snippet": "",
  #           "output_snippet_base64": "",
  #           "losless_snippet": "",
  #         },
  #         ...
  #       ],
  #     },
  #     {
  #       "test1": [
  #         {
  #           "status": "SUCCESS",
  #           "elapsed_time_ms": 1,
  #           "output_snippet": "",
  #           "output_snippet_base64": "",
  #           "losless_snippet": "",
  #         },
  #       ],
  #       "test2": [
  #         {
  #           "status": "FAILURE",
  #           "elapsed_time_ms": 12,
  #           "output_snippet": "",
  #           "output_snippet_base64": "",
  #           "losless_snippet": "",
  #         },
  #       ],
  #     },
  #     ...
  #   ],
  # }

  all_tests = set()
  per_iteration_data = []
  test_run_links = {}

  for test_run_result in test_run_results:
    iteration_data = collections.defaultdict(list)
    if isinstance(test_run_result, list):
      # A sharded iteration: merge results and links across the shards.
      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
      for tr in test_run_result:
        test_run_links.update(tr.GetLinks())

    else:
      results_iterable = test_run_result.GetAll()
      test_run_links.update(test_run_result.GetLinks())

    for r in results_iterable:
      result_dict = {
          'status': r.GetType(),
          'elapsed_time_ms': r.GetDuration(),
          # NOTE(review): `unicode` is Python-2-only; presumably
          # GetLog() returns a byte string decoded with replacement
          # here — confirm before any Python 3 migration.
          'output_snippet': unicode(r.GetLog(), errors='replace'),
          'losless_snippet': True,
          'output_snippet_base64': '',
          'links': r.GetLinks(),
      }
      iteration_data[r.GetName()].append(result_dict)

    # NOTE(review): .iterkeys() is Python-2-only.
    all_tests = all_tests.union(set(iteration_data.iterkeys()))
    per_iteration_data.append(iteration_data)

  return {
      'global_tags': global_tags or [],
      'all_tests': sorted(list(all_tests)),
      # TODO(jbudorick): Add support for disabled tests within base_test_result.
      'disabled_tests': [],
      'per_iteration_data': per_iteration_data,
      'links': test_run_links,
  }
+
+
def GenerateJsonResultsFile(test_run_result, file_path, global_tags=None,
                            **kwargs):
  """Write |test_run_result| to |file_path| as JSON.

  This emulates the format of the JSON emitted by
  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.

  Args:
    test_run_result: a base_test_result.TestRunResults object.
    file_path: The path to the JSON file to write.
    global_tags: optional list recorded under 'global_tags'.
    **kwargs: forwarded to json.dumps (e.g. indent).
  """
  with open(file_path, 'w') as out_file:
    results_dict = GenerateResultsDict(test_run_result,
                                       global_tags=global_tags)
    out_file.write(json.dumps(results_dict, **kwargs))
  logging.info('Generated json results file at %s', file_path)
+
+
def ParseResultsFromJson(json_results):
  """Creates a list of BaseTestResult objects from JSON.

  Args:
    json_results: A JSON dict in the format created by
      GenerateJsonResultsFile.

  Returns:
    A flat list of base_test_result.BaseTestResult, one per recorded
    test run, with unrecognized statuses mapped to ResultType.UNKNOWN.
  """

  def string_as_status(s):
    # Map any unrecognized status string to UNKNOWN rather than raising.
    if s in base_test_result.ResultType.GetTypes():
      return s
    return base_test_result.ResultType.UNKNOWN

  results_list = []
  testsuite_runs = json_results['per_iteration_data']
  for testsuite_run in testsuite_runs:
    # .items() works on both Python 2 and 3; .iteritems() was 2-only.
    for test, test_runs in testsuite_run.items():
      results_list.extend(
          [base_test_result.BaseTestResult(test,
                                           string_as_status(tr['status']),
                                           duration=tr['elapsed_time_ms'])
           for tr in test_runs])
  return results_list
diff --git a/deps/v8/build/android/pylib/results/json_results_test.py b/deps/v8/build/android/pylib/results/json_results_test.py
new file mode 100755
index 0000000000..68e71f5785
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
class JsonResultsTest(unittest.TestCase):
  """Unit tests for json_results.GenerateResultsDict.

  Each test builds base_test_result objects and checks the dict produced by
  GenerateResultsDict, which mirrors the JSON format of
  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
  """

  def testGenerateResultsDict_passedResult(self):
    result = base_test_result.BaseTestResult(
        'test.package.TestName', base_test_result.ResultType.PASS)

    all_results = base_test_result.TestRunResults()
    all_results.AddResult(result)

    results_dict = json_results.GenerateResultsDict([all_results])
    self.assertEquals(
        ['test.package.TestName'],
        results_dict['all_tests'])
    self.assertEquals(1, len(results_dict['per_iteration_data']))

    iteration_result = results_dict['per_iteration_data'][0]
    self.assertTrue('test.package.TestName' in iteration_result)
    self.assertEquals(1, len(iteration_result['test.package.TestName']))

    test_iteration_result = iteration_result['test.package.TestName'][0]
    self.assertTrue('status' in test_iteration_result)
    self.assertEquals('SUCCESS', test_iteration_result['status'])

  def testGenerateResultsDict_skippedResult(self):
    result = base_test_result.BaseTestResult(
        'test.package.TestName', base_test_result.ResultType.SKIP)

    all_results = base_test_result.TestRunResults()
    all_results.AddResult(result)

    results_dict = json_results.GenerateResultsDict([all_results])
    self.assertEquals(
        ['test.package.TestName'],
        results_dict['all_tests'])
    self.assertEquals(1, len(results_dict['per_iteration_data']))

    iteration_result = results_dict['per_iteration_data'][0]
    self.assertTrue('test.package.TestName' in iteration_result)
    self.assertEquals(1, len(iteration_result['test.package.TestName']))

    test_iteration_result = iteration_result['test.package.TestName'][0]
    self.assertTrue('status' in test_iteration_result)
    self.assertEquals('SKIPPED', test_iteration_result['status'])

  def testGenerateResultsDict_failedResult(self):
    result = base_test_result.BaseTestResult(
        'test.package.TestName', base_test_result.ResultType.FAIL)

    all_results = base_test_result.TestRunResults()
    all_results.AddResult(result)

    results_dict = json_results.GenerateResultsDict([all_results])
    self.assertEquals(
        ['test.package.TestName'],
        results_dict['all_tests'])
    self.assertEquals(1, len(results_dict['per_iteration_data']))

    iteration_result = results_dict['per_iteration_data'][0]
    self.assertTrue('test.package.TestName' in iteration_result)
    self.assertEquals(1, len(iteration_result['test.package.TestName']))

    test_iteration_result = iteration_result['test.package.TestName'][0]
    self.assertTrue('status' in test_iteration_result)
    self.assertEquals('FAILURE', test_iteration_result['status'])

  def testGenerateResultsDict_duration(self):
    result = base_test_result.BaseTestResult(
        'test.package.TestName', base_test_result.ResultType.PASS, duration=123)

    all_results = base_test_result.TestRunResults()
    all_results.AddResult(result)

    results_dict = json_results.GenerateResultsDict([all_results])
    self.assertEquals(
        ['test.package.TestName'],
        results_dict['all_tests'])
    self.assertEquals(1, len(results_dict['per_iteration_data']))

    iteration_result = results_dict['per_iteration_data'][0]
    self.assertTrue('test.package.TestName' in iteration_result)
    self.assertEquals(1, len(iteration_result['test.package.TestName']))

    test_iteration_result = iteration_result['test.package.TestName'][0]
    self.assertTrue('elapsed_time_ms' in test_iteration_result)
    self.assertEquals(123, test_iteration_result['elapsed_time_ms'])

  def testGenerateResultsDict_multipleResults(self):
    result1 = base_test_result.BaseTestResult(
        'test.package.TestName1', base_test_result.ResultType.PASS)
    result2 = base_test_result.BaseTestResult(
        'test.package.TestName2', base_test_result.ResultType.PASS)

    all_results = base_test_result.TestRunResults()
    all_results.AddResult(result1)
    all_results.AddResult(result2)

    results_dict = json_results.GenerateResultsDict([all_results])
    self.assertEquals(
        ['test.package.TestName1', 'test.package.TestName2'],
        results_dict['all_tests'])

    self.assertTrue('per_iteration_data' in results_dict)
    iterations = results_dict['per_iteration_data']
    self.assertEquals(1, len(iterations))

    expected_tests = set([
        'test.package.TestName1',
        'test.package.TestName2',
    ])

    for test_name, iteration_result in iterations[0].iteritems():
      self.assertTrue(test_name in expected_tests)
      expected_tests.remove(test_name)
      self.assertEquals(1, len(iteration_result))

      test_iteration_result = iteration_result[0]
      self.assertTrue('status' in test_iteration_result)
      self.assertEquals('SUCCESS', test_iteration_result['status'])

  def testGenerateResultsDict_passOnRetry(self):
    raw_results = []

    result1 = base_test_result.BaseTestResult(
        'test.package.TestName1', base_test_result.ResultType.FAIL)
    run_results1 = base_test_result.TestRunResults()
    run_results1.AddResult(result1)
    raw_results.append(run_results1)

    result2 = base_test_result.BaseTestResult(
        'test.package.TestName1', base_test_result.ResultType.PASS)
    run_results2 = base_test_result.TestRunResults()
    run_results2.AddResult(result2)
    raw_results.append(run_results2)

    results_dict = json_results.GenerateResultsDict([raw_results])
    self.assertEquals(['test.package.TestName1'], results_dict['all_tests'])

    # Check that there's only one iteration.
    self.assertIn('per_iteration_data', results_dict)
    iterations = results_dict['per_iteration_data']
    self.assertEquals(1, len(iterations))

    # Check that test.package.TestName1 is the only test in the iteration.
    self.assertEquals(1, len(iterations[0]))
    self.assertIn('test.package.TestName1', iterations[0])

    # Check that there are two results for test.package.TestName1.
    actual_test_results = iterations[0]['test.package.TestName1']
    self.assertEquals(2, len(actual_test_results))

    # Check that the first result is a failure.
    self.assertIn('status', actual_test_results[0])
    self.assertEquals('FAILURE', actual_test_results[0]['status'])

    # Check that the second result is a success.
    self.assertIn('status', actual_test_results[1])
    self.assertEquals('SUCCESS', actual_test_results[1]['status'])

  def testGenerateResultsDict_globalTags(self):
    raw_results = []
    global_tags = ['UNRELIABLE_RESULTS']

    results_dict = json_results.GenerateResultsDict(
        [raw_results], global_tags=global_tags)
    self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags'])

  def testGenerateResultsDict_loslessSnippet(self):
    # 'losless' (sic) intentionally matches the key emitted by json_results.
    result = base_test_result.BaseTestResult(
        'test.package.TestName', base_test_result.ResultType.FAIL)
    log = 'blah-blah'
    result.SetLog(log)

    all_results = base_test_result.TestRunResults()
    all_results.AddResult(result)

    results_dict = json_results.GenerateResultsDict([all_results])
    self.assertEquals(
        ['test.package.TestName'],
        results_dict['all_tests'])
    self.assertEquals(1, len(results_dict['per_iteration_data']))

    iteration_result = results_dict['per_iteration_data'][0]
    self.assertTrue('test.package.TestName' in iteration_result)
    self.assertEquals(1, len(iteration_result['test.package.TestName']))

    test_iteration_result = iteration_result['test.package.TestName'][0]
    self.assertTrue('losless_snippet' in test_iteration_result)
    self.assertTrue(test_iteration_result['losless_snippet'])
    self.assertTrue('output_snippet' in test_iteration_result)
    self.assertEquals(log, test_iteration_result['output_snippet'])
    self.assertTrue('output_snippet_base64' in test_iteration_result)
    self.assertEquals('', test_iteration_result['output_snippet_base64'])
+
+
# Allow running this test file directly: `python json_results_test.py`.
if __name__ == '__main__':
  unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/results/presentation/__init__.py b/deps/v8/build/android/pylib/results/presentation/__init__.py
new file mode 100644
index 0000000000..a22a6ee39a
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/deps/v8/build/android/pylib/results/presentation/javascript/main_html.js b/deps/v8/build/android/pylib/results/presentation/javascript/main_html.js
new file mode 100644
index 0000000000..76f22f09d5
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/javascript/main_html.js
@@ -0,0 +1,214 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
function getArguments() {
  // Returns the URL arguments as a dictionary.
  // Fix: declare |args| with var; the original assignment without a
  // declaration leaked it into the global scope.
  var args = {};
  var s = location.search;
  if (s) {
    var vals = s.substring(1).split('&');
    for (var i = 0; i < vals.length; i++) {
      var pair = vals[i].split('=');
      args[pair[0]] = pair[1];
    }
  }
  return args;
}
+
// Shows or hides the suite summary table.
function showSuiteTable(show_the_table) {
  var suiteTable = document.getElementById('suite-table');
  suiteTable.style.display = show_the_table ? 'table' : 'none';
}
+
// Shows or hides the per-test results table.
function showTestTable(show_the_table) {
  var testTable = document.getElementById('test-table');
  testTable.style.display = show_the_table ? 'table' : 'none';
}
+
// Filters the test table to |suite_name|; the sentinel 'TOTAL' shows all.
function showTestsOfOneSuiteOnly(suite_name) {
  setTitle('Test Results of Suite: ' + suite_name);
  // Fix: declare with var; the original leaked show_all into global scope.
  var show_all = (suite_name == 'TOTAL');
  var testTableBlocks = document.getElementById('test-table')
      .getElementsByClassName('row_block');
  Array.prototype.slice.call(testTableBlocks)
      .forEach(function(testTableBlock) {
        if (!show_all) {
          // The first cell of a block holds the test name; the block belongs
          // to the suite when that name starts with |suite_name|.
          var table_block_in_suite = (testTableBlock.firstElementChild
              .firstElementChild.firstElementChild.innerHTML)
              .startsWith(suite_name);
          if (!table_block_in_suite) {
            testTableBlock.style.display = 'none';
            return;
          }
        }
        testTableBlock.style.display = 'table-row-group';
      });
  showTestTable(true);
  showSuiteTable(false);
  window.scrollTo(0, 0);
}
+
// Shows one suite's tests and pushes a history entry so the browser back
// button can return to the suite summary (see setBrowserBackButtonLogic).
function showTestsOfOneSuiteOnlyWithNewState(suite_name) {
  showTestsOfOneSuiteOnly(suite_name);
  history.pushState({suite: suite_name}, suite_name, '');
}
+
// Shows the suite summary table and hides the per-test table.
function showSuiteTableOnly() {
  setTitle('Suites Summary')
  showTestTable(false);
  showSuiteTable(true);
  window.scrollTo(0, 0);
}
+
// Shows the suite summary and replaces (not pushes) the current history
// entry, used for the initial page view.
function showSuiteTableOnlyWithReplaceState() {
  showSuiteTableOnly();
  history.replaceState({}, 'suite_table', '');
}
+
// Restores the view recorded in the history entry on back/forward
// navigation: entries without a suite fall back to the suite summary.
function setBrowserBackButtonLogic() {
  window.onpopstate = function(event) {
    if (!event.state || !event.state.suite) {
      showSuiteTableOnly();
    } else {
      showTestsOfOneSuiteOnly(event.state.suite);
    }
  };
}
+
// Sets the page's summary header text.
function setTitle(title) {
  document.getElementById('summary-header').textContent = title;
}
+
// Sorts the table containing |head| by |head|'s column, toggling direction
// on each call. Direction state lives in each header's data-asc-sorted
// attribute: 1 ascending, -1 descending, 0 unsorted.
function sortByColumn(head) {
  var table = head.parentNode.parentNode.parentNode;
  // Each <tbody class="row_block"> groups all runs of one test/suite; blocks
  // are sorted as units.
  var rowBlocks = Array.prototype.slice.call(
      table.getElementsByTagName('tbody'));

  // Determine whether to asc or desc and set arrows.
  var headers = head.parentNode.getElementsByTagName('th');
  var headIndex = Array.prototype.slice.call(headers).indexOf(head);
  var asc = -1;
  for (var i = 0; i < headers.length; i++) {
    if (headers[i].dataset.ascSorted != 0) {
      // Hide the arrow of the previously sorted column.
      if (headers[i].dataset.ascSorted == 1) {
        headers[i].getElementsByClassName('up')[0]
            .style.display = 'none';
      } else {
        headers[i].getElementsByClassName('down')[0]
            .style.display = 'none';
      }
      if (headers[i] == head) {
        // Re-sorting the same column flips the direction.
        asc = headers[i].dataset.ascSorted * -1;
      } else {
        headers[i].dataset.ascSorted = 0;
      }
      break;
    }
  }
  headers[headIndex].dataset.ascSorted = asc;
  if (asc == 1) {
    headers[headIndex].getElementsByClassName('up')[0]
        .style.display = 'inline';
  } else {
    headers[headIndex].getElementsByClassName('down')[0]
        .style.display = 'inline';
  }

  // Sort the array by the specified column number (col) and order (asc).
  rowBlocks.sort(function (a, b) {
    // Blocks hidden by the suite filter sort to the front, out of the way.
    if (a.style.display == 'none') {
      return -1;
    } else if (b.style.display == 'none') {
      return 1;
    }
    var a_rows = Array.prototype.slice.call(a.children);
    var b_rows = Array.prototype.slice.call(b.children);
    if (head.className == "text") {
      // If sorting by text, we only compare the entry on the first row.
      var aInnerHTML = a_rows[0].children[headIndex].innerHTML;
      var bInnerHTML = b_rows[0].children[headIndex].innerHTML;
      return (aInnerHTML == bInnerHTML) ? 0 : (
          (aInnerHTML > bInnerHTML) ? asc : -1 * asc);
    } else if (head.className == "number") {
      // If sorting by number, for example, duration,
      // we will sum up the durations of different test runs
      // for one specific test case and sort by the sum.
      var avalue = 0;
      var bvalue = 0;
      // Rows after the first omit the rowspan'd name cell, so their cells
      // are shifted left by one (hence headIndex - 1).
      a_rows.forEach(function (row, i) {
        var index = (i > 0) ? headIndex - 1 : headIndex;
        avalue += Number(row.children[index].innerHTML);
      });
      b_rows.forEach(function (row, i) {
        var index = (i > 0) ? headIndex - 1 : headIndex;
        bvalue += Number(row.children[index].innerHTML);
      });
    } else if (head.className == "flaky") {
      // Flakiness = (#total - #success - #skipped) / (#total - #skipped)
      var a_success_or_skipped = 0;
      var a_skipped = 0;
      var b_success_or_skipped = 0;
      var b_skipped = 0;
      a_rows.forEach(function (row, i) {
        var index = (i > 0) ? headIndex - 1 : headIndex;
        var status = row.children[index].innerHTML.trim();
        if (status == 'SUCCESS') {
          a_success_or_skipped += 1;
        }
        if (status == 'SKIPPED') {
          a_success_or_skipped += 1;
          a_skipped += 1;
        }
      });
      b_rows.forEach(function (row, i) {
        var index = (i > 0) ? headIndex - 1 : headIndex;
        var status = row.children[index].innerHTML.trim();
        if (status == 'SUCCESS') {
          b_success_or_skipped += 1;
        }
        if (status == 'SKIPPED') {
          b_success_or_skipped += 1;
          b_skipped += 1;
        }
      });
      var atotal_minus_skipped = a_rows.length - a_skipped;
      var btotal_minus_skipped = b_rows.length - b_skipped;

      // All-skipped blocks get -1 so they sort before any real flakiness.
      var avalue = ((atotal_minus_skipped == 0) ? -1 :
          (a_rows.length - a_success_or_skipped) / atotal_minus_skipped);
      var bvalue = ((btotal_minus_skipped == 0) ? -1 :
          (b_rows.length - b_success_or_skipped) / btotal_minus_skipped);
    }
    // NOTE(review): for header classes other than text/number/flaky,
    // avalue/bvalue stay undefined and this yields NaN (order unchanged) —
    // presumably every sortable header uses one of those classes; verify.
    return asc * (avalue - bvalue);
  });

  // Re-append in sorted order; appendChild moves existing nodes in place.
  for (var i = 0; i < rowBlocks.length; i++) {
    table.appendChild(rowBlocks[i]);
  }
}
+
// Initial sort: order the suite table by its failed-test-count column.
function sortSuiteTableByFailedTestCases() {
  sortByColumn(document.getElementById('number_fail_tests'));
}
+
// Makes any table cell containing exactly one link act as that link:
// clicking the cell follows it, and hover styling mirrors the link's.
function setTableCellsAsClickable() {
  const tableCells = document.getElementsByTagName('td');
  for (let i = 0; i < tableCells.length; i++) {
    const links = tableCells[i].getElementsByTagName('a');
    // Only make the cell clickable if there is only one link.
    if (links.length == 1) {
      tableCells[i].addEventListener('click', function() {
        links[0].click();
      });
      tableCells[i].addEventListener('mouseover', function() {
        tableCells[i].style.cursor = 'pointer';
        links[0].style.textDecoration = 'underline';
      });
      tableCells[i].addEventListener('mouseout', function() {
        tableCells[i].style.cursor = 'initial';
        links[0].style.textDecoration = 'initial';
      });
    }
  }
}
diff --git a/deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py b/deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py
new file mode 100755
index 0000000000..5dba4df326
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/standard_gtest_merge.py
@@ -0,0 +1,168 @@
+#! /usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import sys
+
+
def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Args:
    summary_json: Path to the Swarming summary JSON listing all shards.
    jsons_to_merge: Paths to candidate per-shard output.json files.

  Returns:
    Dict with merged test output on success or None on failure. Emits
    annotations.

  Raises:
    Exception: If the summary JSON cannot be read or parsed.
  """
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    raise Exception('Summary json cannot be loaded.')

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
      'links': set()
  }
  for index, result in enumerate(summary['shards']):
    if result is None:
      merged['missing_shards'].append(index)
      continue

    # Author note: this code path doesn't trigger convert_to_old_format() in
    # client/swarming.py, which means the state enum is saved in its string
    # name form, not in the number form.
    state = result.get('state')
    if state == u'BOT_DIED':
      print >> sys.stderr, 'Shard #%d had a Swarming internal failure' % index
    elif state == u'EXPIRED':
      print >> sys.stderr, 'There wasn\'t enough capacity to run your test'
    elif state == u'TIMED_OUT':
      # Fix: the original implicit string concatenation produced
      # "...allocated timeEither it ran..."; add the missing separator.
      print >> sys.stderr, (
          'Test runtime exceeded allocated time. '
          'Either it ran for too long (hard timeout) or it didn\'t produce '
          'I/O for an extended period of time (I/O timeout)')
    elif state != u'COMPLETED':
      print >> sys.stderr, 'Invalid Swarming task state: %s' % state

    json_data, err_msg = load_shard_json(index, result.get('task_id'),
                                         jsons_to_merge)
    if json_data:
      # Set-like fields.
      for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
        # Fix: the default [] belongs inside .get(); the original passed it
        # as a second iterable to set.update() and raised TypeError
        # (update(None)) whenever |key| was absent from the shard's JSON.
        merged[key].update(json_data.get(key, []))

      # 'per_iteration_data' is a list of dicts. Dicts should be merged
      # together, not the 'per_iteration_data' list itself.
      merged['per_iteration_data'] = merge_list_of_dicts(
          merged['per_iteration_data'], json_data.get('per_iteration_data', []))
    else:
      merged['missing_shards'].append(index)
      print >> sys.stderr, 'No result was found: %s' % err_msg

  # If some shards are missing, make it known. Continue parsing anyway. Step
  # should be red anyway, since swarming.py return non-zero exit code in that
  # case.
  if merged['missing_shards']:
    as_str = ', '.join([str(shard) for shard in merged['missing_shards']])
    print >> sys.stderr, ('some shards did not complete: %s' % as_str)
    # Not all tests run, combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to jsonish dict.
  for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
    merged[key] = sorted(merged[key])
  return merged
+
+
+OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024 # 100 MB
+
+
def load_shard_json(index, task_id, jsons_to_merge):
  """Reads JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for, this is for old api.
    task_id: The directory of the shard to load data for, this is for new api.
    jsons_to_merge: Candidate output.json paths; the shard's file is matched
      by its parent directory name (either str(index) or task_id).

  Returns: A tuple containing:
    * The contents of path, deserialized into a python object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  # A shard's output.json lives in a directory named after either the shard
  # index (old api) or the swarming task id (new api).
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  if not matching_json_files:
    print >> sys.stderr, 'shard %s test output missing' % index
    return (None, 'shard %s test output was missing' % index)
  elif len(matching_json_files) > 1:
    print >> sys.stderr, 'duplicate test output for shard %s' % index
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    # Guard against pathological output files blowing up the merge step.
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      print >> sys.stderr, 'output.json is %d bytes. Max size is %d' % (
          filesize, OUTPUT_JSON_SIZE_LIMIT)
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    print >> sys.stderr, 'Missing or invalid gtest JSON file: %s' % path
    print >> sys.stderr, '%s: %s' % (type(e).__name__, e)

  return (None, 'shard %s test output was missing or invalid' % index)
+
+
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc.

  Missing entries on either side are treated as empty dicts, so the output
  has max(len(left), len(right)) entries; on key collisions the value from
  |right| wins.

  Args:
    left: A list of dicts.
    right: A list of dicts.

  Returns:
    A new list of merged dicts; neither input is modified.
  """
  output = []
  # range (not xrange) behaves identically here on Python 2 and keeps this
  # pure helper usable under Python 3.
  for i in range(max(len(left), len(right))):
    left_dict = left[i] if i < len(left) else {}
    right_dict = right[i] if i < len(right) else {}
    merged_dict = left_dict.copy()
    merged_dict.update(right_dict)
    output.append(merged_dict)
  return output
+
+
def standard_gtest_merge(
    output_json, summary_json, jsons_to_merge):
  """Merges per-shard gtest JSON outputs and writes the combined file.

  Args:
    output_json: Path of the merged JSON file to write.
    summary_json: Path to the Swarming summary JSON.
    jsons_to_merge: Paths of candidate per-shard output.json files.

  Returns:
    0, used as the process exit code.
  """
  output = merge_shard_results(summary_json, jsons_to_merge)
  # NOTE(review): 'wb' + json.dump is fine under Python 2 (which this
  # script's `print >>` syntax implies); Python 3's json.dump needs a
  # text-mode file.
  with open(output_json, 'wb') as f:
    json.dump(output, f)

  return 0
+
+
def main(raw_args):
  """Command-line entry point: parses arguments and runs the merge."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--summary-json')
  parser.add_argument('-o', '--output-json', required=True)
  parser.add_argument('jsons_to_merge', nargs='*')

  args = parser.parse_args(raw_args)

  return standard_gtest_merge(
      args.output_json, args.summary_json, args.jsons_to_merge)
+
+
# Script entry point; exit status comes from standard_gtest_merge (0).
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/pylib/results/presentation/template/main.html b/deps/v8/build/android/pylib/results/presentation/template/main.html
new file mode 100644
index 0000000000..5c8df5e121
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/template/main.html
@@ -0,0 +1,97 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <style>
+ body {
+ background-color: #fff;
+ color: #333;
+ font-family: Verdana, sans-serif;
+ font-size: 10px;
+ margin-left: 30px;
+ margin-right: 30px;
+ margin-top: 20px;
+ margin-bottom: 50px;
+ padding: 0;
+ }
+ table, th, td {
+ border: 1px solid black;
+ border-collapse: collapse;
+ text-align: center;
+ }
+ table, td {
+ padding: 0.1em 1em 0.1em 1em;
+ }
+ th {
+ cursor: pointer;
+ padding: 0.2em 1.5em 0.2em 1.5em;
+ }
+ table {
+ width: 100%;
+ }
+ .center {
+ text-align: center;
+ }
+ .left {
+ text-align: left;
+ }
+ a {
+ text-decoration: none;
+ }
+ a:hover {
+ text-decoration: underline;
+ cursor: pointer;
+ }
+ a:link,a:visited,a:active {
+ color: #444;
+ }
+ .row_block:hover {
+ background-color: #F6F6F6;
+ }
+ .skipped, .success, .failure {
+ border-color: #000000;
+ }
+ .success {
+ color: #000;
+ background-color: #8d4;
+ }
+ .failure {
+ color: #000;
+ background-color: #e88;
+ }
+ .skipped {
+ color: #000;
+ background: #AADDEE;
+ }
+ </style>
+ <script type="text/javascript">
+ {% include "javascript/main_html.js" %}
+ </script>
+ </head>
+ <body>
+ <div>
+ <h2 id="summary-header"></h2>
+ {% for tb_value in tb_values %}
+ {% include 'template/table.html' %}
+ {% endfor %}
+ </div>
    {% if feedback_url %}
    <br />
    <a href="{{feedback_url}}" target="_blank"><b>Feedback</b></a>
    {%- endif %}
    <script>
      sortSuiteTableByFailedTestCases();
      showSuiteTableOnlyWithReplaceState();
      // Enable sorting for each column of tables.
      Array.prototype.slice.call(document.getElementsByTagName('th'))
          .forEach(function(head) {
            head.addEventListener(
                "click",
                function() { sortByColumn(head); });
          }
      );
      setBrowserBackButtonLogic();
      setTableCellsAsClickable();
    </script>
  </body>
</html>
diff --git a/deps/v8/build/android/pylib/results/presentation/template/table.html b/deps/v8/build/android/pylib/results/presentation/template/table.html
new file mode 100644
index 0000000000..4240043490
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/template/table.html
@@ -0,0 +1,60 @@
+<table id="{{tb_value.table_id}}" style="display:none;">
+ <thead class="heads">
+ <tr>
+ {% for cell in tb_value.table_headers -%}
+ <th class="{{cell.class}}" id="{{cell.data}}" data-asc-sorted=0>
+ {{cell.data}}
+ <span class="up" style="display:none;"> &#8593</span>
+ <span class="down" style="display:none;"> &#8595</span>
+ </th>
+ {%- endfor %}
+ </tr>
+ </thead>
+ {% for block in tb_value.table_row_blocks -%}
+ <tbody class="row_block">
+ {% for row in block -%}
+ <tr class="{{tb_value.table_id}}-body-row">
+ {% for cell in row -%}
+ {% if cell.rowspan -%}
+ <td rowspan="{{cell.rowspan}}" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+ {%- else -%}
+ <td rowspan="1" class="{{tb_value.table_id}}-body-column-{{loop.index0}} {{cell.class}}">
+ {%- endif %}
+ {% if cell.cell_type == 'pre' -%}
+ <pre>{{cell.data}}</pre>
+ {%- elif cell.cell_type == 'links' -%}
+ {% for link in cell.links -%}
+ <a href="{{link.href}}" target="{{link.target}}">{{link.data}}</a>
+ {% if not loop.last -%}
+ <br />
+ {%- endif %}
+ {%- endfor %}
+ {%- elif cell.cell_type == 'action' -%}
+ <a onclick="{{cell.action}}">{{cell.data}}</a>
+ {%- else -%}
+ {{cell.data}}
+ {%- endif %}
+ </td>
+ {%- endfor %}
+ </tr>
+ {%- endfor %}
+ </tbody>
+ {%- endfor %}
+ <tfoot>
+ <tr>
+ {% for cell in tb_value.table_footer -%}
+ <td class="{{tb_value.table_id}}-summary-column-{{loop.index0}} {{cell.class}}">
+ {% if cell.cell_type == 'links' -%}
+ {% for link in cell.links -%}
+ <a href="{{link.href}}" target="{{link.target}}"><b>{{link.data}}</b></a>
+ {%- endfor %}
+ {%- elif cell.cell_type == 'action' -%}
+ <a onclick="{{cell.action}}">{{cell.data}}</a>
+ {%- else -%}
+ <b>{{cell.data}}</b>
+ {%- endif %}
+ </td>
+ {%- endfor %}
+ </tr>
+ </tfoot>
+</table>
diff --git a/deps/v8/build/android/pylib/results/presentation/test_results_presentation.py b/deps/v8/build/android/pylib/results/presentation/test_results_presentation.py
new file mode 100755
index 0000000000..82d6c88470
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/presentation/test_results_presentation.py
@@ -0,0 +1,543 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import json
+import logging
+import tempfile
+import os
+import sys
+import urllib
+
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.abspath(os.path.join(
+ CURRENT_DIR, '..', '..', '..', '..', '..'))
+
+sys.path.append(os.path.join(BASE_DIR, 'build', 'android'))
+from pylib.results.presentation import standard_gtest_merge
+from pylib.utils import google_storage_helper # pylint: disable=import-error
+
+sys.path.append(os.path.join(BASE_DIR, 'third_party'))
+import jinja2 # pylint: disable=import-error
+JINJA_ENVIRONMENT = jinja2.Environment(
+ loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+ autoescape=True)
+
+
def cell(data, html_class='center'):
  """Builds a plain table-cell dict for the jinja templates.

  Args:
    data: Value rendered inside the cell.
    html_class: CSS class name(s) applied to the cell.

  Returns:
    A dict consumed by template/table.html.
  """
  return {'data': data, 'class': html_class}
+
+
def pre_cell(data, html_class='center'):
  """Builds a <pre>-formatted table-cell dict for the jinja templates.

  Args:
    data: Preformatted text rendered inside the cell.
    html_class: CSS class name(s) applied to the cell.

  Returns:
    A dict consumed by template/table.html.
  """
  formatted = {'cell_type': 'pre', 'class': html_class}
  formatted['data'] = data
  return formatted
+
+
class LinkTarget(object):
  """Values for the HTML <a target=...> attribute."""
  # Opens the linked document in a new window or tab.
  NEW_TAB = '_blank'
  # Opens the linked document in the same frame as it was clicked.
  CURRENT_TAB = '_self'
+
+
def link(data, href, target=LinkTarget.CURRENT_TAB):
  """Formats <a> tag data for processing in jinja template.

  Args:
    data: String link appears as on HTML page.
    href: URL where link goes.
    target: Where link should be opened (see LinkTarget).

  Returns:
    A dict consumed by template/table.html.
  """
  return {'data': data, 'href': href, 'target': target}
+
+
def links_cell(links, html_class='center', rowspan=None):
  """Formats a table cell containing one or more links.

  Args:
    links: List of link dictionaries. Use |link| function to generate them.
    html_class: Class for table cell.
    rowspan: Rowspan HTML attribute.

  Returns:
    A dict consumed by template/table.html.
  """
  return dict(
      cell_type='links',
      links=links,
      rowspan=rowspan,
      **{'class': html_class})
+
+
def action_cell(action, data, html_class):
  """Formats a table cell that runs a javascript action when clicked.

  Args:
    action: Javascript snippet to execute on click.
    data: Text shown in the cell.
    html_class: Class for table cell.

  Returns:
    A dict consumed by template/table.html.
  """
  formatted = {'cell_type': 'action'}
  formatted['action'] = action
  formatted['data'] = data
  formatted['class'] = html_class
  return formatted
+
+
def flakiness_dashbord_link(test_name, suite_name):
  """Returns the flakiness-dashboard URL for |test_name| in |suite_name|."""
  # urllib.urlencode is Python 2 only (urllib.parse.urlencode in Python 3);
  # this module targets Python 2 throughout.
  url_args = urllib.urlencode([
      ('testType', suite_name),
      ('tests', test_name)])
  return ('https://test-results.appspot.com/'
          'dashboards/flakiness_dashboard.html#%s' % url_args)
+
+
def logs_cell(result, test_name, suite_name):
  """Formats result logs data for processing in jinja template."""
  link_list = []
  result_link_dict = result.get('links', {})
  # A flakiness-dashboard link is always offered alongside any result links.
  result_link_dict['flakiness'] = flakiness_dashbord_link(
      test_name, suite_name)
  for name, href in sorted(result_link_dict.items()):
    link_list.append(link(
        data=name,
        href=href,
        target=LinkTarget.NEW_TAB))
  if link_list:
    return links_cell(link_list)
  else:
    return cell('(no logs)')
+
+
def code_search(test, cs_base_url):
  """Returns URL for test on codesearch."""
  # '#' separates class and method in java test names; codesearch wants '.'.
  query = test.replace('#', '.')
  return '{0}/?q={1}&type=cs'.format(cs_base_url, query)
+
+
def status_class(status):
  """Returns the HTML class for a test status string.

  Falsy or unrecognized statuses map to a 'failure ...' class; 'success'
  and 'skipped' map to themselves (lowercased).
  """
  if not status:
    return 'failure unknown'
  lowered = status.lower()
  if lowered in ('success', 'skipped'):
    return lowered
  return 'failure %s' % lowered
+
+
def create_test_table(results_dict, cs_base_url, suite_name):
  """Format test data for injecting into HTML table.

  Args:
    results_dict: Maps test name -> list of per-run result dicts (each with
      'status', 'elapsed_time_ms', 'output_snippet' and optional 'links').
    cs_base_url: Base URL of the codesearch instance for test-name links.
    suite_name: Suite name, used for the flakiness dashboard link.

  Returns:
    A (header_row, test_row_blocks) tuple for template/table.html.
  """

  header_row = [
      cell(data='test_name', html_class='text'),
      cell(data='status', html_class='flaky'),
      cell(data='elapsed_time_ms', html_class='number'),
      cell(data='logs', html_class='text'),
      cell(data='output_snippet', html_class='text'),
  ]

  test_row_blocks = []
  for test_name, test_results in results_dict.iteritems():
    test_runs = []
    for index, result in enumerate(test_results):
      if index == 0:
        # Only the first run's row carries the test-name cell; it spans
        # every run of the test via rowspan.
        test_run = [links_cell(
            links=[
                link(href=code_search(test_name, cs_base_url),
                     target=LinkTarget.NEW_TAB,
                     data=test_name)],
            rowspan=len(test_results),
            html_class='left %s' % test_name
        )]  # test_name
      else:
        test_run = []

      test_run.extend([
          cell(data=result['status'] or 'UNKNOWN',
               # status
               html_class=('center %s' %
                           status_class(result['status']))),
          cell(data=result['elapsed_time_ms']),  # elapsed_time_ms
          logs_cell(result, test_name, suite_name),  # logs
          pre_cell(data=result['output_snippet'],  # output_snippet
                   html_class='left'),
      ])
      test_runs.append(test_run)
    test_row_blocks.append(test_runs)
  return header_row, test_row_blocks
+
+
def create_suite_table(results_dict):
  """Format test suite data for injecting into HTML table.

  Aggregates per-test results into one row per suite (the part of the test
  name before '#', or before the first '.' when there is no '#'), plus a
  TOTAL footer row.

  Args:
    results_dict: Maps test name -> list of per-run result dicts.

  Returns:
    A (header_row, suite_row_blocks, footer_row) tuple for the template.
  """

  # Column indices into each suite row / the footer row.
  SUCCESS_COUNT_INDEX = 1
  FAIL_COUNT_INDEX = 2
  ALL_COUNT_INDEX = 3
  TIME_INDEX = 4

  header_row = [
      cell(data='suite_name', html_class='text'),
      cell(data='number_success_tests', html_class='number'),
      cell(data='number_fail_tests', html_class='number'),
      cell(data='all_tests', html_class='number'),
      cell(data='elapsed_time_ms', html_class='number'),
  ]

  footer_row = [
      action_cell(
          'showTestsOfOneSuiteOnlyWithNewState("TOTAL")',
          'TOTAL',
          'center'
      ),  # TOTAL
      cell(data=0),  # number_success_tests
      cell(data=0),  # number_fail_tests
      cell(data=0),  # all_tests
      cell(data=0),  # elapsed_time_ms
  ]

  suite_row_dict = {}
  for test_name, test_results in results_dict.iteritems():
    # TODO(mikecase): This logic doesn't work if there are multiple test runs.
    # That is, if 'per_iteration_data' has multiple entries.
    # Since we only care about the result of the last test run.
    result = test_results[-1]

    suite_name = (test_name.split('#')[0] if '#' in test_name
                  else test_name.split('.')[0])
    if suite_name in suite_row_dict:
      suite_row = suite_row_dict[suite_name]
    else:
      suite_row = [
          action_cell(
              'showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name,
              suite_name,
              'left'
          ),  # suite_name
          cell(data=0),  # number_success_tests
          cell(data=0),  # number_fail_tests
          cell(data=0),  # all_tests
          cell(data=0),  # elapsed_time_ms
      ]

    suite_row_dict[suite_name] = suite_row

    suite_row[ALL_COUNT_INDEX]['data'] += 1
    footer_row[ALL_COUNT_INDEX]['data'] += 1

    # SKIPPED counts toward the total but neither success nor failure.
    if result['status'] == 'SUCCESS':
      suite_row[SUCCESS_COUNT_INDEX]['data'] += 1
      footer_row[SUCCESS_COUNT_INDEX]['data'] += 1
    elif result['status'] != 'SKIPPED':
      suite_row[FAIL_COUNT_INDEX]['data'] += 1
      footer_row[FAIL_COUNT_INDEX]['data'] += 1

    suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
    footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms']

  # Color the fail-count column green/red per suite and for the footer.
  for suite in suite_row_dict.values():
    if suite[FAIL_COUNT_INDEX]['data'] > 0:
      suite[FAIL_COUNT_INDEX]['class'] += ' failure'
    else:
      suite[FAIL_COUNT_INDEX]['class'] += ' success'

  if footer_row[FAIL_COUNT_INDEX]['data'] > 0:
    footer_row[FAIL_COUNT_INDEX]['class'] += ' failure'
  else:
    footer_row[FAIL_COUNT_INDEX]['class'] += ' success'

  return (header_row,
          [[suite_row] for suite_row in suite_row_dict.values()],
          footer_row)
+
+
+def feedback_url(result_details_link):
+ # pylint: disable=redefined-variable-type
+ url_args = [
+ ('labels', 'Pri-2,Type-Bug,Restrict-View-Google'),
+ ('summary', 'Result Details Feedback:'),
+ ('components', 'Test>Android'),
+ ]
+ if result_details_link:
+ url_args.append(('comment', 'Please check out: %s' % result_details_link))
+ url_args = urllib.urlencode(url_args)
+ # pylint: enable=redefined-variable-type
+ return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args
+
+
+def results_to_html(results_dict, cs_base_url, bucket, test_name,
+ builder_name, build_number, local_output):
+ """Convert list of test results into html format.
+
+ Args:
+ local_output: Whether this results file is uploaded to Google Storage or
+ just a local file.
+ """
+ test_rows_header, test_rows = create_test_table(
+ results_dict, cs_base_url, test_name)
+ suite_rows_header, suite_rows, suite_row_footer = create_suite_table(
+ results_dict)
+
+ suite_table_values = {
+ 'table_id': 'suite-table',
+ 'table_headers': suite_rows_header,
+ 'table_row_blocks': suite_rows,
+ 'table_footer': suite_row_footer,
+ }
+
+ test_table_values = {
+ 'table_id': 'test-table',
+ 'table_headers': test_rows_header,
+ 'table_row_blocks': test_rows,
+ }
+
+ main_template = JINJA_ENVIRONMENT.get_template(
+ os.path.join('template', 'main.html'))
+
+ if local_output:
+ html_render = main_template.render( # pylint: disable=no-member
+ {
+ 'tb_values': [suite_table_values, test_table_values],
+ 'feedback_url': feedback_url(None),
+ })
+ return (html_render, None, None)
+ else:
+ dest = google_storage_helper.unique_name(
+ '%s_%s_%s' % (test_name, builder_name, build_number))
+ result_details_link = google_storage_helper.get_url_link(
+ dest, '%s/html' % bucket)
+ html_render = main_template.render( # pylint: disable=no-member
+ {
+ 'tb_values': [suite_table_values, test_table_values],
+ 'feedback_url': feedback_url(result_details_link),
+ })
+ return (html_render, dest, result_details_link)
+
+
+def result_details(json_path, test_name, cs_base_url, bucket=None,
+ builder_name=None, build_number=None, local_output=False):
+ """Get result details from json path and then convert results to html.
+
+ Args:
+ local_output: Whether this results file is uploaded to Google Storage or
+ just a local file.
+ """
+
+ with open(json_path) as json_file:
+ json_object = json.loads(json_file.read())
+
+ if not 'per_iteration_data' in json_object:
+ return 'Error: json file missing per_iteration_data.'
+
+ results_dict = collections.defaultdict(list)
+ for testsuite_run in json_object['per_iteration_data']:
+ for test, test_runs in testsuite_run.iteritems():
+ results_dict[test].extend(test_runs)
+ return results_to_html(results_dict, cs_base_url, bucket, test_name,
+ builder_name, build_number, local_output)
+
+
+def upload_to_google_bucket(html, bucket, dest):
+ with tempfile.NamedTemporaryFile(suffix='.html') as temp_file:
+ temp_file.write(html)
+ temp_file.flush()
+ return google_storage_helper.upload(
+ name=dest,
+ filepath=temp_file.name,
+ bucket='%s/html' % bucket,
+ content_type='text/html',
+ authenticated_link=True)
+
+
+def ui_screenshot_set(json_path):
+ with open(json_path) as json_file:
+ json_object = json.loads(json_file.read())
+ if not 'per_iteration_data' in json_object:
+ # This will be reported as an error by result_details, no need to duplicate.
+ return None
+ ui_screenshots = []
+ # pylint: disable=too-many-nested-blocks
+ for testsuite_run in json_object['per_iteration_data']:
+ for _, test_runs in testsuite_run.iteritems():
+ for test_run in test_runs:
+ if 'ui screenshot' in test_run['links']:
+ screenshot_link = test_run['links']['ui screenshot']
+ if screenshot_link.startswith('file:'):
+ with contextlib.closing(urllib.urlopen(screenshot_link)) as f:
+ test_screenshots = json.load(f)
+ else:
+ # Assume anything that isn't a file link is a google storage link
+ screenshot_string = google_storage_helper.read_from_link(
+ screenshot_link)
+ if not screenshot_string:
+ logging.error('Bad screenshot link %s', screenshot_link)
+ continue
+ test_screenshots = json.loads(
+ screenshot_string)
+ ui_screenshots.extend(test_screenshots)
+ # pylint: enable=too-many-nested-blocks
+
+ if ui_screenshots:
+ return json.dumps(ui_screenshots)
+ return None
+
+
+def upload_screenshot_set(json_path, test_name, bucket, builder_name,
+ build_number):
+ screenshot_set = ui_screenshot_set(json_path)
+ if not screenshot_set:
+ return None
+ dest = google_storage_helper.unique_name(
+ 'screenshots_%s_%s_%s' % (test_name, builder_name, build_number),
+ suffix='.json')
+ with tempfile.NamedTemporaryFile(suffix='.json') as temp_file:
+ temp_file.write(screenshot_set)
+ temp_file.flush()
+ return google_storage_helper.upload(
+ name=dest,
+ filepath=temp_file.name,
+ bucket='%s/json' % bucket,
+ content_type='application/json',
+ authenticated_link=True)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--json-file', help='Path of json file.')
+ parser.add_argument('--cs-base-url', help='Base url for code search.',
+ default='http://cs.chromium.org')
+ parser.add_argument('--bucket', help='Google storage bucket.', required=True)
+ parser.add_argument('--builder-name', help='Builder name.')
+ parser.add_argument('--build-number', help='Build number.')
+ parser.add_argument('--test-name', help='The name of the test.',
+ required=True)
+ parser.add_argument(
+ '-o', '--output-json',
+ help='(Swarming Merge Script API) '
+ 'Output JSON file to create.')
+ parser.add_argument(
+ '--build-properties',
+ help='(Swarming Merge Script API) '
+ 'Build property JSON file provided by recipes.')
+ parser.add_argument(
+ '--summary-json',
+ help='(Swarming Merge Script API) '
+ 'Summary of shard state running on swarming. '
+ '(Output of the swarming.py collect '
+ '--task-summary-json=XXX command.)')
+ parser.add_argument(
+ '--task-output-dir',
+ help='(Swarming Merge Script API) '
+ 'Directory containing all swarming task results.')
+ parser.add_argument(
+ 'positional', nargs='*',
+ help='output.json from shards.')
+
+ args = parser.parse_args()
+
+ if ((args.build_properties is None) ==
+ (args.build_number is None or args.builder_name is None)):
+ raise parser.error('Exactly one of build_perperties or '
+ '(build_number or builder_name) should be given.')
+
+ if (args.build_number is None) != (args.builder_name is None):
+ raise parser.error('args.build_number and args.builder_name '
+ 'has to be be given together'
+ 'or not given at all.')
+
+ if len(args.positional) == 0 and args.json_file is None:
+ if args.output_json:
+ with open(args.output_json, 'w') as f:
+ json.dump({}, f)
+ return
+ elif len(args.positional) != 0 and args.json_file:
+ raise parser.error('Exactly one of args.positional and '
+ 'args.json_file should be given.')
+
+ if args.build_properties:
+ build_properties = json.loads(args.build_properties)
+ if ((not 'buildnumber' in build_properties) or
+ (not 'buildername' in build_properties)):
+ raise parser.error('Build number/builder name not specified.')
+ build_number = build_properties['buildnumber']
+ builder_name = build_properties['buildername']
+ elif args.build_number and args.builder_name:
+ build_number = args.build_number
+ builder_name = args.builder_name
+
+ if args.positional:
+ if len(args.positional) == 1:
+ json_file = args.positional[0]
+ else:
+ if args.output_json and args.summary_json:
+ standard_gtest_merge.standard_gtest_merge(
+ args.output_json, args.summary_json, args.positional)
+ json_file = args.output_json
+ elif not args.output_json:
+ raise Exception('output_json required by merge API is missing.')
+ else:
+ raise Exception('summary_json required by merge API is missing.')
+ elif args.json_file:
+ json_file = args.json_file
+
+ if not os.path.exists(json_file):
+ raise IOError('--json-file %s not found.' % json_file)
+
+ # Link to result details presentation page is a part of the page.
+ result_html_string, dest, result_details_link = result_details(
+ json_file, args.test_name, args.cs_base_url, args.bucket,
+ builder_name, build_number)
+
+ result_details_link_2 = upload_to_google_bucket(
+ result_html_string.encode('UTF-8'),
+ args.bucket, dest)
+ assert result_details_link == result_details_link_2, (
+ 'Result details link do not match. The link returned by get_url_link'
+ ' should be the same as that returned by upload.')
+
+ ui_screenshot_set_link = upload_screenshot_set(json_file, args.test_name,
+ args.bucket, builder_name, build_number)
+
+ if ui_screenshot_set_link:
+ ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/'
+ ui_catalog_query = urllib.urlencode(
+ {'screenshot_source': ui_screenshot_set_link})
+ ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query)
+
+ if args.output_json:
+ with open(json_file) as original_json_file:
+ json_object = json.load(original_json_file)
+ json_object['links'] = {
+ 'result_details (logcats, flakiness links)': result_details_link
+ }
+
+ if ui_screenshot_set_link:
+ json_object['links']['ui screenshots'] = ui_screenshot_link
+
+ with open(args.output_json, 'w') as f:
+ json.dump(json_object, f)
+ else:
+ print 'Result Details: %s' % result_details_link
+
+ if ui_screenshot_set_link:
+ print 'UI Screenshots %s' % ui_screenshot_link
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/pylib/results/report_results.py b/deps/v8/build/android/pylib/results/report_results.py
new file mode 100644
index 0000000000..e886b72985
--- /dev/null
+++ b/deps/v8/build/android/pylib/results/report_results.py
@@ -0,0 +1,131 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+from pylib.utils import logging_utils
+
+
+def _LogToFile(results, test_type, suite_name):
+ """Log results to local files which can be used for aggregation later."""
+ log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+ if not os.path.exists(log_file_path):
+ os.mkdir(log_file_path)
+ full_file_name = os.path.join(
+ log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+ if not os.path.exists(full_file_name):
+ with open(full_file_name, 'w') as log_file:
+ print >> log_file, '\n%s results for %s build %s:' % (
+ test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+ os.environ.get('BUILDBOT_BUILDNUMBER'))
+ logging.info('Writing results to %s.', full_file_name)
+
+ logging.info('Writing results to %s.', full_file_name)
+ with open(full_file_name, 'a') as log_file:
+ shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+ print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
+ results.GetShortForm())
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+ flakiness_server):
+ """Upload results to the flakiness dashboard"""
+ logging.info('Upload results for test type "%s", test package "%s" to %s',
+ test_type, test_package, flakiness_server)
+
+ try:
+ # TODO(jbudorick): remove Instrumentation once instrumentation tests
+ # switch to platform mode.
+ if test_type in ('instrumentation', 'Instrumentation'):
+ if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+ assert test_package in ['ContentShellTest',
+ 'ChromePublicTest',
+ 'ChromeSyncShellTest',
+ 'SystemWebViewShellLayoutTest',
+ 'WebViewInstrumentationTest']
+ dashboard_test_type = ('%s_instrumentation_tests' %
+ test_package.lower().rstrip('test'))
+ # Downstream server.
+ else:
+ dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+ elif test_type == 'gtest':
+ dashboard_test_type = test_package
+
+ else:
+ logging.warning('Invalid test type')
+ return
+
+ results_uploader.Upload(
+ results, flakiness_server, dashboard_test_type)
+
+ except Exception: # pylint: disable=broad-except
+ logging.exception('Failure while logging to %s', flakiness_server)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+ flakiness_server=None):
+ """Log the tests results for the test suite.
+
+ The results will be logged three different ways:
+ 1. Log to stdout.
+ 2. Log to local files for aggregating multiple test steps
+ (on buildbots only).
+ 3. Log to flakiness dashboard (on buildbots only).
+
+ Args:
+ results: An instance of TestRunResults object.
+ test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+ test_package: Test package name (e.g. 'ipc_tests' for gtests,
+ 'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+ (e.g. ['Feature', 'SmallTest']).
+    flakiness_server: If provided, upload the results to flakiness dashboard
+ with this URL.
+ """
+ # pylint doesn't like how colorama set up its color enums.
+ # pylint: disable=no-member
+ black_on_white = (logging_utils.BACK.WHITE, logging_utils.FORE.BLACK)
+ with logging_utils.OverrideColor(logging.CRITICAL, black_on_white):
+ if not results.DidRunPass():
+ logging.critical('*' * 80)
+ logging.critical('Detailed Logs')
+ logging.critical('*' * 80)
+ for line in results.GetLogs().splitlines():
+ logging.critical(line)
+ logging.critical('*' * 80)
+ logging.critical('Summary')
+ logging.critical('*' * 80)
+ for line in results.GetGtestForm().splitlines():
+ color = black_on_white
+ if 'FAILED' in line:
+ # Red on white, dim.
+ color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED,
+ logging_utils.STYLE.DIM)
+ elif 'PASSED' in line:
+ # Green on white, dim.
+ color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN,
+ logging_utils.STYLE.DIM)
+ with logging_utils.OverrideColor(logging.CRITICAL, color):
+ logging.critical(line)
+ logging.critical('*' * 80)
+
+ if os.environ.get('BUILDBOT_BUILDERNAME'):
+ # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+ if annotation and len(annotation) == 1:
+ suite_name = annotation[0]
+ else:
+ suite_name = test_package
+ _LogToFile(results, test_type, suite_name)
+
+ if flakiness_server:
+ _LogToFlakinessDashboard(results, test_type, test_package,
+ flakiness_server)
diff --git a/deps/v8/build/android/pylib/symbols/__init__.py b/deps/v8/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/__init__.py
diff --git a/deps/v8/build/android/pylib/symbols/apk_lib_dump.py b/deps/v8/build/android/pylib/symbols/apk_lib_dump.py
new file mode 100755
index 0000000000..956ee07702
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/apk_lib_dump.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump shared library information from an APK file.
+
+This script is used to dump which *uncompressed* native shared libraries an
+APK contains, as well as their position within the file. This is mostly useful
+to diagnose logcat and tombstone symbolization issues when the libraries are
+loaded directly from the APK at runtime.
+
+The default format will print one line per uncompressed shared library with the
+following format:
+
+ 0x<start-offset> 0x<end-offset> 0x<file-size> <file-path>
+
+The --format=python option can be used to dump the same information that is
+easy to use in a Python script, e.g. with a line like:
+
+ (0x<start-offset>, 0x<end-offset>, 0x<file-size>, <file-path>),
+"""
+
+import argparse
+import os
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+
+from pylib.symbols import apk_native_libs
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ parser.add_argument('apk', help='Input APK file path.')
+
+ parser.add_argument('--format', help='Select output format',
+ default='default', choices=['default', 'python'])
+
+ args = parser.parse_args()
+
+ apk_reader = apk_native_libs.ApkReader(args.apk)
+ lib_map = apk_native_libs.ApkNativeLibraries(apk_reader)
+ for lib_path, file_offset, file_size in lib_map.GetDumpList():
+ if args.format == 'python':
+ print '(0x%08x, 0x%08x, 0x%08x, \'%s\'),' % (
+ file_offset, file_offset + file_size, file_size, lib_path)
+ else:
+ print '0x%08x 0x%08x 0x%08x %s' % (
+ file_offset, file_offset + file_size, file_size, lib_path)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/pylib/symbols/apk_native_libs.py b/deps/v8/build/android/pylib/symbols/apk_native_libs.py
new file mode 100644
index 0000000000..c4af202906
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/apk_native_libs.py
@@ -0,0 +1,419 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import struct
+import zipfile
+
+# The default zipfile python module cannot open APKs properly, but this
+# fixes it. Note that simply importing this file is sufficient to
+# ensure that zip works correctly for all other modules. See:
+# http://bugs.python.org/issue14315
+# https://hg.python.org/cpython/rev/6dd5e9556a60#l2.8
+def _PatchZipFile():
+ # pylint: disable=protected-access
+ oldDecodeExtra = zipfile.ZipInfo._decodeExtra
+ def decodeExtra(self):
+ try:
+ oldDecodeExtra(self)
+ except struct.error:
+ pass
+ zipfile.ZipInfo._decodeExtra = decodeExtra
+_PatchZipFile()
+
+
+class ApkZipInfo(object):
+ """Models a single file entry from an ApkReader.
+
+ This is very similar to the zipfile.ZipInfo class. It provides a few
+ properties describing the entry:
+ - filename (same as ZipInfo.filename)
+ - file_size (same as ZipInfo.file_size)
+  - compress_size (same as ZipInfo.compress_size)
+ - file_offset (note: not provided by ZipInfo)
+
+ And a few useful methods: IsCompressed() and IsElfFile().
+
+ Entries can be created by using ApkReader() methods.
+ """
+ def __init__(self, zip_file, zip_info):
+ """Construct instance. Do not call this directly. Use ApkReader methods."""
+ self._file = zip_file
+ self._info = zip_info
+ self._file_offset = None
+
+ @property
+ def filename(self):
+ """Entry's file path within APK."""
+ return self._info.filename
+
+ @property
+ def file_size(self):
+ """Entry's extracted file size in bytes."""
+ return self._info.file_size
+
+ @property
+ def compress_size(self):
+    """Entry's compressed file size in bytes."""
+ return self._info.compress_size
+
+ @property
+ def file_offset(self):
+ """Entry's starting file offset in the APK."""
+ if self._file_offset is None:
+ self._file_offset = self._ZipFileOffsetFromLocalHeader(
+ self._file.fp, self._info.header_offset)
+ return self._file_offset
+
+ def __repr__(self):
+ """Convert to string for debugging."""
+ return 'ApkZipInfo["%s",size=0x%x,compressed=0x%x,offset=0x%x]' % (
+ self.filename, self.file_size, self.compress_size, self.file_offset)
+
+ def IsCompressed(self):
+ """Returns True iff the entry is compressed."""
+ return self._info.compress_type != zipfile.ZIP_STORED
+
+ def IsElfFile(self):
+ """Returns True iff the entry is an ELF file."""
+ with self._file.open(self._info, 'r') as f:
+ return f.read(4) == '\x7fELF'
+
+ @staticmethod
+ def _ZipFileOffsetFromLocalHeader(fd, local_header_offset):
+ """Return a file's start offset from its zip archive local header.
+
+ Args:
+ fd: Input file object.
+ local_header_offset: Local header offset (from its ZipInfo entry).
+ Returns:
+ file start offset.
+ """
+ FILE_NAME_LEN_OFFSET = 26
+ FILE_NAME_OFFSET = 30
+ fd.seek(local_header_offset + FILE_NAME_LEN_OFFSET)
+ file_name_len = struct.unpack('H', fd.read(2))[0]
+ extra_field_len = struct.unpack('H', fd.read(2))[0]
+ file_offset = (local_header_offset + FILE_NAME_OFFSET +
+ file_name_len + extra_field_len)
+ return file_offset
+
+
+class ApkReader(object):
+ """A convenience class used to read the content of APK files.
+
+ Its design is very similar to the one from zipfile.ZipFile, except
+  that it returns ApkZipInfo entries which provide a |file_offset|
+ property that can be used to know where a given file is located inside
+ the archive.
+
+ It is also easy to mock for unit-testing (see MockApkReader in
+ apk_utils_unittest.py) without creating any files on disk.
+
+ Usage is the following:
+ - Create an instance using a with statement (for proper unit-testing).
+ - Call ListEntries() to list all entries in the archive. This returns
+ a list of ApkZipInfo entries.
+ - Or call FindEntry() corresponding to a given path within the archive.
+
+ For example:
+ with ApkReader(input_apk_path) as reader:
+ info = reader.FindEntry('lib/armeabi-v7a/libfoo.so')
+ if info.IsCompressed() or not info.IsElfFile():
+        raise Exception('Invalid library path')
+
+ The ApkZipInfo can be used to inspect the entry's metadata, or read its
+ content with the ReadAll() method. See its documentation for all details.
+ """
+ def __init__(self, apk_path):
+ """Initialize instance."""
+ self._zip_file = zipfile.ZipFile(apk_path, 'r')
+ self._path = apk_path
+
+ def __enter__(self):
+ """Python context manager entry."""
+ return self
+
+ def __exit__(self, *kwargs):
+ """Python context manager exit."""
+ self.Close()
+
+ @property
+ def path(self):
+ """The corresponding input APK path."""
+ return self._path
+
+ def Close(self):
+ """Close the reader (and underlying ZipFile instance)."""
+ self._zip_file.close()
+
+ def ListEntries(self):
+ """Return a list of ApkZipInfo entries for this APK."""
+ result = []
+ for info in self._zip_file.infolist():
+ result.append(ApkZipInfo(self._zip_file, info))
+ return result
+
+ def FindEntry(self, file_path):
+ """Return an ApkZipInfo instance for a given archive file path.
+
+ Args:
+ file_path: zip file path.
+ Return:
+ A new ApkZipInfo entry on success.
+ Raises:
+ KeyError on failure (entry not found).
+ """
+ info = self._zip_file.getinfo(file_path)
+ return ApkZipInfo(self._zip_file, info)
+
+
+
+class ApkNativeLibraries(object):
+ """A class for the list of uncompressed shared libraries inside an APK.
+
+ Create a new instance by passing the path to an input APK, then use
+ the FindLibraryByOffset() method to find the native shared library path
+ corresponding to a given file offset.
+
+ GetAbiList() and GetLibrariesList() can also be used to inspect
+ the state of the instance.
+ """
+ def __init__(self, apk_reader):
+ """Initialize instance.
+
+ Args:
+ apk_reader: An ApkReader instance corresponding to the input APK.
+ """
+ self._native_libs = []
+ for entry in apk_reader.ListEntries():
+ # Chromium uses so-called 'placeholder' native shared libraries
+ # that have a size of 0, and are only used to deal with bugs in
+ # older Android system releases (they are never loaded and cannot
+ # appear in stack traces). Ignore these here to avoid generating
+ # confusing results.
+ if entry.file_size == 0:
+ continue
+
+ # Only uncompressed libraries can appear in stack traces.
+ if entry.IsCompressed():
+ continue
+
+ # Only consider files within lib/ and with a filename ending with .so
+ # at the moment. NOTE: Do not require a 'lib' prefix, since that would
+ # prevent finding the 'crazy.libXXX.so' libraries used by Chromium.
+ if (not entry.filename.startswith('lib/') or
+ not entry.filename.endswith('.so')):
+ continue
+
+ lib_path = entry.filename
+
+ self._native_libs.append(
+ (lib_path, entry.file_offset, entry.file_offset + entry.file_size))
+
+ def IsEmpty(self):
+ """Return true iff the list is empty."""
+ return not bool(self._native_libs)
+
+ def GetLibraries(self):
+ """Return the list of all library paths in this instance."""
+ return sorted([x[0] for x in self._native_libs])
+
+ def GetDumpList(self):
+ """Retrieve full library map.
+
+ Returns:
+ A list of (lib_path, file_offset, file_size) tuples, sorted
+ in increasing |file_offset| values.
+ """
+ result = []
+ for entry in self._native_libs:
+ lib_path, file_start, file_end = entry
+ result.append((lib_path, file_start, file_end - file_start))
+
+ return sorted(result, lambda x, y: cmp(x[1], y[1]))
+
+ def FindLibraryByOffset(self, file_offset):
+ """Find the native library at a given file offset.
+
+ Args:
+ file_offset: File offset within the original APK.
+ Returns:
+ Returns a (lib_path, lib_offset) tuple on success, or (None, 0)
+ on failure. Note that lib_path will omit the 'lib/$ABI/' prefix,
+ lib_offset is the adjustment of file_offset within the library.
+ """
+ for lib_path, start_offset, end_offset in self._native_libs:
+ if file_offset >= start_offset and file_offset < end_offset:
+ return (lib_path, file_offset - start_offset)
+
+ return (None, 0)
+
+
+class ApkLibraryPathTranslator(object):
+ """Translates APK file paths + byte offsets into library path + offset.
+
+ The purpose of this class is to translate a native shared library path
+ that points to an APK into a new device-specific path that points to a
+ native shared library, as if it was installed there. E.g.:
+
+ ('/data/data/com.example.app-1/base.apk', 0x123be00)
+
+ would be translated into:
+
+ ('/data/data/com.example.app-1/base.apk!lib/libfoo.so', 0x3be00)
+
+ If the original APK (installed as base.apk) contains an uncompressed shared
+ library under lib/armeabi-v7a/libfoo.so at offset 0x120000.
+
+ Note that the virtual device path after the ! doesn't necessarily match
+ the path inside the .apk. This doesn't really matter for the rest of
+ the symbolization functions since only the file's base name can be used
+ to find the corresponding file on the host.
+
+ Usage is the following:
+
+ 1/ Create new instance.
+
+ 2/ Call AddHostApk() one or several times to add the host path
+     of an APK, its package name, and device-installed name.
+
+ 3/ Call TranslatePath() to translate a (path, offset) tuple corresponding
+ to an on-device APK, into the corresponding virtual device library
+ path and offset.
+ """
+
+ # Depending on the version of the system, a non-system APK might be installed
+ # on a path that looks like the following:
+ #
+ # * /data/..../<package_name>-<number>.apk, where <number> is used to
+ # distinguish several versions of the APK during package updates.
+ #
+ # * /data/..../<package_name>-<suffix>/base.apk, where <suffix> is a
+ # string of random ASCII characters following the dash after the
+ # package name. This serves as a way to distinguish the installation
+ # paths during package update, and randomize its final location
+ # (to prevent apps from hard-coding the paths to other apps).
+ #
+ # Note that the 'base.apk' name comes from the system.
+ #
+ # * /data/.../<package_name>-<suffix>/<split_name>.apk, where <suffix>
+  #   is the same as above, and <split_name> is the name of an app bundle
+ # split APK.
+ #
+ # System APKs are installed on paths that look like /system/app/Foo.apk
+ # but this class ignores them intentionally.
+
+  # Compiled regular expression for the first format above.
+ _RE_APK_PATH_1 = re.compile(
+ r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<version>[0-9]+)\.apk')
+
+ # Compiled regular expression for the second and third formats above.
+ _RE_APK_PATH_2 = re.compile(
+ r'/data/.*/(?P<package_name>[A-Za-z0-9_.]+)-(?P<suffix>[^/]+)/' +
+ r'(?P<apk_name>.+\.apk)')
+
+ def __init__(self):
+ """Initialize instance. Call AddHostApk() to add host apk file paths."""
+ self._path_map = {} # Maps (package_name, apk_name) to host-side APK path.
+ self._libs_map = {} # Maps APK host path to ApkNativeLibrariesMap instance.
+
+ def AddHostApk(self, package_name, native_libs, device_apk_name=None):
+ """Add a file path to the host APK search list.
+
+ Args:
+ package_name: Corresponding apk package name.
+ native_libs: ApkNativeLibraries instance for the corresponding APK.
+ device_apk_name: Optional expected name of the installed APK on the
+ device. This is only useful when symbolizing app bundle that run on
+ Android L+. I.e. it will be ignored in other cases.
+ """
+ if native_libs.IsEmpty():
+ logging.debug('Ignoring host APK without any uncompressed native ' +
+ 'libraries: %s', device_apk_name)
+ return
+
+ # If the APK name is not provided, use the default of 'base.apk'. This
+ # will be ignored if we find <package_name>-<number>.apk file paths
+ # in the input, but will work properly for Android L+, as long as we're
+ # not using Android app bundles.
+ device_apk_name = device_apk_name or 'base.apk'
+
+ key = "%s/%s" % (package_name, device_apk_name)
+ if key in self._libs_map:
+ raise KeyError('There is already an APK associated with (%s)' % key)
+
+ self._libs_map[key] = native_libs
+
+ @staticmethod
+ def _MatchApkDeviceInstallPath(apk_path):
+ """Check whether a given path matches an installed APK device file path.
+
+ Args:
+ apk_path: Device-specific file path.
+ Returns:
+      On success, a (package_name, apk_name) tuple. On failure, (None, None).
+ """
+ m = ApkLibraryPathTranslator._RE_APK_PATH_1.match(apk_path)
+ if m:
+ return (m.group('package_name'), 'base.apk')
+
+ m = ApkLibraryPathTranslator._RE_APK_PATH_2.match(apk_path)
+ if m:
+ return (m.group('package_name'), m.group('apk_name'))
+
+ return (None, None)
+
+ def TranslatePath(self, apk_path, apk_offset):
+ """Translate a potential apk file path + offset into library path + offset.
+
+ Args:
+ apk_path: Library or apk file path on the device (e.g.
+ '/data/data/com.example.app-XSAHKSJH/base.apk').
+ apk_offset: Byte offset within the library or apk.
+
+ Returns:
+ a new (lib_path, lib_offset) tuple. If |apk_path| points to an APK,
+ then this function searches inside the corresponding host-side APKs
+ (added with AddHostApk() above) for the corresponding uncompressed
+ native shared library at |apk_offset|, if found, this returns a new
+ device-specific path corresponding to a virtual installation of said
+ library with an adjusted offset.
+
+ Otherwise, just return the original (apk_path, apk_offset) values.
+ """
+ if not apk_path.endswith('.apk'):
+ return (apk_path, apk_offset)
+
+ apk_package, apk_name = self._MatchApkDeviceInstallPath(apk_path)
+ if not apk_package:
+ return (apk_path, apk_offset)
+
+ key = '%s/%s' % (apk_package, apk_name)
+ native_libs = self._libs_map.get(key)
+ if not native_libs:
+ logging.debug('Unknown %s package', key)
+ return (apk_path, apk_offset)
+
+ lib_name, new_offset = native_libs.FindLibraryByOffset(apk_offset)
+ if not lib_name:
+ logging.debug('Invalid offset in %s.apk package: %d', key, apk_offset)
+ return (apk_path, apk_offset)
+
+ lib_name = os.path.basename(lib_name)
+
+ # Some libraries are stored with a crazy. prefix inside the APK, this
+ # is done to prevent the PackageManager from extracting the libraries
+ # at installation time when running on pre Android M systems, where the
+ # system linker cannot load libraries directly from APKs.
+ crazy_prefix = 'crazy.'
+ if lib_name.startswith(crazy_prefix):
+ lib_name = lib_name[len(crazy_prefix):]
+
+ # Put this in a fictional lib sub-directory for good measure.
+ new_path = '%s!lib/%s' % (apk_path, lib_name)
+
+ return (new_path, new_offset)
diff --git a/deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py b/deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py
new file mode 100644
index 0000000000..416918d8a1
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/apk_native_libs_unittest.py
@@ -0,0 +1,396 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import unittest
+
+from pylib.symbols import apk_native_libs
+
# Mock ELF-like data: the 4-byte ELF magic ('\x7fELF') followed by filler
# bytes, enough for MockApkZipInfo.IsElfFile() to report True.
MOCK_ELF_DATA = '\x7fELFFFFFFFFFFFFFFFF'
+
class MockApkZipInfo(object):
  """Fake of the ApkZipInfo interface handed out by MockApkReader.

  Stores the archive member's metadata verbatim and answers the same
  queries a real ApkZipInfo would (compression state, ELF detection).
  """
  def __init__(self, filename, file_size, compress_size, file_offset,
               file_data):
    self.filename = filename
    self.file_size = file_size
    self.compress_size = compress_size
    self.file_offset = file_offset
    self._data = file_data

  def __repr__(self):
    """Debugging-friendly description of this entry."""
    return 'MockApkZipInfo["{}",size={:d},compressed={:d},offset={:d}]'.format(
        self.filename, self.file_size, self.compress_size, self.file_offset)

  def IsCompressed(self):
    """Returns True iff the entry is stored compressed in the archive."""
    # A stored (uncompressed) entry has identical sizes before and after.
    return self.compress_size != self.file_size

  def IsElfFile(self):
    """Returns True iff the entry's data begins with the ELF magic."""
    data = self._data
    if data and len(data) >= 4:
      return data[0:4] == '\x7fELF'
    return False
+
+
class MockApkReader(object):
  """In-memory replacement for apk_native_libs.ApkReader used in tests.

  Usable as a context manager, e.g.:

    with MockApkReader() as reader:
      reader.AddTestEntry(file_path, file_size, compress_size, file_data)
      ...
      libs = apk_native_libs.ApkNativeLibraries(reader)
  """
  def __init__(self, apk_path='test.apk'):
    """Creates an empty reader for the fictional archive |apk_path|."""
    self._path = apk_path
    self._entries = []
    # Synthetic in-archive position of the next entry; advanced by each
    # entry's compressed size so entries get plausible, distinct offsets.
    self._fake_offset = 0

  def __enter__(self):
    return self

  def __exit__(self, *_exc_info):
    self.Close()
    return

  @property
  def path(self):
    """Path of the (fictional) APK file backing this reader."""
    return self._path

  def AddTestEntry(self, filepath, file_size, compress_size, file_data):
    """Registers a new archive member for unit-tests.

    Args:
      filepath: archive file path.
      file_size: uncompressed file size in bytes.
      compress_size: compressed size in bytes.
      file_data: file data to be checked by IsElfFile().

    file_data may be None, or shorter than |compress_size|, when the test
    does not care about the entry's content.
    """
    entry = MockApkZipInfo(filepath, file_size, compress_size,
                           self._fake_offset, file_data)
    self._entries.append(entry)
    self._fake_offset += compress_size

  def Close(self):  # pylint: disable=no-self-use
    """No-op; present to mirror the real ApkReader interface."""
    return

  def ListEntries(self):
    """Returns the list of registered MockApkZipInfo instances."""
    return self._entries

  def FindEntry(self, file_path):
    """Returns the entry whose filename is |file_path|.

    Raises:
      KeyError: if no entry matches.
    """
    match = next(
        (entry for entry in self._entries if entry.filename == file_path),
        None)
    if match is None:
      raise KeyError('Could not find mock zip archive member for: ' + file_path)
    return match
+
+
class MockApkReaderTest(unittest.TestCase):
  """Sanity checks for the MockApkReader test fixture itself."""

  def testEmpty(self):
    with MockApkReader() as reader:
      entries = reader.ListEntries()
      self.assertTrue(len(entries) == 0)
      with self.assertRaises(KeyError):
        reader.FindEntry('non-existent-entry.txt')

  def testSingleEntry(self):
    with MockApkReader() as reader:
      reader.AddTestEntry('some-path/some-file', 20000, 12345, file_data=None)
      entries = reader.ListEntries()
      self.assertTrue(len(entries) == 1)
      entry = entries[0]
      self.assertEqual(entry.filename, 'some-path/some-file')
      self.assertEqual(entry.file_size, 20000)
      self.assertEqual(entry.compress_size, 12345)
      self.assertTrue(entry.IsCompressed())

      entry2 = reader.FindEntry('some-path/some-file')
      self.assertEqual(entry, entry2)

  def testMultipleEntries(self):
    with MockApkReader() as reader:
      # Maps archive path -> (file_size, compress_size, file_data). The
      # libcode.so tuple previously carried a stray fourth element, which
      # made props[2] the number 1024 instead of the ELF data below.
      _ENTRIES = {
          'foo.txt': (1024, 1024, 'FooFooFoo'),
          'lib/bar/libcode.so': (16000, 3240, '\x7fELFFFFFFFFFFFF'),
      }
      # items() works on both Python 2 and 3; iteritems() is Python-2-only.
      for path, props in _ENTRIES.items():
        reader.AddTestEntry(path, props[0], props[1], props[2])

      entries = reader.ListEntries()
      self.assertEqual(len(entries), len(_ENTRIES))
      for path, props in _ENTRIES.items():
        entry = reader.FindEntry(path)
        self.assertEqual(entry.filename, path)
        self.assertEqual(entry.file_size, props[0])
        self.assertEqual(entry.compress_size, props[1])
+
+
class ApkNativeLibrariesTest(unittest.TestCase):
  """Tests ApkNativeLibraries against MockApkReader-provided archives."""

  def setUp(self):
    # apk_native_libs logs noise for the deliberately-ignored entries below.
    logging.getLogger().setLevel(logging.ERROR)

  def testEmptyApk(self):
    with MockApkReader() as reader:
      libs_map = apk_native_libs.ApkNativeLibraries(reader)
      self.assertTrue(libs_map.IsEmpty())
      self.assertEqual(len(libs_map.GetLibraries()), 0)
      lib_path, lib_offset = libs_map.FindLibraryByOffset(0)
      self.assertIsNone(lib_path)
      self.assertEqual(lib_offset, 0)

  def testSimpleApk(self):
    with MockApkReader() as reader:
      # Each tuple is (path, file_size, compress_size, data, is_valid_lib).
      _MOCK_ENTRIES = [
          # Top-level library should be ignored.
          ('libfoo.so', 1000, 1000, MOCK_ELF_DATA, False),
          # Library not under lib/ should be ignored.
          ('badlib/test-abi/libfoo2.so', 1001, 1001, MOCK_ELF_DATA, False),
          # Library under lib/<abi>/ but without .so extension -> ignored.
          ('lib/test-abi/libfoo4.so.1', 1003, 1003, MOCK_ELF_DATA, False),
          # Library under lib/<abi>/ with .so suffix, but compressed -> ignored.
          ('lib/test-abi/libfoo5.so', 1004, 1003, MOCK_ELF_DATA, False),
          # First correct library.
          ('lib/test-abi/libgood1.so', 1005, 1005, MOCK_ELF_DATA, True),
          # Second correct library: sub-directories are supported.
          ('lib/test-abi/subdir/libgood2.so', 1006, 1006, MOCK_ELF_DATA, True),
          # Third correct library: no lib prefix required.
          ('lib/test-abi/crazy.libgood3.so', 1007, 1007, MOCK_ELF_DATA, True),
      ]
      file_offsets = []
      next_offset = 0
      for path, size, compressed_size, data, _ in _MOCK_ENTRIES:
        reader.AddTestEntry(path, size, compressed_size, data)
        file_offsets.append(next_offset)
        next_offset += compressed_size

      libs_map = apk_native_libs.ApkNativeLibraries(reader)
      self.assertFalse(libs_map.IsEmpty())
      # Only the valid entries are reported, sorted by path.
      self.assertEqual(libs_map.GetLibraries(), [
          'lib/test-abi/crazy.libgood3.so',
          'lib/test-abi/libgood1.so',
          'lib/test-abi/subdir/libgood2.so',
      ])

      # An offset |BIAS| bytes into a valid library must resolve back to that
      # library with a relative offset of exactly |BIAS|.
      BIAS = 10
      for (path, _, _, _, is_valid), file_offset in zip(
          _MOCK_ENTRIES, file_offsets):
        if not is_valid:
          continue
        lib_path, lib_offset = libs_map.FindLibraryByOffset(file_offset + BIAS)
        self.assertEqual(lib_path, path)
        self.assertEqual(lib_offset, BIAS)


  def testMultiAbiApk(self):
    with MockApkReader() as reader:
      _MOCK_ENTRIES = [
          ('lib/abi1/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
          ('lib/abi2/libfoo.so', 1000, 1000, MOCK_ELF_DATA),
      ]
      for path, size, compressed_size, data in _MOCK_ENTRIES:
        reader.AddTestEntry(path, size, compressed_size, data)

      libs_map = apk_native_libs.ApkNativeLibraries(reader)
      self.assertFalse(libs_map.IsEmpty())
      self.assertEqual(libs_map.GetLibraries(), [
          'lib/abi1/libfoo.so', 'lib/abi2/libfoo.so'])

      # The same library name under two ABIs: each APK offset resolves to the
      # entry whose byte range contains it.
      lib1_name, lib1_offset = libs_map.FindLibraryByOffset(10)
      self.assertEqual(lib1_name, 'lib/abi1/libfoo.so')
      self.assertEqual(lib1_offset, 10)

      lib2_name, lib2_offset = libs_map.FindLibraryByOffset(1000)
      self.assertEqual(lib2_name, 'lib/abi2/libfoo.so')
      self.assertEqual(lib2_offset, 0)
+
+
class MockApkNativeLibraries(apk_native_libs.ApkNativeLibraries):
  """ApkNativeLibraries variant that needs no ApkReader.

  Populate an instance through AddTestEntry()/AddTestEntries(), then pass
  it anywhere a regular ApkNativeLibraries is expected (e.g. to
  ApkLibraryPathTranslator.AddHostApk()).
  """
  # pylint: disable=super-init-not-called
  def __init__(self):
    self._native_libs = []

  # pylint: enable=super-init-not-called

  def AddTestEntry(self, lib_path, file_offset, file_size):
    """Registers one fake uncompressed native library.

    Args:
      lib_path: library path inside the APK
          (e.g. 'lib/armeabi-v8a/libfoo.so').
      file_offset: byte offset of the library within the APK.
      file_size: library size in bytes.
    """
    # Internal records are (path, start_offset, end_offset) tuples.
    self._native_libs.append((lib_path, file_offset, file_offset + file_size))

  def AddTestEntries(self, entries):
    """Registers a list of (lib_path, file_offset, file_size) tuples."""
    for lib_path, file_offset, file_size in entries:
      self.AddTestEntry(lib_path, file_offset, file_size)
+
+
class MockApkNativeLibrariesTest(unittest.TestCase):
  """Checks that MockApkNativeLibraries answers queries like the real class."""

  # Shared inputs and expectations for the two population paths below.
  _ENTRIES = [
      ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
      ('lib/x86/libzoo.so', 0x10000, 0x10000),
      ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
  ]
  _EXPECTED_LIBRARIES = [
      'lib/armeabi-v7a/libbar.so',
      'lib/armeabi-v7a/libfoo.so',
      'lib/x86/libzoo.so',
  ]
  _EXPECTED_DUMP = [
      ('lib/x86/libzoo.so', 0x10000, 0x10000),
      ('lib/armeabi-v7a/libfoo.so', 0x20000, 0x4000),
      ('lib/armeabi-v7a/libbar.so', 0x24000, 0x8000),
  ]

  def _CheckPopulated(self, mock):
    # GetLibraries() is path-sorted; GetDumpList() is offset-ordered.
    self.assertFalse(mock.IsEmpty())
    self.assertEqual(mock.GetLibraries(), self._EXPECTED_LIBRARIES)
    self.assertEqual(mock.GetDumpList(), self._EXPECTED_DUMP)

  def testEmptyInstance(self):
    mock = MockApkNativeLibraries()
    self.assertTrue(mock.IsEmpty())
    self.assertEqual(mock.GetLibraries(), [])
    self.assertEqual(mock.GetDumpList(), [])

  def testAddTestEntry(self):
    mock = MockApkNativeLibraries()
    for lib_path, offset, size in self._ENTRIES:
      mock.AddTestEntry(lib_path, offset, size)
    self._CheckPopulated(mock)

  def testAddTestEntries(self):
    mock = MockApkNativeLibraries()
    mock.AddTestEntries(self._ENTRIES)
    self._CheckPopulated(mock)
+
+
class ApkLibraryPathTranslatorTest(unittest.TestCase):
  """Tests for apk_native_libs.ApkLibraryPathTranslator."""

  def _CheckUntranslated(self, translator, path, offset):
    """Check that a given (path, offset) is not modified by translation."""
    self.assertEqual(translator.TranslatePath(path, offset), (path, offset))


  def _CheckTranslated(self, translator, path, offset, new_path, new_offset):
    """Check that (path, offset) is translated into (new_path, new_offset)."""
    self.assertEqual(translator.TranslatePath(path, offset),
                     (new_path, new_offset))

  def testEmptyInstance(self):
    # With no host APKs registered, TranslatePath() must be a no-op.
    translator = apk_native_libs.ApkLibraryPathTranslator()
    self._CheckUntranslated(
        translator, '/data/data/com.example.app-1/base.apk', 0x123456)

  def testSimpleApk(self):
    # Register three uncompressed libraries covering the byte ranges
    # [200,2200), [3200,6200) and [6200,8200) within the host APK.
    mock_libs = MockApkNativeLibraries()
    mock_libs.AddTestEntries([
        ('lib/test-abi/libfoo.so', 200, 2000),
        ('lib/test-abi/libbar.so', 3200, 3000),
        ('lib/test-abi/crazy.libzoo.so', 6200, 2000),
    ])
    translator = apk_native_libs.ApkLibraryPathTranslator()
    translator.AddHostApk('com.example.app', mock_libs)

    # Offset is within the first uncompressed library
    # (757 is 557 bytes past libfoo.so's start offset of 200).
    self._CheckTranslated(
        translator,
        '/data/data/com.example.app-9.apk', 757,
        '/data/data/com.example.app-9.apk!lib/libfoo.so', 557)

    # Offset is within the second compressed library
    # (2800 falls in the unregistered gap [2200,3200) between entries).
    self._CheckUntranslated(
        translator,
        '/data/data/com.example.app-9/base.apk', 2800)

    # Offset is within the third uncompressed library
    # (3628 is 428 bytes past libbar.so's start offset of 3200).
    self._CheckTranslated(
        translator,
        '/data/data/com.example.app-1/base.apk', 3628,
        '/data/data/com.example.app-1/base.apk!lib/libbar.so', 428)

    # Offset is within the fourth uncompressed library with crazy. prefix;
    # the prefix is stripped from the translated library name.
    self._CheckTranslated(
        translator,
        '/data/data/com.example.app-XX/base.apk', 6500,
        '/data/data/com.example.app-XX/base.apk!lib/libzoo.so', 300)

    # Out-of-bounds apk offset.
    self._CheckUntranslated(
        translator,
        '/data/data/com.example.app-1/base.apk', 10000)

    # Invalid package name.
    self._CheckUntranslated(
        translator, '/data/data/com.example2.app-1/base.apk', 757)

    # Invalid apk name.
    self._CheckUntranslated(
        translator, '/data/data/com.example.app-2/not-base.apk', 100)

    # Invalid file extensions.
    self._CheckUntranslated(
        translator, '/data/data/com.example.app-2/base', 100)

    self._CheckUntranslated(
        translator, '/data/data/com.example.app-2/base.apk.dex', 100)

  def testBundleApks(self):
    # Two split APKs registered for the same package; translation must pick
    # the right one based on the APK file name passed to AddHostApk().
    mock_libs1 = MockApkNativeLibraries()
    mock_libs1.AddTestEntries([
        ('lib/test-abi/libfoo.so', 200, 2000),
        ('lib/test-abi/libbbar.so', 3200, 3000),
    ])
    mock_libs2 = MockApkNativeLibraries()
    mock_libs2.AddTestEntries([
        ('lib/test-abi/libzoo.so', 200, 2000),
        ('lib/test-abi/libtool.so', 3000, 4000),
    ])
    translator = apk_native_libs.ApkLibraryPathTranslator()
    translator.AddHostApk('com.example.app', mock_libs1, 'base-master.apk')
    translator.AddHostApk('com.example.app', mock_libs2, 'feature-master.apk')

    # 757 is 557 bytes into libfoo.so (start 200) of base-master.apk.
    self._CheckTranslated(
        translator,
        '/data/app/com.example.app-XUIYIUW/base-master.apk', 757,
        '/data/app/com.example.app-XUIYIUW/base-master.apk!lib/libfoo.so', 557)

    # 3200 is 200 bytes into libtool.so (start 3000) of feature-master.apk.
    self._CheckTranslated(
        translator,
        '/data/app/com.example.app-XUIYIUW/feature-master.apk', 3200,
        '/data/app/com.example.app-XUIYIUW/feature-master.apk!lib/libtool.so',
        200)
+
+
# Allows running this suite directly: python apk_native_libs_unittest.py
if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/pylib/symbols/deobfuscator.py b/deps/v8/build/android/pylib/symbols/deobfuscator.py
new file mode 100644
index 0000000000..ac4ff7e4b4
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/deobfuscator.py
@@ -0,0 +1,165 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import threading
+import time
+import uuid
+
+from devil.utils import reraiser_thread
+from pylib import constants
+
+
# Lower bound for TransformLines()' read timeout, in seconds.
# NOTE(review): the name has a typo (_MINIUMUM_ vs _MINIMUM_); kept unchanged
# because it is referenced by Deobfuscator.TransformLines() below.
_MINIUMUM_TIMEOUT = 3.0
_PER_LINE_TIMEOUT = .002 # Should be able to process 500 lines per second.
# Grace period granted for the java_deobfuscate subprocess to start up.
_PROCESS_START_TIMEOUT = 10.0
+
+
class Deobfuscator(object):
  """Wrapper around a single long-lived java_deobfuscate subprocess.

  The helper process is spawned eagerly in __init__ (to hide its start-up
  latency), fed obfuscated lines on stdin by TransformLines(), and its
  deobfuscated output is read back from stdout.
  """

  def __init__(self, mapping_path):
    # |mapping_path| is the mapping file passed to the java_deobfuscate
    # helper script found under the build output directory.
    script_path = os.path.join(
        constants.GetOutDirectory(), 'bin', 'java_deobfuscate')
    cmd = [script_path, mapping_path]
    # Allow only one thread to call TransformLines() at a time.
    self._lock = threading.Lock()
    # Ensure that only one thread attempts to kill self._proc in Close().
    self._close_lock = threading.Lock()
    self._closed_called = False
    # Assign to None so that attribute exists if Popen() throws.
    self._proc = None
    # Start process eagerly to hide start-up latency.
    self._proc_start_time = time.time()
    self._proc = subprocess.Popen(
        cmd, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
        close_fds=True)

  def IsClosed(self):
    """True once Close() ran or the subprocess is known to have exited.

    Note: Popen.returncode is only updated by poll()/wait(), so a crashed
    process may not be reported here until something polls it.
    """
    return self._closed_called or self._proc.returncode is not None

  def IsBusy(self):
    """True while some thread is inside TransformLines()."""
    return self._lock.locked()

  def IsReady(self):
    """True if a TransformLines() call would not have to wait."""
    return not self.IsClosed() and not self.IsBusy()

  def TransformLines(self, lines):
    """Deobfuscates obfuscated names found in the given lines.

    If anything goes wrong (process crashes, timeout, etc), returns |lines|.

    Args:
      lines: A list of strings without trailing newlines.

    Returns:
      A list of strings without trailing newlines.
    """
    if not lines:
      return []

    # Deobfuscated stacks contain more frames than obfuscated ones when method
    # inlining occurs. To account for the extra output lines, keep reading until
    # this eof_line token is reached.
    eof_line = uuid.uuid4().hex
    out_lines = []

    def deobfuscate_reader():
      # Runs on a helper thread: drains stdout until the EOF token (or pipe
      # EOF) is seen, accumulating results into |out_lines|.
      while True:
        line = self._proc.stdout.readline()
        # Return an empty string at EOF (when stdin is closed).
        if not line:
          break
        line = line[:-1]
        if line == eof_line:
          break
        out_lines.append(line)

    if self.IsBusy():
      logging.warning('deobfuscator: Having to wait for Java deobfuscation.')

    # Allow only one thread to operate at a time.
    with self._lock:
      if self.IsClosed():
        if not self._closed_called:
          logging.warning('deobfuscator: Process exited with code=%d.',
                          self._proc.returncode)
          self.Close()
        return lines

      # TODO(agrieve): Can probably speed this up by only sending lines through
      # that might contain an obfuscated name.
      reader_thread = reraiser_thread.ReraiserThread(deobfuscate_reader)
      reader_thread.start()

      try:
        self._proc.stdin.write('\n'.join(lines))
        self._proc.stdin.write('\n{}\n'.format(eof_line))
        self._proc.stdin.flush()
        # Timeout budget: any remaining start-up allowance for the subprocess,
        # plus a floor of _MINIUMUM_TIMEOUT grown by a per-line allowance.
        time_since_proc_start = time.time() - self._proc_start_time
        timeout = (max(0, _PROCESS_START_TIMEOUT - time_since_proc_start) +
                   max(_MINIUMUM_TIMEOUT, len(lines) * _PER_LINE_TIMEOUT))
        reader_thread.join(timeout)
        if self.IsClosed():
          logging.warning(
              'deobfuscator: Close() called by another thread during join().')
          return lines
        if reader_thread.is_alive():
          logging.error('deobfuscator: Timed out.')
          self.Close()
          return lines
        return out_lines
      except IOError:
        logging.exception('deobfuscator: Exception during java_deobfuscate')
        self.Close()
        return lines

  def Close(self):
    """Kills the subprocess. Idempotent; safe to call from several threads."""
    with self._close_lock:
      needs_closing = not self.IsClosed()
      self._closed_called = True

    if needs_closing:
      # Closing stdin lets the reader thread (if any) see EOF; kill()+wait()
      # then reaps the process.
      self._proc.stdin.close()
      self._proc.kill()
      self._proc.wait()

  def __del__(self):
    # self._proc is None when Popen() fails.
    if not self._closed_called and self._proc:
      logging.error('deobfuscator: Forgot to Close()')
      self.Close()
+
+
class DeobfuscatorPool(object):
  """Maintains a fixed-size pool of Deobfuscator instances.

  As of Sep 2017, each instance requires about 500MB of RAM, as measured by:
  /usr/bin/time -v out/Release/bin/java_deobfuscate \
      out/Release/apks/ChromePublic.apk.mapping
  """

  def __init__(self, mapping_path, pool_size=4):
    """Eagerly starts |pool_size| java_deobfuscate subprocesses.

    Args:
      mapping_path: Path to the mapping file handed to each Deobfuscator.
      pool_size: Number of Deobfuscator instances to keep alive.
    """
    self._mapping_path = mapping_path
    # range() rather than xrange(): identical behavior here, and xrange()
    # does not exist on Python 3.
    self._pool = [Deobfuscator(mapping_path) for _ in range(pool_size)]
    # Allow only one thread to select from the pool at a time.
    self._lock = threading.Lock()

  def TransformLines(self, lines):
    """Deobfuscates |lines| using an available pool member.

    Args:
      lines: A list of strings without trailing newlines.

    Returns:
      A list of strings without trailing newlines.
    """
    with self._lock:
      assert self._pool, 'TransformLines() called on a closed DeobfuscatorPool.'
      # Restart any closed Deobfuscators.
      for i, d in enumerate(self._pool):
        if d.IsClosed():
          logging.warning('deobfuscator: Restarting closed instance.')
          self._pool[i] = Deobfuscator(self._mapping_path)

      selected = next((x for x in self._pool if x.IsReady()), self._pool[0])
      # Rotate the order so that next caller will not choose the same one.
      self._pool.remove(selected)
      self._pool.append(selected)

    # Called outside |self._lock|: the instance serializes its own callers
    # via its per-instance lock, so other threads may pick from the pool
    # while this transformation runs.
    return selected.TransformLines(lines)

  def Close(self):
    """Closes every pool member; the pool must not be used afterwards."""
    with self._lock:
      for d in self._pool:
        d.Close()
      self._pool = None
diff --git a/deps/v8/build/android/pylib/symbols/elf_symbolizer.py b/deps/v8/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000000..1f2f918255
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,487 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
# addr2line builds a possibly infinite memory cache that can exhaust
# the computer's memory if allowed to grow for too long. This constant
# controls how many lookups we do before restarting the process. 4000
# gives near peak performance without extreme memory usage.
ADDR2LINE_RECYCLE_LIMIT = 4000


# First four bytes of every ELF file: 0x7f followed by 'E', 'L', 'F'.
ELF_MAGIC = '\x7f\x45\x4c\x46'
+
+
def ContainsElfMagic(file_path):
  """Returns True iff |file_path| starts with the 4-byte ELF magic.

  The file is opened in binary mode: text mode would apply newline
  translation on Windows and, on Python 3, attempt to decode arbitrary
  binary data (raising UnicodeDecodeError instead of returning False).

  Args:
    file_path: path of the file to probe; must exist (os.path.getsize is
        called on it unconditionally).
  """
  if os.path.getsize(file_path) < 4:
    return False
  try:
    with open(file_path, 'rb') as f:
      b = f.read(4)
      return b == b'\x7f\x45\x4c\x46'
  except IOError:
    return False
+
+
+class ELFSymbolizer(object):
+ """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+ This class is a frontend for addr2line (part of GNU binutils), designed to
+ symbolize batches of large numbers of symbols for a given ELF file. It
+ supports sharding symbolization against many addr2line instances and
+ pipelining of multiple requests per each instance (in order to hide addr2line
+ internals and OS pipe latencies).
+
+ The interface exhibited by this class is a very simple asynchronous interface,
+ which is based on the following three methods:
+ - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+ - The |callback| method: used to communicated back the symbol information.
+ - Join(): called to conclude the batch to gather the last outstanding results.
+ In essence, before the Join method returns, this class will have issued as
+ many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+ that due to multiprocess sharding, callbacks can be delivered out of order.
+
+ Some background about addr2line:
+ - it is invoked passing the elf path in the cmdline, piping the addresses in
+ its stdin and getting results on its stdout.
+ - it has pretty large response times for the first requests, but it
+ works very well in streaming mode once it has been warmed up.
+ - it doesn't scale by itself (on more cores). However, spawning multiple
+ instances at the same time on the same file is pretty efficient as they
+ keep hitting the pagecache and become mostly CPU bound.
+ - it might hang or crash, mostly for OOM. This class deals with both of these
+ problems.
+
+ Despite the "scary" imports and the multi* words above, (almost) no multi-
+ threading/processing is involved from the python viewpoint. Concurrency
+ here is achieved by spawning several addr2line subprocesses and handling their
+ output pipes asynchronously. Therefore, all the code here (with the exception
+ of the Queue instance in Addr2Line) should be free from mind-blowing
+ thread-safety concerns.
+
+ The multiprocess sharding works as follows:
+ The symbolizer tries to use the lowest number of addr2line instances as
+ possible (with respect of |max_concurrent_jobs|) and enqueue all the requests
+ in a single addr2line instance. For few symbols (i.e. dozens) sharding isn't
+ worth the startup cost.
+ The multiprocess logic kicks in as soon as the queues for the existing
+ instances grow. Specifically, once all the existing instances reach the
+ |max_queue_size| bound, a new addr2line instance is kicked in.
+ In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+ have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+ blocking the SymbolizeAsync method.
+
+ This module has been deliberately designed to be dependency free (w.r.t. of
+ other modules in this project), to allow easy reuse in external projects.
+ """
+
+ def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+ max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+ source_root_path=None, strip_base_path=None):
+ """Args:
+ elf_file_path: path of the elf file to be symbolized.
+ addr2line_path: path of the toolchain's addr2line binary.
+ callback: a callback which will be invoked for each resolved symbol with
+ the two args (sym_info, callback_arg). The former is an instance of
+ |ELFSymbolInfo| and contains the symbol information. The latter is an
+ embedder-provided argument which is passed to SymbolizeAsync().
+ inlines: when True, the ELFSymbolInfo will contain also the details about
+ the outer inlining functions. When False, only the innermost function
+ will be provided.
+ max_concurrent_jobs: Max number of addr2line instances spawned.
+ Parallelize responsibly, addr2line is a memory and I/O monster.
+ max_queue_size: Max number of outstanding requests per addr2line instance.
+ addr2line_timeout: Max time (in seconds) to wait for a addr2line response.
+ After the timeout, the instance will be considered hung and respawned.
+ source_root_path: In some toolchains only the name of the source file is
+ is output, without any path information; disambiguation searches
+ through the source directory specified by |source_root_path| argument
+ for files whose name matches, adding the full path information to the
+ output. For example, if the toolchain outputs "unicode.cc" and there
+ is a file called "unicode.cc" located under |source_root_path|/foo,
+ the tool will replace "unicode.cc" with
+ "|source_root_path|/foo/unicode.cc". If there are multiple files with
+ the same name, disambiguation will fail because the tool cannot
+ determine which of the files was the source of the symbol.
+ strip_base_path: Rebases the symbols source paths onto |source_root_path|
+ (i.e replace |strip_base_path| with |source_root_path).
+ """
+ assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path
+ self.elf_file_path = elf_file_path
+ self.addr2line_path = addr2line_path
+ self.callback = callback
+ self.inlines = inlines
+ self.max_concurrent_jobs = (max_concurrent_jobs or
+ min(multiprocessing.cpu_count(), 4))
+ self.max_queue_size = max_queue_size
+ self.addr2line_timeout = addr2line_timeout
+ self.requests_counter = 0 # For generating monotonic request IDs.
+ self._a2l_instances = [] # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+ # If necessary, create disambiguation lookup table
+ self.disambiguate = source_root_path is not None
+ self.disambiguation_table = {}
+ self.strip_base_path = strip_base_path
+ if self.disambiguate:
+ self.source_root_path = os.path.abspath(source_root_path)
+ self._CreateDisambiguationTable()
+
+ # Create one addr2line instance. More instances will be created on demand
+ # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+ self._CreateNewA2LInstance()
+
+ def SymbolizeAsync(self, addr, callback_arg=None):
+ """Requests symbolization of a given address.
+
+ This method is not guaranteed to return immediately. It generally does, but
+ in some scenarios (e.g. all addr2line instances have full queues) it can
+ block to create back-pressure.
+
+ Args:
+ addr: address to symbolize.
+ callback_arg: optional argument which will be passed to the |callback|."""
+ assert isinstance(addr, int)
+
+ # Process all the symbols that have been resolved in the meanwhile.
+ # Essentially, this drains all the addr2line(s) out queues.
+ for a2l_to_purge in self._a2l_instances:
+ a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+ a2l_to_purge.RecycleIfNecessary()
+
+ # Find the best instance according to this logic:
+ # 1. Find an existing instance with the shortest queue.
+ # 2. If all of instances' queues are full, but there is room in the pool,
+ # (i.e. < |max_concurrent_jobs|) create a new instance.
+ # 3. If there were already |max_concurrent_jobs| instances and all of them
+ # had full queues, make back-pressure.
+
+ # 1.
+ def _SortByQueueSizeAndReqID(a2l):
+ return (a2l.queue_size, a2l.first_request_id)
+ a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+ # 2.
+ if (a2l.queue_size >= self.max_queue_size and
+ len(self._a2l_instances) < self.max_concurrent_jobs):
+ a2l = self._CreateNewA2LInstance()
+
+ # 3.
+ if a2l.queue_size >= self.max_queue_size:
+ a2l.WaitForNextSymbolInQueue()
+
+ a2l.EnqueueRequest(addr, callback_arg)
+
+ def WaitForIdle(self):
+ """Waits for all the outstanding requests to complete."""
+ for a2l in self._a2l_instances:
+ a2l.WaitForIdle()
+
+ def Join(self):
+ """Waits for all the outstanding requests to complete and terminates."""
+ for a2l in self._a2l_instances:
+ a2l.WaitForIdle()
+ a2l.Terminate()
+
+ def _CreateNewA2LInstance(self):
+ assert len(self._a2l_instances) < self.max_concurrent_jobs
+ a2l = ELFSymbolizer.Addr2Line(self)
+ self._a2l_instances.append(a2l)
+ return a2l
+
+ def _CreateDisambiguationTable(self):
+ """ Non-unique file names will result in None entries"""
+ start_time = time.time()
+ logging.info('Collecting information about available source files...')
+ self.disambiguation_table = {}
+
+ for root, _, filenames in os.walk(self.source_root_path):
+ for f in filenames:
+ self.disambiguation_table[f] = os.path.join(root, f) if (f not in
+ self.disambiguation_table) else None
+ logging.info('Finished collecting information about '
+ 'possible files (took %.1f s).',
+ (time.time() - start_time))
+
+
+ class Addr2Line(object):
+ """A python wrapper around an addr2line instance.
+
+ The communication with the addr2line process looks as follows:
+ [STDIN] [STDOUT] (from addr2line's viewpoint)
+ > f001111
+ > f002222
+ < Symbol::Name(foo, bar) for f001111
+ < /path/to/source/file.c:line_number
+ > f003333
+ < Symbol::Name2() for f002222
+ < /path/to/source/file.c:line_number
+ < Symbol::Name3() for f003333
+ < /path/to/source/file.c:line_number
+ """
+
+ SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+ def __init__(self, symbolizer):
+ self._symbolizer = symbolizer
+ self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+ # The request queue (i.e. addresses pushed to addr2line's stdin and not
+ # yet retrieved on stdout)
+ self._request_queue = collections.deque()
+
+ # This is essentially len(self._request_queue). It has been optimized to a
+ # separate field because turned out to be a perf hot-spot.
+ self.queue_size = 0
+
+ # Keep track of the number of symbols a process has processed to
+ # avoid a single process growing too big and using all the memory.
+ self._processed_symbols_count = 0
+
+ # Objects required to handle the addr2line subprocess.
+ self._proc = None # Subprocess.Popen(...) instance.
+ self._thread = None # Threading.thread instance.
+ self._out_queue = None # Queue.Queue instance (for buffering a2l stdout).
+ self._RestartAddr2LineProcess()
+
+ def EnqueueRequest(self, addr, callback_arg):
+ """Pushes an address to addr2line's stdin (and keeps track of it)."""
+ self._symbolizer.requests_counter += 1 # For global "age" of requests.
+ req_idx = self._symbolizer.requests_counter
+ self._request_queue.append((addr, callback_arg, req_idx))
+ self.queue_size += 1
+ self._WriteToA2lStdin(addr)
+
+ def WaitForIdle(self):
+ """Waits until all the pending requests have been symbolized."""
+ while self.queue_size > 0:
+ self.WaitForNextSymbolInQueue()
+
+ def WaitForNextSymbolInQueue(self):
+ """Waits for the next pending request to be symbolized."""
+ if not self.queue_size:
+ return
+
+ # This outer loop guards against a2l hanging (detecting stdout timeout).
+ while True:
+ start_time = datetime.datetime.now()
+ timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+ # The inner loop guards against a2l crashing (checking if it exited).
+ while datetime.datetime.now() - start_time < timeout:
+ # poll() returns !None if the process exited. a2l should never exit.
+ if self._proc.poll():
+ logging.warning('addr2line crashed, respawning (lib: %s).',
+ self._lib_file_name)
+ self._RestartAddr2LineProcess()
+ # TODO(primiano): the best thing to do in this case would be
+ # shrinking the pool size as, very likely, addr2line is crashed
+ # due to low memory (and the respawned one will die again soon).
+
+ try:
+ lines = self._out_queue.get(block=True, timeout=0.25)
+ except Queue.Empty:
+ # On timeout (1/4 s.) repeat the inner loop and check if either the
+ # addr2line process did crash or we waited its output for too long.
+ continue
+
+ # In nominal conditions, we get straight to this point.
+ self._ProcessSymbolOutput(lines)
+ return
+
+ # If this point is reached, we waited more than |addr2line_timeout|.
+ logging.warning('Hung addr2line process, respawning (lib: %s).',
+ self._lib_file_name)
+ self._RestartAddr2LineProcess()
+
+ def ProcessAllResolvedSymbolsInQueue(self):
+ """Consumes all the addr2line output lines produced (without blocking)."""
+ if not self.queue_size:
+ return
+ while True:
+ try:
+ lines = self._out_queue.get_nowait()
+ except Queue.Empty:
+ break
+ self._ProcessSymbolOutput(lines)
+
+ def RecycleIfNecessary(self):
+ """Restarts the process if it has been used for too long.
+
+ A long running addr2line process will consume excessive amounts
+ of memory without any gain in performance."""
+ if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+ self._RestartAddr2LineProcess()
+
+
+ def Terminate(self):
+ """Kills the underlying addr2line process.
+
+ The poller |_thread| will terminate as well due to the broken pipe."""
+ try:
+ self._proc.kill()
+ self._proc.communicate() # Essentially wait() without risking deadlock.
+ except Exception: # pylint: disable=broad-except
+ # An exception while terminating? How interesting.
+ pass
+ self._proc = None
+
+ def _WriteToA2lStdin(self, addr):
+ self._proc.stdin.write('%s\n' % hex(addr))
+ if self._symbolizer.inlines:
+ # In the case of inlines we output an extra blank line, which causes
+ # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+ self._proc.stdin.write('\n')
+ self._proc.stdin.flush()
+
  def _ProcessSymbolOutput(self, lines):
    """Parses an addr2line symbol output and triggers the client callback."""
    # addr2line answers strictly in request order, so |lines| corresponds to
    # the oldest pending request.
    (_, callback_arg, _) = self._request_queue.popleft()
    self.queue_size -= 1

    innermost_sym_info = None
    sym_info = None
    for (line1, line2) in lines:
      prev_sym_info = sym_info
      # line1 is the (demangled) function name, line2 is 'path:line'; both
      # are '?'-prefixed when addr2line could not resolve them.
      name = line1 if not line1.startswith('?') else None
      source_path = None
      source_line = None
      m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
      if m:
        if not m.group(1).startswith('?'):
          source_path = m.group(1)
        if not m.group(2).startswith('?'):
          source_line = int(m.group(2))
      else:
        logging.warning('Got invalid symbol path from addr2line: %s', line2)

      # If disambiguation is enabled, try to map a relative source path to an
      # absolute one via the precomputed disambiguation table.
      was_ambiguous = False
      disambiguated = False
      if self._symbolizer.disambiguate:
        if source_path and not posixpath.isabs(source_path):
          path = self._symbolizer.disambiguation_table.get(source_path)
          was_ambiguous = True
          disambiguated = path is not None
          source_path = path if disambiguated else source_path

      # Use absolute paths (so that paths are consistent, as disambiguation
      # uses absolute paths)
      if source_path and not was_ambiguous:
        source_path = os.path.abspath(source_path)

      if source_path and self._symbolizer.strip_base_path:
        # Strip the base path, replacing it with source_root_path (if any).
        source_path = re.sub('^' + self._symbolizer.strip_base_path,
            self._symbolizer.source_root_path or '', source_path)

      sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
                               disambiguated)
      # addr2line prints the innermost inline frame first; link each frame to
      # the one that inlined it.
      if prev_sym_info:
        prev_sym_info.inlined_by = sym_info
      if not innermost_sym_info:
        innermost_sym_info = sym_info

    self._processed_symbols_count += 1
    self._symbolizer.callback(innermost_sym_info, callback_arg)
+
  def _RestartAddr2LineProcess(self):
    """(Re)spawns the addr2line subprocess and its stdout poller thread.

    Any requests still pending in |_request_queue| are replayed on the new
    process, so a respawn after a hang/crash picks up where the previous
    process left off.
    """
    if self._proc:
      self.Terminate()

    # The only reason of existence of this Queue (and the corresponding
    # Thread below) is the lack of a subprocess.stdout.poll_avail_lines().
    # Essentially this is a pipe able to extract a couple of lines atomically.
    self._out_queue = Queue.Queue()

    # Start the underlying addr2line process in line buffered mode.

    cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
           '--exe=' + self._symbolizer.elf_file_path]
    if self._symbolizer.inlines:
      cmd += ['--inlines']
    self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
        stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)

    # Start the poller thread, which simply moves atomically the lines read
    # from the addr2line's stdout to the |_out_queue|.
    self._thread = threading.Thread(
        target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
        args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
    self._thread.daemon = True  # Don't prevent early process exit.
    self._thread.start()

    self._processed_symbols_count = 0

    # Replay the pending requests on the new process (only for the case
    # of a hung addr2line timing out during the game).
    for (addr, _, _) in self._request_queue:
      self._WriteToA2lStdin(addr)
+
+ @staticmethod
+ def StdoutReaderThread(process_pipe, queue, inlines):
+ """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+ This is the only piece of code not running on the main thread. It merely
+ writes to a Queue, which is thread-safe. In the case of inlines, it
+ detects the ??,??:0 marker and sends the lines atomically, such that the
+ main thread always receives all the lines corresponding to one symbol in
+ one shot."""
+ try:
+ lines_for_one_symbol = []
+ while True:
+ line1 = process_pipe.readline().rstrip('\r\n')
+ line2 = process_pipe.readline().rstrip('\r\n')
+ if not line1 or not line2:
+ break
+ inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+ (line1 != '??' and line2 != '??:0'))
+ if not inlines or inline_has_more_lines:
+ lines_for_one_symbol += [(line1, line2)]
+ if inline_has_more_lines:
+ continue
+ queue.put(lines_for_one_symbol)
+ lines_for_one_symbol = []
+ process_pipe.close()
+
+ # Every addr2line processes will die at some point, please die silently.
+ except (IOError, OSError):
+ pass
+
+ @property
+ def first_request_id(self):
+ """Returns the request_id of the oldest pending request in the queue."""
+ return self._request_queue[0][2] if self._request_queue else 0
+
+
class ELFSymbolInfo(object):
  """The result of the symbolization passed as first arg. of each callback."""

  def __init__(self, name, source_path, source_line, was_ambiguous=False,
               disambiguated=False):
    """All the fields here can be None (if addr2line replies with '??')."""
    self.name = name
    self.source_path = source_path
    self.source_line = source_line
    # When inlines are enabled, |inlined_by| links to the ELFSymbolInfo of
    # the function that inlined this one (and so on, forming a chain).
    self.inlined_by = None
    self.disambiguated = disambiguated
    self.was_ambiguous = was_ambiguous

  def __str__(self):
    display_name = self.name or '??'
    display_path = self.source_path or '??'
    display_line = self.source_line or 0
    return '%s [%s:%d]' % (display_name, display_path, display_line)
diff --git a/deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py b/deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000000..765b5989cb
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import unittest
+
+from pylib.symbols import elf_symbolizer
+from pylib.symbols import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+ 'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
class ELFSymbolizerTest(unittest.TestCase):
  """Exercises ELFSymbolizer against the mock_addr2line script.

  The mock returns symbol names derived from the input address, so every
  callback can be checked for consistency without a real binary.
  """
  def setUp(self):
    self._callback = functools.partial(
        ELFSymbolizerTest._SymbolizeCallback, self)
    self._resolved_addresses = set()
    # Mute warnings, we expect them due to the crash/hang tests.
    logging.getLogger().setLevel(logging.ERROR)

  def testParallelism1(self):
    self._RunTest(max_concurrent_jobs=1, num_symbols=100)

  def testParallelism4(self):
    self._RunTest(max_concurrent_jobs=4, num_symbols=100)

  def testParallelism8(self):
    self._RunTest(max_concurrent_jobs=8, num_symbols=100)

  def testCrash(self):
    # Make the mock addr2line exit mid-run; the symbolizer must respawn it.
    os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
    os.environ['MOCK_A2L_CRASH_EVERY'] = '0'

  def testHang(self):
    # Make the mock addr2line stall; the symbolizer must time out and respawn.
    os.environ['MOCK_A2L_HANG_EVERY'] = '99'
    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
    os.environ['MOCK_A2L_HANG_EVERY'] = '0'

  def testInlines(self):
    """Stimulate the inline processing logic."""
    symbolizer = elf_symbolizer.ELFSymbolizer(
        elf_file_path='/path/doesnt/matter/mock_lib1.so',
        addr2line_path=_MOCK_A2L_PATH,
        callback=self._callback,
        inlines=True,
        max_concurrent_jobs=4)

    for addr in xrange(1000):
      exp_inline = False
      exp_unknown = False

      # First 100 addresses with inlines.
      if addr < 100:
        addr += _INLINE_MOCK_ADDR
        exp_inline = True

      # Followed by 100 without inlines.
      elif addr < 200:
        pass

      # Followed by 100 interleaved inlines and not inlines.
      elif addr < 300:
        if addr & 1:
          addr += _INLINE_MOCK_ADDR
          exp_inline = True

      # Followed by 100 interleaved inlines and unknown.
      elif addr < 400:
        if addr & 1:
          addr += _INLINE_MOCK_ADDR
          exp_inline = True
        else:
          addr += _UNKNOWN_MOCK_ADDR
          exp_unknown = True

      exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
      exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
      exp_source_line = addr if not exp_unknown else None
      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
      symbolizer.SymbolizeAsync(addr, cb_arg)

    symbolizer.Join()

  def testIncompleteSyminfo(self):
    """Stimulate the symbol-not-resolved logic."""
    symbolizer = elf_symbolizer.ELFSymbolizer(
        elf_file_path='/path/doesnt/matter/mock_lib1.so',
        addr2line_path=_MOCK_A2L_PATH,
        callback=self._callback,
        max_concurrent_jobs=1)

    # Test symbols with valid name but incomplete path.
    addr = _INCOMPLETE_MOCK_ADDR
    exp_name = 'mock_sym_for_addr_%d' % addr
    exp_source_path = None
    exp_source_line = None
    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
    symbolizer.SymbolizeAsync(addr, cb_arg)

    # Test symbols with no name or sym info.
    addr = _UNKNOWN_MOCK_ADDR
    exp_name = None
    exp_source_path = None
    exp_source_line = None
    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
    symbolizer.SymbolizeAsync(addr, cb_arg)

    symbolizer.Join()

  def testWaitForIdle(self):
    symbolizer = elf_symbolizer.ELFSymbolizer(
        elf_file_path='/path/doesnt/matter/mock_lib1.so',
        addr2line_path=_MOCK_A2L_PATH,
        callback=self._callback,
        max_concurrent_jobs=1)

    # Test symbols with valid name but incomplete path.
    addr = _INCOMPLETE_MOCK_ADDR
    exp_name = 'mock_sym_for_addr_%d' % addr
    exp_source_path = None
    exp_source_line = None
    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
    symbolizer.SymbolizeAsync(addr, cb_arg)
    symbolizer.WaitForIdle()

    # Test symbols with no name or sym info.
    addr = _UNKNOWN_MOCK_ADDR
    exp_name = None
    exp_source_path = None
    exp_source_line = None
    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
    symbolizer.SymbolizeAsync(addr, cb_arg)
    symbolizer.Join()

  def _RunTest(self, max_concurrent_jobs, num_symbols):
    # Shared driver: symbolize |num_symbols| addresses and verify that every
    # expected callback (and no other) was received.
    symbolizer = elf_symbolizer.ELFSymbolizer(
        elf_file_path='/path/doesnt/matter/mock_lib1.so',
        addr2line_path=_MOCK_A2L_PATH,
        callback=self._callback,
        max_concurrent_jobs=max_concurrent_jobs,
        addr2line_timeout=0.5)

    for addr in xrange(num_symbols):
      exp_name = 'mock_sym_for_addr_%d' % addr
      exp_source_path = 'mock_src/mock_lib1.so.c'
      exp_source_line = addr
      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
      symbolizer.SymbolizeAsync(addr, cb_arg)

    symbolizer.Join()

    # Check that all the expected callbacks have been received.
    for addr in xrange(num_symbols):
      self.assertIn(addr, self._resolved_addresses)
      self._resolved_addresses.remove(addr)

    # Check for unexpected callbacks.
    self.assertEqual(len(self._resolved_addresses), 0)

  def _SymbolizeCallback(self, sym_info, cb_arg):
    self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo))
    self.assertTrue(isinstance(cb_arg, tuple))
    self.assertEqual(len(cb_arg), 5)

    # Unpack expectations from the callback extra argument.
    (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
    if exp_name is None:
      self.assertIsNone(sym_info.name)
    else:
      self.assertTrue(sym_info.name.startswith(exp_name))
    self.assertEqual(sym_info.source_path, exp_source_path)
    self.assertEqual(sym_info.source_line, exp_source_line)

    if exp_inlines:
      self.assertEqual(sym_info.name, exp_name + '_inner')
      self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
      self.assertEqual(sym_info.inlined_by.inlined_by.name,
                       exp_name + '_outer')

    # Check against duplicate callbacks.
    self.assertNotIn(addr, self._resolved_addresses)
    self._resolved_addresses.add(addr)
+
+
# Allow running this suite directly from the command line.
if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py b/deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000000..cd58f56d57
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
def main(argv):
  """Mimics addr2line's stdin/stdout protocol with mock data.

  Reads hex addresses from stdin (one per line) and prints a two-line
  (name, path:line) answer per address, where both lines are derived from
  the address itself so tests can verify consistency. The environment
  variables MOCK_A2L_CRASH_EVERY / MOCK_A2L_HANG_EVERY inject a crash or a
  1-second stall every N symbols, to exercise the symbolizer's recovery.

  Args:
    argv: full command line; only -e/--exe is actually used.
  """
  parser = optparse.OptionParser()
  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
  # Silently swallow the other unnecessary arguments.
  parser.add_option('-C', '--demangle', action='store_true')
  parser.add_option('-f', '--functions', action='store_true')
  parser.add_option('-i', '--inlines', action='store_true')
  options, _ = parser.parse_args(argv[1:])
  lib_file_name = posixpath.basename(options.exe)
  processed_sym_count = 0
  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))

  while(True):
    # Only '\r' is stripped, so a blank input line is still read as '\n'
    # (distinguishable from the empty string returned at EOF).
    line = sys.stdin.readline().rstrip('\r')
    if not line:
      break

    # An empty line should generate '??,??:0' (is used as marker for inlines).
    if line == '\n':
      print '??'
      print '??:0'
      sys.stdout.flush()
      continue

    addr = int(line, 16)
    processed_sym_count += 1
    if crash_every and processed_sym_count % crash_every == 0:
      sys.exit(1)
    if hang_every and processed_sym_count % hang_every == 0:
      time.sleep(1)

    # Addresses < 1M will return good mock symbol information.
    if addr < 1024 * 1024:
      print 'mock_sym_for_addr_%d' % addr
      print 'mock_src/%s.c:%d' % (lib_file_name, addr)

    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
    elif addr < 2 * 1024 * 1024:
      print 'mock_sym_for_addr_%d' % addr
      print '??:0'

    # Addresses 2M <= x < 3M will return unknown symbol information.
    elif addr < 3 * 1024 * 1024:
      print '??'
      print '??'

    # Addresses 3M <= x < 4M will return inlines.
    elif addr < 4 * 1024 * 1024:
      print 'mock_sym_for_addr_%d_inner' % addr
      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
      print 'mock_sym_for_addr_%d_middle' % addr
      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
      print 'mock_sym_for_addr_%d_outer' % addr
      print 'mock_src/%s.c:%d' % (lib_file_name, addr)

    sys.stdout.flush()
+
+
+if __name__ == '__main__':
+ main(sys.argv) \ No newline at end of file
diff --git a/deps/v8/build/android/pylib/symbols/stack_symbolizer.py b/deps/v8/build/android/pylib/symbols/stack_symbolizer.py
new file mode 100644
index 0000000000..123b726130
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/stack_symbolizer.py
@@ -0,0 +1,81 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import tempfile
+import time
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+_STACK_TOOL = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..',
+ 'third_party', 'android_platform', 'development',
+ 'scripts', 'stack')
+ABI_REG = re.compile('ABI: \'(.+?)\'')
+
+
+def _DeviceAbiToArch(device_abi):
+ # The order of this list is significant to find the more specific match
+ # (e.g., arm64) before the less specific (e.g., arm).
+ arches = ['arm64', 'arm', 'x86_64', 'x86_64', 'x86', 'mips']
+ for arch in arches:
+ if arch in device_abi:
+ return arch
+ raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+
class Symbolizer(object):
  """A helper class to symbolize stack."""

  def __init__(self, apk_under_test=None):
    self._apk_under_test = apk_under_test
    # Cumulative wall-clock seconds spent inside the stack tool.
    self._time_spent_symbolizing = 0


  def __del__(self):
    self.CleanUp()


  def CleanUp(self):
    """Logs the total time spent symbolizing, if any work was done."""
    if self._time_spent_symbolizing > 0:
      logging.info(
          'Total time spent symbolizing: %.2fs', self._time_spent_symbolizing)


  def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
                                         device_abi, include_stack=True):
    """Run the stack tool for given input.

    Args:
      data_to_symbolize: a list of strings to symbolize.
      include_stack: boolean whether to include stack data in output.
      device_abi: the default ABI of the device which generated the tombstone.

    Yields:
      A string for each line of resolved stack output.
    """
    arch = _DeviceAbiToArch(device_abi)
    # NOTE(review): _DeviceAbiToArch raises rather than returning a falsy
    # value, so this branch looks unreachable — confirm intended behavior.
    if not arch:
      logging.warning('No device_abi can be found.')
      return

    cmd = [_STACK_TOOL, '--arch', arch, '--output-directory',
           constants.GetOutDirectory(), '--more-info']
    env = dict(os.environ)
    env['PYTHONDONTWRITEBYTECODE'] = '1'
    with tempfile.NamedTemporaryFile() as f:
      f.write('\n'.join(data_to_symbolize))
      f.flush()
      start = time.time()
      try:
        _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env)
      finally:
        self._time_spent_symbolizing += time.time() - start
    for line in output.splitlines():
      # 'Stack Data:' marks the start of the raw stack section; stop there
      # when the caller did not ask for it.
      if not include_stack and 'Stack Data:' in line:
        break
      yield line
diff --git a/deps/v8/build/android/pylib/symbols/symbol_utils.py b/deps/v8/build/android/pylib/symbols/symbol_utils.py
new file mode 100644
index 0000000000..e4e3faac80
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/symbol_utils.py
@@ -0,0 +1,812 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import bisect
+import collections
+import logging
+import os
+import re
+
+from pylib.constants import host_paths
+from pylib.symbols import elf_symbolizer
+
+
+def _AndroidAbiToCpuArch(android_abi):
+ """Return the Chromium CPU architecture name for a given Android ABI."""
+ _ARCH_MAP = {
+ 'armeabi': 'arm',
+ 'armeabi-v7a': 'arm',
+ 'arm64-v8a': 'arm64',
+ 'x86_64': 'x64',
+ }
+ return _ARCH_MAP.get(android_abi, android_abi)
+
+
+def _HexAddressRegexpFor(android_abi):
+ """Return a regexp matching hexadecimal addresses for a given Android ABI."""
+ if android_abi in ['x86_64', 'arm64-v8a', 'mips64']:
+ width = 16
+ else:
+ width = 8
+ return '[0-9a-f]{%d}' % width
+
+
class HostLibraryFinder(object):
  """Translate device library path to matching host unstripped library path.

  Usage is the following:
    1) Create instance.
    2) Call AddSearchDir() once or more times to add host directory path to
       look for unstripped native libraries.
    3) Call Find(device_libpath) repeatedly to translate a device-specific
       library path into the corresponding host path to the unstripped
       version.
  """
  def __init__(self):
    """Initialize instance."""
    self._search_dirs = []
    # Memoizes lookups, keyed by library base name (misses stored as None).
    self._lib_map = {}

  def AddSearchDir(self, lib_dir):
    """Add a directory to the search path for host native shared libraries.

    Args:
      lib_dir: host path containing native libraries.
    """
    if not os.path.exists(lib_dir):
      logging.warning('Ignoring missing host library directory: %s', lib_dir)
      return
    if not os.path.isdir(lib_dir):
      logging.warning('Ignoring invalid host library directory: %s', lib_dir)
      return
    self._search_dirs.append(lib_dir)
    # Previous misses might now resolve in the new directory; drop the cache.
    self._lib_map = {}

  def Find(self, device_libpath):
    """Find the host file path matching a specific device library path.

    Args:
      device_libpath: device-specific file path to library or executable.
    Returns:
      host file path to the unstripped version of the library, or None.
    """
    lib_name = os.path.basename(device_libpath)
    cached_path = self._lib_map.get(lib_name)
    if cached_path:
      return cached_path

    found_path = None
    for search_dir in self._search_dirs:
      candidate = os.path.join(search_dir, lib_name)
      if os.path.exists(candidate):
        found_path = candidate
        break
    if not found_path:
      logging.debug('Could not find host library for: %s', lib_name)
    self._lib_map[lib_name] = found_path
    return found_path
+
+
+
class SymbolResolver(object):
  """Base class for objects that symbolize (library path, offset) pairs
  into symbol information strings.

  Usage:
    1) Instantiate a derived class (this base class cannot symbolize
       anything by itself).
    2) Call SetAndroidAbi() before any FindSymbolInfo() call, to select
       the Android CPU ABI used for symbolization.
    3) Optionally call AddLibraryOffset() / AddLibraryOffsets() before the
       first FindSymbolInfo() call. Pre-registering offsets lets some
       implementations work faster (e.g. addr2line-based ones, since
       addr2line is faster when its input offsets are sorted).
    4) Call FindSymbolInfo(path, offset) to get the corresponding symbol
       information string, or None if the instance cannot handle it.

  Whether paths are device-specific or host-specific depends on the
  derived class implementation.
  """
  def __init__(self):
    self._android_abi = None
    # Maps each library path to the set of offsets pre-registered for it.
    self._lib_offsets_map = collections.defaultdict(set)

  def SetAndroidAbi(self, android_abi):
    """Sets the Android ABI (e.g. 'armeabi-v7a') used by this instance.

    Some derived classes require this before FindSymbolInfo() is called.

    Raises:
      Exception if the ABI was already set to a different value.
    """
    current_abi = self._android_abi
    if current_abi and current_abi != android_abi:
      raise Exception('Cannot reset Android ABI to new value %s, already set '
                      'to %s' % (android_abi, current_abi))
    self._android_abi = android_abi

  def AddLibraryOffset(self, lib_path, offset):
    """Pre-registers a single offset of interest for a library.

    Must be called before FindSymbolInfo(), otherwise the input is ignored.

    Args:
      lib_path: A library path.
      offset: An integer offset within the library that future
        FindSymbolInfo() calls will symbolize.
    """
    self._lib_offsets_map[lib_path].add(offset)

  def AddLibraryOffsets(self, lib_path, lib_offsets):
    """Pre-registers a set of offsets of interest for a library.

    Must be called before FindSymbolInfo(), otherwise the input is ignored.

    Args:
      lib_path: A library path.
      lib_offsets: An iterable of integer offsets within the library that
        future FindSymbolInfo() calls will symbolize.
    """
    self._lib_offsets_map[lib_path].update(lib_offsets)

  # pylint: disable=unused-argument,no-self-use
  def FindSymbolInfo(self, lib_path, lib_offset):
    """Symbolizes a library path and offset.

    Args:
      lib_path: Library path (device- or host-specific, depending on the
        derived class implementation).
      lib_offset: Integer offset within the library.
    Returns:
      Corresponding symbol information string, or None.
    """
    # The base implementation cannot symbolize anything.
    return None
  # pylint: enable=unused-argument,no-self-use
+
+
class ElfSymbolResolver(SymbolResolver):
  """A SymbolResolver that can symbolize host path + offset values using
  an elf_symbolizer.ELFSymbolizer instance.
  """
  def __init__(self, addr2line_path_for_tests=None):
    super(ElfSymbolResolver, self).__init__()
    # If unset, the addr2line path is derived lazily from the Android ABI in
    # _CreateSymbolizerFor().
    self._addr2line_path = addr2line_path_for_tests

    # Used to cache one ELFSymbolizer instance per library path.
    self._elf_symbolizer_cache = {}

    # Used to cache FindSymbolInfo() results. Maps host library paths
    # to (offset -> symbol info string) dictionaries.
    self._symbol_info_cache = collections.defaultdict(dict)
    self._allow_symbolizer = True

  def _CreateSymbolizerFor(self, host_path):
    """Create the ELFSymbolizer instance associated with a given lib path."""
    addr2line_path = self._addr2line_path
    if not addr2line_path:
      if not self._android_abi:
        raise Exception(
            'Android CPU ABI must be set before calling FindSymbolInfo!')

      cpu_arch = _AndroidAbiToCpuArch(self._android_abi)
      self._addr2line_path = host_paths.ToolPath('addr2line', cpu_arch)

    return elf_symbolizer.ELFSymbolizer(
        elf_file_path=host_path, addr2line_path=self._addr2line_path,
        callback=ElfSymbolResolver._Callback, inlines=True)

  def DisallowSymbolizerForTesting(self):
    """Disallow FindSymbolInfo() from using a symbolizer.

    This is used during unit-testing to ensure that the offsets that were
    recorded via AddLibraryOffset()/AddLibraryOffsets() are properly
    symbolized, but not anything else.
    """
    self._allow_symbolizer = False

  def FindSymbolInfo(self, host_path, offset):
    """Override SymbolResolver.FindSymbolInfo.

    Args:
      host_path: Host-specific path to the native shared library.
      offset: Integer offset within the native library.
    Returns:
      A symbol info string, or None.
    """
    offset_map = self._symbol_info_cache[host_path]
    symbol_info = offset_map.get(offset)
    if symbol_info:
      return symbol_info

    # Create symbolizer on demand.
    symbolizer = self._elf_symbolizer_cache.get(host_path)
    if not symbolizer:
      symbolizer = self._CreateSymbolizerFor(host_path)
      self._elf_symbolizer_cache[host_path] = symbolizer

    # If there are pre-recorded offsets for this path, symbolize them now
    # (in one batch) and replace the cache for this path with the results.
    offsets = self._lib_offsets_map.get(host_path)
    if offsets:
      offset_map = {}
      for pre_offset in offsets:
        symbolizer.SymbolizeAsync(
            pre_offset, callback_arg=(offset_map, pre_offset))
      symbolizer.WaitForIdle()
      self._symbol_info_cache[host_path] = offset_map

      symbol_info = offset_map.get(offset)
      if symbol_info:
        return symbol_info

    if not self._allow_symbolizer:
      return None

    # Symbolize single offset. Slower if addresses are not provided in
    # increasing order to addr2line.
    symbolizer.SymbolizeAsync(offset,
                              callback_arg=(offset_map, offset))
    symbolizer.WaitForIdle()
    return offset_map.get(offset)

  @staticmethod
  def _Callback(sym_info, callback_arg):
    # Stores the stringified symbol info into the per-library offset map.
    offset_map, offset = callback_arg
    offset_map[offset] = str(sym_info)
+
+
class DeviceSymbolResolver(SymbolResolver):
  """A SymbolResolver instance that accepts device-specific paths.

  Usage is the following:
    1) Create new instance, passing a parent SymbolResolver instance that
       accepts host-specific paths, and a HostLibraryFinder instance.

    2) Optional: call AddApkOffsets() to add offsets from within an APK
       that contains uncompressed native shared libraries.

    3) Use it as any SymbolResolver instance.
  """
  def __init__(self, host_resolver, host_lib_finder):
    """Initialize instance.

    Args:
      host_resolver: A parent SymbolResolver instance that will be used
        to resolve symbols from host library paths.
      host_lib_finder: A HostLibraryFinder instance used to locate
        unstripped libraries on the host.
    """
    super(DeviceSymbolResolver, self).__init__()
    self._host_lib_finder = host_lib_finder
    # Device paths for which no host library could be found; used to avoid
    # repeating the warning below.
    self._bad_device_lib_paths = set()
    self._host_resolver = host_resolver

  def SetAndroidAbi(self, android_abi):
    # Keep the parent host resolver's ABI in sync with this instance's.
    super(DeviceSymbolResolver, self).SetAndroidAbi(android_abi)
    self._host_resolver.SetAndroidAbi(android_abi)

  def AddLibraryOffsets(self, device_lib_path, lib_offsets):
    """Associate a set of wanted offsets to a given device library.

    This must be called before FindSymbolInfo(), otherwise its input arguments
    will be ignored.

    Args:
      device_lib_path: A device-specific library path.
      lib_offsets: An iterable of integer offsets within the corresponding
        library that will be symbolized by future calls to FindSymbolInfo.
    """
    if device_lib_path in self._bad_device_lib_paths:
      return

    host_lib_path = self._host_lib_finder.Find(device_lib_path)
    if not host_lib_path:
      # NOTE: self._bad_device_lib_paths is only used to only print this
      # warning once per bad library.
      logging.warning('Could not find host library matching device path: %s',
                      device_lib_path)
      self._bad_device_lib_paths.add(device_lib_path)
      return

    self._host_resolver.AddLibraryOffsets(host_lib_path, lib_offsets)

  def AddApkOffsets(self, device_apk_path, apk_offsets, apk_translator):
    """Associate a set of wanted offsets to a given device APK path.

    This converts the APK-relative offsets into offsets relative to the
    uncompressed libraries it contains, then calls AddLibraryOffsets()
    for each one of the libraries.

    Must be called before FindSymbolInfo() as well, otherwise input arguments
    will be ignored.

    Args:
      device_apk_path: Device-specific APK path.
      apk_offsets: Iterable of offsets within the APK file.
      apk_translator: An ApkLibraryPathTranslator instance used to extract
        library paths from the APK.
    """
    libraries_map = collections.defaultdict(set)
    for offset in apk_offsets:
      lib_path, lib_offset = apk_translator.TranslatePath(device_apk_path,
                                                          offset)
      libraries_map[lib_path].add(lib_offset)

    for lib_path, lib_offsets in libraries_map.iteritems():
      self.AddLibraryOffsets(lib_path, lib_offsets)

  def FindSymbolInfo(self, device_path, offset):
    """Overrides SymbolResolver.FindSymbolInfo.

    Args:
      device_path: Device-specific library path (e.g.
        '/data/app/com.example.app-1/lib/x86/libfoo.so')
      offset: Offset in device library path.
    Returns:
      Corresponding symbol information string, or None.
    """
    host_path = self._host_lib_finder.Find(device_path)
    if not host_path:
      return None

    return self._host_resolver.FindSymbolInfo(host_path, offset)
+
+
+class MemoryMap(object):
+ """Models the memory map of a given process. Usage is:
+
+ 1) Create new instance, passing the Android ABI.
+
+ 2) Call TranslateLine() whenever you want to detect and translate any
+ memory map input line.
+
+ 3) Otherwise, it is possible to parse the whole memory map input with
+ ParseLines(), then call FindSectionForAddress() repeatedly in order
+ to translate a memory address into the corresponding mapping and
+ file information tuple (e.g. to symbolize stack entries).
+ """
+
+ # A named tuple describing interesting memory map line items.
+ # Fields:
+ # addr_start: Mapping start address in memory.
+ # file_offset: Corresponding file offset.
+ # file_size: Corresponding mapping size in bytes.
+ # file_path: Input file path.
+ # match: Corresponding regular expression match object.
+ LineTuple = collections.namedtuple('MemoryMapLineTuple',
+ 'addr_start,file_offset,file_size,'
+ 'file_path, match')
+
+ # A name tuple describing a memory map section.
+ # Fields:
+ # address: Memory address.
+ # size: Size in bytes in memory
+ # offset: Starting file offset.
+ # path: Input file path.
+ SectionTuple = collections.namedtuple('MemoryMapSection',
+ 'address,size,offset,path')
+
+ def __init__(self, android_abi):
+ """Initializes instance.
+
+ Args:
+ android_abi: Android CPU ABI name (e.g. 'armeabi-v7a')
+ """
+ hex_addr = _HexAddressRegexpFor(android_abi)
+
+ # pylint: disable=line-too-long
+ # A regular expression used to match memory map entries which look like:
+ # b278c000-b2790fff r-- 4fda000 5000 /data/app/com.google.android.apps.chrome-2/base.apk
+ # pylint: enable=line-too-long
+ self._re_map_section = re.compile(
+ r'\s*(?P<addr_start>' + hex_addr + r')-(?P<addr_end>' + hex_addr + ')' +
+ r'\s+' +
+ r'(?P<perm>...)\s+' +
+ r'(?P<file_offset>[0-9a-f]+)\s+' +
+ r'(?P<file_size>[0-9a-f]+)\s*' +
+ r'(?P<file_path>[^ \t]+)?')
+
+ self._addr_map = [] # Sorted list of (address, size, path, offset) tuples.
+ self._sorted_addresses = [] # Sorted list of address fields in _addr_map.
+ self._in_section = False
+
+ def TranslateLine(self, line, apk_path_translator):
+ """Try to translate a memory map input line, if detected.
+
+ This only takes care of converting mapped APK file path and offsets
+ into a corresponding uncompressed native library file path + new offsets,
+ e.g. '..... <offset> <size> /data/.../base.apk' gets
+ translated into '.... <new-offset> <size> /data/.../base.apk!lib/libfoo.so'
+
+ This function should always work, even if ParseLines() was not called
+ previously.
+
+ Args:
+ line: Input memory map / tombstone line.
+      apk_path_translator: An ApkLibraryPathTranslator instance, used to
+        map APK offsets into uncompressed native libraries + new offsets.
+ Returns:
+ Translated memory map line, if relevant, or unchanged input line
+ otherwise.
+ """
+ t = self._ParseLine(line.rstrip())
+ if not t:
+ return line
+
+ new_path, new_offset = apk_path_translator.TranslatePath(
+ t.file_path, t.file_offset)
+
+ if new_path == t.file_path:
+ return line
+
+ pos = t.match.start('file_path')
+ return '%s%s (offset 0x%x)%s' % (line[0:pos], new_path, new_offset,
+ line[t.match.end('file_path'):])
+
+ def ParseLines(self, input_lines, in_section=False):
+ """Parse a list of input lines and extract the APK memory map out of it.
+
+ Args:
+ input_lines: list, or iterable, of input lines.
+ in_section: Optional. If true, considers that the input lines are
+ already part of the memory map. Otherwise, wait until the start of
+ the section appears in the input before trying to record data.
+ Returns:
+ True iff APK-related memory map entries were found. False otherwise.
+ """
+ addr_list = [] # list of (address, size, file_path, file_offset) tuples.
+ self._in_section = in_section
+ for line in input_lines:
+ t = self._ParseLine(line.rstrip())
+ if not t:
+ continue
+
+ addr_list.append(t)
+
+ self._addr_map = sorted(addr_list,
+ lambda x, y: cmp(x.addr_start, y.addr_start))
+ self._sorted_addresses = [e.addr_start for e in self._addr_map]
+ return bool(self._addr_map)
+
+ def _ParseLine(self, line):
+    """Used internally to recognize memory map input lines.
+
+ Args:
+      line: Input logcat or tombstone line.
+ Returns:
+ A LineTuple instance on success, or None on failure.
+ """
+ if not self._in_section:
+ self._in_section = line.startswith('memory map:')
+ return None
+
+ m = self._re_map_section.match(line)
+ if not m:
+ self._in_section = False # End of memory map section
+ return None
+
+ # Only accept .apk and .so files that are not from the system partitions.
+ file_path = m.group('file_path')
+ if not file_path:
+ return None
+
+ if file_path.startswith('/system') or file_path.startswith('/vendor'):
+ return None
+
+ if not (file_path.endswith('.apk') or file_path.endswith('.so')):
+ return None
+
+ addr_start = int(m.group('addr_start'), 16)
+ file_offset = int(m.group('file_offset'), 16)
+ file_size = int(m.group('file_size'), 16)
+
+ return self.LineTuple(addr_start, file_offset, file_size, file_path, m)
+
+ def Dump(self):
+ """Print memory map for debugging."""
+ print 'MEMORY MAP ['
+ for t in self._addr_map:
+ print '[%08x-%08x %08x %08x %s]' % (
+ t.addr_start, t.addr_start + t.file_size, t.file_size, t.file_offset,
+ t.file_path)
+ print '] MEMORY MAP'
+
+ def FindSectionForAddress(self, addr):
+ """Find the map section corresponding to a specific memory address.
+
+    Call this method only after ParseLines() has been called to extract
+ relevant information from the memory map.
+
+ Args:
+ addr: Memory address
+ Returns:
+ A SectionTuple instance on success, or None on failure.
+ """
+ pos = bisect.bisect_right(self._sorted_addresses, addr)
+ if pos > 0:
+ # All values in [0,pos) are <= addr, just ensure that the last
+ # one contains the address as well.
+ entry = self._addr_map[pos - 1]
+ if entry.addr_start + entry.file_size > addr:
+ return self.SectionTuple(entry.addr_start, entry.file_size,
+ entry.file_offset, entry.file_path)
+ return None
+
+
+class BacktraceTranslator(object):
+ """Translates backtrace-related lines in a tombstone or crash report.
+
+ Usage is the following:
+ 1) Create new instance with appropriate arguments.
+ 2) If the tombstone / logcat input is available, one can call
+ FindLibraryOffsets() in order to detect which library offsets
+ will need to be symbolized during a future parse. Doing so helps
+ speed up the ELF symbolizer.
+ 3) For each tombstone/logcat input line, call TranslateLine() to
+ try to detect and symbolize backtrace lines.
+ """
+
+ # A named tuple for relevant input backtrace lines.
+ # Fields:
+ # rel_pc: Instruction pointer, relative to offset in library start.
+ # location: Library or APK file path.
+ # offset: Load base of executable code in library or apk file path.
+ # match: The corresponding regular expression match object.
+ # Note:
+ # The actual instruction pointer always matches the position at
+ # |offset + rel_pc| in |location|.
+ LineTuple = collections.namedtuple('BacktraceLineTuple',
+ 'rel_pc,location,offset,match')
+
+ def __init__(self, android_abi, apk_translator):
+ """Initialize instance.
+
+ Args:
+ android_abi: Android CPU ABI name (e.g. 'armeabi-v7a').
+ apk_translator: ApkLibraryPathTranslator instance used to convert
+ mapped APK file offsets into uncompressed library file paths with
+ new offsets.
+ """
+ hex_addr = _HexAddressRegexpFor(android_abi)
+
+ # A regular expression used to match backtrace lines.
+ self._re_backtrace = re.compile(
+ r'.*#(?P<frame>[0-9]{2})\s+' +
+ r'(..)\s+' +
+ r'(?P<rel_pc>' + hex_addr + r')\s+' +
+ r'(?P<location>[^ \t]+)' +
+ r'(\s+\(offset 0x(?P<offset>[0-9a-f]+)\))?')
+
+ # In certain cases, offset will be provided as <location>+0x<offset>
+ # instead of <location> (offset 0x<offset>). This is a regexp to detect
+ # this.
+ self._re_location_offset = re.compile(
+ r'.*\+0x(?P<offset>[0-9a-f]+)$')
+
+ self._apk_translator = apk_translator
+ self._in_section = False
+
+ def _ParseLine(self, line):
+ """Used internally to detect and decompose backtrace input lines.
+
+ Args:
+ line: input tombstone line.
+ Returns:
+ A LineTuple instance on success, None on failure.
+ """
+ if not self._in_section:
+ self._in_section = line.startswith('backtrace:')
+ return None
+
+ line = line.rstrip()
+ m = self._re_backtrace.match(line)
+ if not m:
+ self._in_section = False
+ return None
+
+ location = m.group('location')
+ offset = m.group('offset')
+ if not offset:
+ m2 = self._re_location_offset.match(location)
+ if m2:
+ offset = m2.group('offset')
+ location = location[0:m2.start('offset') - 3]
+
+ if not offset:
+ return None
+
+ offset = int(offset, 16)
+ rel_pc = int(m.group('rel_pc'), 16)
+
+ # Two cases to consider here:
+ #
+ # * If this is a library file directly mapped in memory, then |rel_pc|
+    #    is the direct offset within the library, and doesn't need any kind
+    #    of adjustment.
+ #
+ # * If this is a library mapped directly from an .apk file, then
+ # |rel_pc| is the offset in the APK, and |offset| happens to be the
+ # load base of the corresponding library.
+ #
+ if location.endswith('.so'):
+ # For a native library directly mapped from the file system,
+ return self.LineTuple(rel_pc, location, offset, m)
+
+ if location.endswith('.apk'):
+      # For a native library inside a memory-mapped APK file,
+ new_location, new_offset = self._apk_translator.TranslatePath(
+ location, offset)
+
+ return self.LineTuple(rel_pc, new_location, new_offset, m)
+
+ # Ignore anything else (e.g. .oat or .odex files).
+ return None
+
+ def FindLibraryOffsets(self, input_lines, in_section=False):
+ """Parse a tombstone's backtrace section and find all library offsets in it.
+
+ Args:
+      input_lines: List or iterable of input tombstone lines.
+ in_section: Optional. If True, considers that the stack section has
+ already started.
+ Returns:
+      A dictionary mapping device library paths to sets of offsets within
+      them.
+ """
+ self._in_section = in_section
+ result = collections.defaultdict(set)
+ for line in input_lines:
+ t = self._ParseLine(line)
+ if not t:
+ continue
+
+ result[t.location].add(t.offset + t.rel_pc)
+ return result
+
+ def TranslateLine(self, line, symbol_resolver):
+ """Symbolize backtrace line if recognized.
+
+ Args:
+ line: input backtrace line.
+ symbol_resolver: symbol resolver instance to use. This method will
+ call its FindSymbolInfo(device_lib_path, lib_offset) method to
+        convert offsets into symbol information strings.
+ Returns:
+ Translated line (unchanged if not recognized as a back trace).
+ """
+ t = self._ParseLine(line)
+ if not t:
+ return line
+
+ symbol_info = symbol_resolver.FindSymbolInfo(t.location,
+ t.offset + t.rel_pc)
+ if not symbol_info:
+ symbol_info = 'offset 0x%x' % t.offset
+
+ pos = t.match.start('location')
+ pos2 = t.match.end('offset') + 1
+ if pos2 <= 0:
+ pos2 = t.match.end('location')
+ return '%s%s (%s)%s' % (line[:pos], t.location, symbol_info, line[pos2:])
+
+
+class StackTranslator(object):
+ """Translates stack-related lines in a tombstone or crash report."""
+
+ # A named tuple describing relevant stack input lines.
+ # Fields:
+ # address: Address as it appears in the stack.
+ # lib_path: Library path where |address| is mapped.
+  #   lib_offset: Library load base offset for |lib_path|.
+ # match: Corresponding regular expression match object.
+ LineTuple = collections.namedtuple('StackLineTuple',
+ 'address, lib_path, lib_offset, match')
+
+ def __init__(self, android_abi, memory_map, apk_translator):
+ """Initialize instance."""
+ hex_addr = _HexAddressRegexpFor(android_abi)
+
+ # pylint: disable=line-too-long
+ # A regular expression used to recognize stack entries like:
+ #
+ # #05 bf89a180 bf89a1e4 [stack]
+ # bf89a1c8 a0c01c51 /data/app/com.google.android.apps.chrome-2/base.apk
+ # bf89a080 00000000
+ # ........ ........
+ # pylint: enable=line-too-long
+ self._re_stack_line = re.compile(
+ r'\s+(?P<frame_number>#[0-9]+)?\s*' +
+ r'(?P<stack_addr>' + hex_addr + r')\s+' +
+ r'(?P<stack_value>' + hex_addr + r')' +
+ r'(\s+(?P<location>[^ \t]+))?')
+
+ self._re_stack_abbrev = re.compile(r'\s+[.]+\s+[.]+')
+
+ self._memory_map = memory_map
+ self._apk_translator = apk_translator
+ self._in_section = False
+
+ def _ParseLine(self, line):
+ """Check a given input line for a relevant _re_stack_line match.
+
+ Args:
+ line: input tombstone line.
+ Returns:
+ A LineTuple instance on success, None on failure.
+ """
+ line = line.rstrip()
+ if not self._in_section:
+ self._in_section = line.startswith('stack:')
+ return None
+
+ m = self._re_stack_line.match(line)
+ if not m:
+ if not self._re_stack_abbrev.match(line):
+ self._in_section = False
+ return None
+
+ location = m.group('location')
+ if not location:
+ return None
+
+ if not location.endswith('.apk') and not location.endswith('.so'):
+ return None
+
+ addr = int(m.group('stack_value'), 16)
+ t = self._memory_map.FindSectionForAddress(addr)
+ if t is None:
+ return None
+
+ lib_path = t.path
+ lib_offset = t.offset + (addr - t.address)
+
+ if lib_path.endswith('.apk'):
+ lib_path, lib_offset = self._apk_translator.TranslatePath(
+ lib_path, lib_offset)
+
+ return self.LineTuple(addr, lib_path, lib_offset, m)
+
+ def FindLibraryOffsets(self, input_lines, in_section=False):
+ """Parse a tombstone's stack section and find all library offsets in it.
+
+ Args:
+      input_lines: List or iterable of input tombstone lines.
+ in_section: Optional. If True, considers that the stack section has
+ already started.
+ Returns:
+      A dictionary mapping device library paths to sets of offsets within
+      them.
+ """
+ result = collections.defaultdict(set)
+ self._in_section = in_section
+ for line in input_lines:
+ t = self._ParseLine(line)
+ if t:
+ result[t.lib_path].add(t.lib_offset)
+ return result
+
+ def TranslateLine(self, line, symbol_resolver=None):
+ """Try to translate a line of the stack dump."""
+ t = self._ParseLine(line)
+ if not t:
+ return line
+
+ symbol_info = symbol_resolver.FindSymbolInfo(t.lib_path, t.lib_offset)
+ if not symbol_info:
+ return line
+
+ pos = t.match.start('location')
+ pos2 = t.match.end('location')
+ return '%s%s (%s)%s' % (line[:pos], t.lib_path, symbol_info, line[pos2:])
diff --git a/deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py b/deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py
new file mode 100644
index 0000000000..82a7e313ef
--- /dev/null
+++ b/deps/v8/build/android/pylib/symbols/symbol_utils_unittest.py
@@ -0,0 +1,943 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import contextlib
+import logging
+import os
+import re
+import shutil
+import tempfile
+import unittest
+
+from pylib.symbols import apk_native_libs_unittest
+from pylib.symbols import mock_addr2line
+from pylib.symbols import symbol_utils
+
+_MOCK_ELF_DATA = apk_native_libs_unittest.MOCK_ELF_DATA
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+ 'mock_addr2line')
+
+
+# pylint: disable=line-too-long
+
+# list of (start_offset, end_offset, size, libpath) tuples corresponding
+# to the content of base.apk. This was taken from an x86 ChromeModern.apk
+# component build.
+_TEST_APK_LIBS = [
+ (0x01331000, 0x013696bc, 0x000386bc, 'libaccessibility.cr.so'),
+ (0x0136a000, 0x013779c4, 0x0000d9c4, 'libanimation.cr.so'),
+ (0x01378000, 0x0137f7e8, 0x000077e8, 'libapdu.cr.so'),
+ (0x01380000, 0x0155ccc8, 0x001dccc8, 'libbase.cr.so'),
+ (0x0155d000, 0x015ab98c, 0x0004e98c, 'libbase_i18n.cr.so'),
+ (0x015ac000, 0x015dff4c, 0x00033f4c, 'libbindings.cr.so'),
+ (0x015e0000, 0x015f5a54, 0x00015a54, 'libbindings_base.cr.so'),
+ (0x0160e000, 0x01731960, 0x00123960, 'libblink_common.cr.so'),
+ (0x01732000, 0x0174ce54, 0x0001ae54, 'libblink_controller.cr.so'),
+ (0x0174d000, 0x0318c528, 0x01a3f528, 'libblink_core.cr.so'),
+ (0x0318d000, 0x03191700, 0x00004700, 'libblink_mojom_broadcastchannel_bindings_shared.cr.so'),
+ (0x03192000, 0x03cd7918, 0x00b45918, 'libblink_modules.cr.so'),
+ (0x03cd8000, 0x03d137d0, 0x0003b7d0, 'libblink_mojo_bindings_shared.cr.so'),
+ (0x03d14000, 0x03d2670c, 0x0001270c, 'libblink_offscreen_canvas_mojo_bindings_shared.cr.so'),
+ (0x03d27000, 0x046c7054, 0x009a0054, 'libblink_platform.cr.so'),
+ (0x046c8000, 0x0473fbfc, 0x00077bfc, 'libbluetooth.cr.so'),
+ (0x04740000, 0x04878f40, 0x00138f40, 'libboringssl.cr.so'),
+ (0x04879000, 0x0498466c, 0x0010b66c, 'libc++_shared.so'),
+ (0x04985000, 0x0498d93c, 0x0000893c, 'libcaptive_portal.cr.so'),
+ (0x0498e000, 0x049947cc, 0x000067cc, 'libcapture_base.cr.so'),
+ (0x04995000, 0x04b39f18, 0x001a4f18, 'libcapture_lib.cr.so'),
+ (0x04b3a000, 0x04b488ec, 0x0000e8ec, 'libcbor.cr.so'),
+ (0x04b49000, 0x04e9ea5c, 0x00355a5c, 'libcc.cr.so'),
+ (0x04e9f000, 0x04ed6404, 0x00037404, 'libcc_animation.cr.so'),
+ (0x04ed7000, 0x04ef5ab4, 0x0001eab4, 'libcc_base.cr.so'),
+ (0x04ef6000, 0x04fd9364, 0x000e3364, 'libcc_blink.cr.so'),
+ (0x04fda000, 0x04fe2758, 0x00008758, 'libcc_debug.cr.so'),
+ (0x04fe3000, 0x0500ae0c, 0x00027e0c, 'libcc_ipc.cr.so'),
+ (0x0500b000, 0x05078f38, 0x0006df38, 'libcc_paint.cr.so'),
+ (0x05079000, 0x0507e734, 0x00005734, 'libcdm_manager.cr.so'),
+ (0x0507f000, 0x06f4d744, 0x01ece744, 'libchrome.cr.so'),
+ (0x06f54000, 0x06feb830, 0x00097830, 'libchromium_sqlite3.cr.so'),
+ (0x06fec000, 0x0706f554, 0x00083554, 'libclient.cr.so'),
+ (0x07070000, 0x0708da60, 0x0001da60, 'libcloud_policy_proto_generated_compile.cr.so'),
+ (0x0708e000, 0x07121f28, 0x00093f28, 'libcodec.cr.so'),
+ (0x07122000, 0x07134ab8, 0x00012ab8, 'libcolor_space.cr.so'),
+ (0x07135000, 0x07138614, 0x00003614, 'libcommon.cr.so'),
+ (0x07139000, 0x0717c938, 0x00043938, 'libcompositor.cr.so'),
+ (0x0717d000, 0x0923d78c, 0x020c078c, 'libcontent.cr.so'),
+ (0x0923e000, 0x092ae87c, 0x0007087c, 'libcontent_common_mojo_bindings_shared.cr.so'),
+ (0x092af000, 0x092be718, 0x0000f718, 'libcontent_public_common_mojo_bindings_shared.cr.so'),
+ (0x092bf000, 0x092d9a20, 0x0001aa20, 'libcrash_key.cr.so'),
+ (0x092da000, 0x092eda58, 0x00013a58, 'libcrcrypto.cr.so'),
+ (0x092ee000, 0x092f16e0, 0x000036e0, 'libdevice_base.cr.so'),
+ (0x092f2000, 0x092fe8d8, 0x0000c8d8, 'libdevice_event_log.cr.so'),
+ (0x092ff000, 0x093026a4, 0x000036a4, 'libdevice_features.cr.so'),
+ (0x09303000, 0x093f1220, 0x000ee220, 'libdevice_gamepad.cr.so'),
+ (0x093f2000, 0x09437f54, 0x00045f54, 'libdevice_vr_mojo_bindings.cr.so'),
+ (0x09438000, 0x0954c168, 0x00114168, 'libdevice_vr_mojo_bindings_blink.cr.so'),
+ (0x0954d000, 0x0955d720, 0x00010720, 'libdevice_vr_mojo_bindings_shared.cr.so'),
+ (0x0955e000, 0x0956b9c0, 0x0000d9c0, 'libdevices.cr.so'),
+ (0x0956c000, 0x0957cae8, 0x00010ae8, 'libdiscardable_memory_client.cr.so'),
+ (0x0957d000, 0x09588854, 0x0000b854, 'libdiscardable_memory_common.cr.so'),
+ (0x09589000, 0x0959cbb4, 0x00013bb4, 'libdiscardable_memory_service.cr.so'),
+ (0x0959d000, 0x095b6b90, 0x00019b90, 'libdisplay.cr.so'),
+ (0x095b7000, 0x095be930, 0x00007930, 'libdisplay_types.cr.so'),
+ (0x095bf000, 0x095c46c4, 0x000056c4, 'libdisplay_util.cr.so'),
+ (0x095c5000, 0x095f54a4, 0x000304a4, 'libdomain_reliability.cr.so'),
+ (0x095f6000, 0x0966fe08, 0x00079e08, 'libembedder.cr.so'),
+ (0x09670000, 0x096735f8, 0x000035f8, 'libembedder_switches.cr.so'),
+ (0x09674000, 0x096a3460, 0x0002f460, 'libevents.cr.so'),
+ (0x096a4000, 0x096b6d40, 0x00012d40, 'libevents_base.cr.so'),
+ (0x096b7000, 0x0981a778, 0x00163778, 'libffmpeg.cr.so'),
+ (0x0981b000, 0x09945c94, 0x0012ac94, 'libfido.cr.so'),
+ (0x09946000, 0x09a330dc, 0x000ed0dc, 'libfingerprint.cr.so'),
+ (0x09a34000, 0x09b53170, 0x0011f170, 'libfreetype_harfbuzz.cr.so'),
+ (0x09b54000, 0x09bc5c5c, 0x00071c5c, 'libgcm.cr.so'),
+ (0x09bc6000, 0x09cc8584, 0x00102584, 'libgeolocation.cr.so'),
+ (0x09cc9000, 0x09cdc8d4, 0x000138d4, 'libgeometry.cr.so'),
+ (0x09cdd000, 0x09cec8b4, 0x0000f8b4, 'libgeometry_skia.cr.so'),
+ (0x09ced000, 0x09d10e14, 0x00023e14, 'libgesture_detection.cr.so'),
+ (0x09d11000, 0x09d7595c, 0x0006495c, 'libgfx.cr.so'),
+ (0x09d76000, 0x09d7d7cc, 0x000077cc, 'libgfx_ipc.cr.so'),
+ (0x09d7e000, 0x09d82708, 0x00004708, 'libgfx_ipc_buffer_types.cr.so'),
+ (0x09d83000, 0x09d89748, 0x00006748, 'libgfx_ipc_color.cr.so'),
+ (0x09d8a000, 0x09d8f6f4, 0x000056f4, 'libgfx_ipc_geometry.cr.so'),
+ (0x09d90000, 0x09d94754, 0x00004754, 'libgfx_ipc_skia.cr.so'),
+ (0x09d95000, 0x09d9869c, 0x0000369c, 'libgfx_switches.cr.so'),
+ (0x09d99000, 0x09dba0ac, 0x000210ac, 'libgin.cr.so'),
+ (0x09dbb000, 0x09e0a8cc, 0x0004f8cc, 'libgl_in_process_context.cr.so'),
+ (0x09e0b000, 0x09e17a18, 0x0000ca18, 'libgl_init.cr.so'),
+ (0x09e18000, 0x09ee34e4, 0x000cb4e4, 'libgl_wrapper.cr.so'),
+ (0x09ee4000, 0x0a1a2e00, 0x002bee00, 'libgles2.cr.so'),
+ (0x0a1a3000, 0x0a24556c, 0x000a256c, 'libgles2_implementation.cr.so'),
+ (0x0a246000, 0x0a267038, 0x00021038, 'libgles2_utils.cr.so'),
+ (0x0a268000, 0x0a3288e4, 0x000c08e4, 'libgpu.cr.so'),
+ (0x0a329000, 0x0a3627ec, 0x000397ec, 'libgpu_ipc_service.cr.so'),
+ (0x0a363000, 0x0a388a18, 0x00025a18, 'libgpu_util.cr.so'),
+ (0x0a389000, 0x0a506d8c, 0x0017dd8c, 'libhost.cr.so'),
+ (0x0a507000, 0x0a6f0ec0, 0x001e9ec0, 'libicui18n.cr.so'),
+ (0x0a6f1000, 0x0a83b4c8, 0x0014a4c8, 'libicuuc.cr.so'),
+ (0x0a83c000, 0x0a8416e4, 0x000056e4, 'libinterfaces_shared.cr.so'),
+ (0x0a842000, 0x0a87e2a0, 0x0003c2a0, 'libipc.cr.so'),
+ (0x0a87f000, 0x0a88c98c, 0x0000d98c, 'libipc_mojom.cr.so'),
+ (0x0a88d000, 0x0a8926e4, 0x000056e4, 'libipc_mojom_shared.cr.so'),
+ (0x0a893000, 0x0a8a1e18, 0x0000ee18, 'libkeyed_service_content.cr.so'),
+ (0x0a8a2000, 0x0a8b4a30, 0x00012a30, 'libkeyed_service_core.cr.so'),
+ (0x0a8b5000, 0x0a930a80, 0x0007ba80, 'libleveldatabase.cr.so'),
+ (0x0a931000, 0x0a9b3908, 0x00082908, 'libmanager.cr.so'),
+ (0x0a9b4000, 0x0aea9bb4, 0x004f5bb4, 'libmedia.cr.so'),
+ (0x0aeaa000, 0x0b08cb88, 0x001e2b88, 'libmedia_blink.cr.so'),
+ (0x0b08d000, 0x0b0a4728, 0x00017728, 'libmedia_devices_mojo_bindings_shared.cr.so'),
+ (0x0b0a5000, 0x0b1943ec, 0x000ef3ec, 'libmedia_gpu.cr.so'),
+ (0x0b195000, 0x0b2d07d4, 0x0013b7d4, 'libmedia_mojo_services.cr.so'),
+ (0x0b2d1000, 0x0b2d4760, 0x00003760, 'libmessage_center.cr.so'),
+ (0x0b2d5000, 0x0b2e0938, 0x0000b938, 'libmessage_support.cr.so'),
+ (0x0b2e1000, 0x0b2f3ad0, 0x00012ad0, 'libmetrics_cpp.cr.so'),
+ (0x0b2f4000, 0x0b313bb8, 0x0001fbb8, 'libmidi.cr.so'),
+ (0x0b314000, 0x0b31b848, 0x00007848, 'libmojo_base_lib.cr.so'),
+ (0x0b31c000, 0x0b3329f8, 0x000169f8, 'libmojo_base_mojom.cr.so'),
+ (0x0b333000, 0x0b34b98c, 0x0001898c, 'libmojo_base_mojom_blink.cr.so'),
+ (0x0b34c000, 0x0b354700, 0x00008700, 'libmojo_base_mojom_shared.cr.so'),
+ (0x0b355000, 0x0b3608b0, 0x0000b8b0, 'libmojo_base_shared_typemap_traits.cr.so'),
+ (0x0b361000, 0x0b3ad454, 0x0004c454, 'libmojo_edk.cr.so'),
+ (0x0b3ae000, 0x0b3c4a20, 0x00016a20, 'libmojo_edk_ports.cr.so'),
+ (0x0b3c5000, 0x0b3d38a0, 0x0000e8a0, 'libmojo_mojom_bindings.cr.so'),
+ (0x0b3d4000, 0x0b3da6e8, 0x000066e8, 'libmojo_mojom_bindings_shared.cr.so'),
+ (0x0b3db000, 0x0b3e27f0, 0x000077f0, 'libmojo_public_system.cr.so'),
+ (0x0b3e3000, 0x0b3fa9fc, 0x000179fc, 'libmojo_public_system_cpp.cr.so'),
+ (0x0b3fb000, 0x0b407728, 0x0000c728, 'libmojom_core_shared.cr.so'),
+ (0x0b408000, 0x0b421744, 0x00019744, 'libmojom_platform_shared.cr.so'),
+ (0x0b422000, 0x0b43451c, 0x0001251c, 'libnative_theme.cr.so'),
+ (0x0b435000, 0x0baaa1bc, 0x006751bc, 'libnet.cr.so'),
+ (0x0baab000, 0x0bac3c08, 0x00018c08, 'libnet_with_v8.cr.so'),
+ (0x0bac4000, 0x0bb74670, 0x000b0670, 'libnetwork_cpp.cr.so'),
+ (0x0bb75000, 0x0bbaee8c, 0x00039e8c, 'libnetwork_cpp_base.cr.so'),
+ (0x0bbaf000, 0x0bd21844, 0x00172844, 'libnetwork_service.cr.so'),
+ (0x0bd22000, 0x0bd256e4, 0x000036e4, 'libnetwork_session_configurator.cr.so'),
+ (0x0bd26000, 0x0bd33734, 0x0000d734, 'libonc.cr.so'),
+ (0x0bd34000, 0x0bd9ce18, 0x00068e18, 'libperfetto.cr.so'),
+ (0x0bd9d000, 0x0bda4854, 0x00007854, 'libplatform.cr.so'),
+ (0x0bda5000, 0x0bec5ce4, 0x00120ce4, 'libpolicy_component.cr.so'),
+ (0x0bec6000, 0x0bf5ab58, 0x00094b58, 'libpolicy_proto.cr.so'),
+ (0x0bf5b000, 0x0bf86fbc, 0x0002bfbc, 'libprefs.cr.so'),
+ (0x0bf87000, 0x0bfa5d74, 0x0001ed74, 'libprinting.cr.so'),
+ (0x0bfa6000, 0x0bfe0e80, 0x0003ae80, 'libprotobuf_lite.cr.so'),
+ (0x0bfe1000, 0x0bff0a18, 0x0000fa18, 'libproxy_config.cr.so'),
+ (0x0bff1000, 0x0c0f6654, 0x00105654, 'libpublic.cr.so'),
+ (0x0c0f7000, 0x0c0fa6a4, 0x000036a4, 'librange.cr.so'),
+ (0x0c0fb000, 0x0c118058, 0x0001d058, 'libraster.cr.so'),
+ (0x0c119000, 0x0c133d00, 0x0001ad00, 'libresource_coordinator_cpp.cr.so'),
+ (0x0c134000, 0x0c1396a0, 0x000056a0, 'libresource_coordinator_cpp_base.cr.so'),
+ (0x0c13a000, 0x0c1973b8, 0x0005d3b8, 'libresource_coordinator_public_mojom.cr.so'),
+ (0x0c198000, 0x0c2033e8, 0x0006b3e8, 'libresource_coordinator_public_mojom_blink.cr.so'),
+ (0x0c204000, 0x0c219744, 0x00015744, 'libresource_coordinator_public_mojom_shared.cr.so'),
+ (0x0c21a000, 0x0c21e700, 0x00004700, 'libsandbox.cr.so'),
+ (0x0c21f000, 0x0c22f96c, 0x0001096c, 'libsandbox_services.cr.so'),
+ (0x0c230000, 0x0c249d58, 0x00019d58, 'libseccomp_bpf.cr.so'),
+ (0x0c24a000, 0x0c24e714, 0x00004714, 'libseccomp_starter_android.cr.so'),
+ (0x0c24f000, 0x0c4ae9f0, 0x0025f9f0, 'libservice.cr.so'),
+ (0x0c4af000, 0x0c4c3ae4, 0x00014ae4, 'libservice_manager_cpp.cr.so'),
+ (0x0c4c4000, 0x0c4cb708, 0x00007708, 'libservice_manager_cpp_types.cr.so'),
+ (0x0c4cc000, 0x0c4fbe30, 0x0002fe30, 'libservice_manager_mojom.cr.so'),
+ (0x0c4fc000, 0x0c532e78, 0x00036e78, 'libservice_manager_mojom_blink.cr.so'),
+ (0x0c533000, 0x0c53669c, 0x0000369c, 'libservice_manager_mojom_constants.cr.so'),
+ (0x0c537000, 0x0c53e85c, 0x0000785c, 'libservice_manager_mojom_constants_blink.cr.so'),
+ (0x0c53f000, 0x0c542668, 0x00003668, 'libservice_manager_mojom_constants_shared.cr.so'),
+ (0x0c543000, 0x0c54d700, 0x0000a700, 'libservice_manager_mojom_shared.cr.so'),
+ (0x0c54e000, 0x0c8fc6ec, 0x003ae6ec, 'libsessions.cr.so'),
+ (0x0c8fd000, 0x0c90a924, 0x0000d924, 'libshared_memory_support.cr.so'),
+ (0x0c90b000, 0x0c9148ec, 0x000098ec, 'libshell_dialogs.cr.so'),
+ (0x0c915000, 0x0cf8de70, 0x00678e70, 'libskia.cr.so'),
+ (0x0cf8e000, 0x0cf978bc, 0x000098bc, 'libsnapshot.cr.so'),
+ (0x0cf98000, 0x0cfb7d9c, 0x0001fd9c, 'libsql.cr.so'),
+ (0x0cfb8000, 0x0cfbe744, 0x00006744, 'libstartup_tracing.cr.so'),
+ (0x0cfbf000, 0x0d19b4e4, 0x001dc4e4, 'libstorage_browser.cr.so'),
+ (0x0d19c000, 0x0d2a773c, 0x0010b73c, 'libstorage_common.cr.so'),
+ (0x0d2a8000, 0x0d2ac6fc, 0x000046fc, 'libsurface.cr.so'),
+ (0x0d2ad000, 0x0d2baa98, 0x0000da98, 'libtracing.cr.so'),
+ (0x0d2bb000, 0x0d2f36b0, 0x000386b0, 'libtracing_cpp.cr.so'),
+ (0x0d2f4000, 0x0d326e70, 0x00032e70, 'libtracing_mojom.cr.so'),
+ (0x0d327000, 0x0d33270c, 0x0000b70c, 'libtracing_mojom_shared.cr.so'),
+ (0x0d333000, 0x0d46d804, 0x0013a804, 'libui_android.cr.so'),
+ (0x0d46e000, 0x0d4cb3f8, 0x0005d3f8, 'libui_base.cr.so'),
+ (0x0d4cc000, 0x0d4dbc40, 0x0000fc40, 'libui_base_ime.cr.so'),
+ (0x0d4dc000, 0x0d4e58d4, 0x000098d4, 'libui_data_pack.cr.so'),
+ (0x0d4e6000, 0x0d51d1e0, 0x000371e0, 'libui_devtools.cr.so'),
+ (0x0d51e000, 0x0d52b984, 0x0000d984, 'libui_message_center_cpp.cr.so'),
+ (0x0d52c000, 0x0d539a48, 0x0000da48, 'libui_touch_selection.cr.so'),
+ (0x0d53a000, 0x0d55bc60, 0x00021c60, 'liburl.cr.so'),
+ (0x0d55c000, 0x0d55f6b4, 0x000036b4, 'liburl_ipc.cr.so'),
+ (0x0d560000, 0x0d5af110, 0x0004f110, 'liburl_matcher.cr.so'),
+ (0x0d5b0000, 0x0d5e2fac, 0x00032fac, 'libuser_manager.cr.so'),
+ (0x0d5e3000, 0x0d5e66e4, 0x000036e4, 'libuser_prefs.cr.so'),
+ (0x0d5e7000, 0x0e3e1cc8, 0x00dfacc8, 'libv8.cr.so'),
+ (0x0e3e2000, 0x0e400ae0, 0x0001eae0, 'libv8_libbase.cr.so'),
+ (0x0e401000, 0x0e4d91d4, 0x000d81d4, 'libviz_common.cr.so'),
+ (0x0e4da000, 0x0e4df7e4, 0x000057e4, 'libviz_resource_format.cr.so'),
+ (0x0e4e0000, 0x0e5b7120, 0x000d7120, 'libweb_dialogs.cr.so'),
+ (0x0e5b8000, 0x0e5c7a18, 0x0000fa18, 'libwebdata_common.cr.so'),
+ (0x0e5c8000, 0x0e61bfe4, 0x00053fe4, 'libwtf.cr.so'),
+]
+
+
+# A small memory map fragment extracted from a tombstone for a process that
+# had loaded the APK corresponding to _TEST_APK_LIBS above.
+_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw- 0 cb000 /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw- cb000 400000 /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff --- 4cb000 1fb35000 /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw- 0 1000 /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff --- 1000 1ffff000 /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw- 0 9d9000 /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r-- 0 1eb2000 /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x 1eb2000 1cfc000 /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw- 3bae000 1000 /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw- 0 dc000 /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw- 0 1000 /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw- 1000 11000 /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff --- 12000 3b13000 /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw- 3b25000 3ff000 /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r-- 8a9000 18f000 /data/app/com.example.app-2/base.apk
+92539000-9255bfff r-- 0 23000 /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r-- 213000 38000 /data/app/com.example.app-2/base.apk
+92594000-925c0fff r-- 87d000 2d000 /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r-- a37000 213000 /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r-- 24a000 634000 /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r-- a931000 30000 /data/app/com.example.app-2/base.apk
+92e38000-92e86fff r-x a961000 4f000 /data/app/com.example.app-2/base.apk
+92e87000-92e8afff rw- a9b0000 4000 /data/app/com.example.app-2/base.apk
+92e8b000-92e8bfff rw- 0 1000
+92e8c000-92e9dfff r-- d5b0000 12000 /data/app/com.example.app-2/base.apk
+92e9e000-92ebcfff r-x d5c2000 1f000 /data/app/com.example.app-2/base.apk
+92ebd000-92ebefff rw- d5e1000 2000 /data/app/com.example.app-2/base.apk
+92ebf000-92ebffff rw- 0 1000
+'''
+
+# list of (address, size, path, offset) tuples that must appear in
+# _TEST_MEMORY_MAP. Not all sections need to be listed.
+_TEST_MEMORY_MAP_SECTIONS = [
+ (0x923aa000, 0x18f000, '/data/app/com.example.app-2/base.apk', 0x8a9000),
+ (0x9255c000, 0x038000, '/data/app/com.example.app-2/base.apk', 0x213000),
+ (0x92594000, 0x02d000, '/data/app/com.example.app-2/base.apk', 0x87d000),
+ (0x925c1000, 0x213000, '/data/app/com.example.app-2/base.apk', 0xa37000),
+]
+
+_EXPECTED_TEST_MEMORY_MAP = r'''memory map:
+12c00000-12ccafff rw- 0 cb000 /dev/ashmem/dalvik-main space (deleted)
+12ccb000-130cafff rw- cb000 400000 /dev/ashmem/dalvik-main space (deleted)
+130cb000-32bfffff --- 4cb000 1fb35000 /dev/ashmem/dalvik-main space (deleted)
+32c00000-32c00fff rw- 0 1000 /dev/ashmem/dalvik-main space 1 (deleted)
+32c01000-52bfffff --- 1000 1ffff000 /dev/ashmem/dalvik-main space 1 (deleted)
+6f3b8000-6fd90fff rw- 0 9d9000 /data/dalvik-cache/x86/system@framework@boot.art
+6fd91000-71c42fff r-- 0 1eb2000 /data/dalvik-cache/x86/system@framework@boot.oat
+71c43000-7393efff r-x 1eb2000 1cfc000 /data/dalvik-cache/x86/system@framework@boot.oat (load base 0x71c43000)
+7393f000-7393ffff rw- 3bae000 1000 /data/dalvik-cache/x86/system@framework@boot.oat
+73940000-73a1bfff rw- 0 dc000 /dev/ashmem/dalvik-zygote space (deleted)
+73a1c000-73a1cfff rw- 0 1000 /dev/ashmem/dalvik-non moving space (deleted)
+73a1d000-73a2dfff rw- 1000 11000 /dev/ashmem/dalvik-non moving space (deleted)
+73a2e000-77540fff --- 12000 3b13000 /dev/ashmem/dalvik-non moving space (deleted)
+77541000-7793ffff rw- 3b25000 3ff000 /dev/ashmem/dalvik-non moving space (deleted)
+923aa000-92538fff r-- 8a9000 18f000 /data/app/com.example.app-2/base.apk
+92539000-9255bfff r-- 0 23000 /data/data/com.example.app/app_data/paks/es.pak@162db1c6689
+9255c000-92593fff r-- 213000 38000 /data/app/com.example.app-2/base.apk
+92594000-925c0fff r-- 87d000 2d000 /data/app/com.example.app-2/base.apk
+925c1000-927d3fff r-- a37000 213000 /data/app/com.example.app-2/base.apk
+927d4000-92e07fff r-- 24a000 634000 /data/app/com.example.app-2/base.apk
+92e08000-92e37fff r-- a931000 30000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x0)
+92e38000-92e86fff r-x a961000 4f000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x30000)
+92e87000-92e8afff rw- a9b0000 4000 /data/app/com.example.app-2/base.apk!lib/libmanager.cr.so (offset 0x7f000)
+92e8b000-92e8bfff rw- 0 1000
+92e8c000-92e9dfff r-- d5b0000 12000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x0)
+92e9e000-92ebcfff r-x d5c2000 1f000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x12000)
+92ebd000-92ebefff rw- d5e1000 2000 /data/app/com.example.app-2/base.apk!lib/libuser_manager.cr.so (offset 0x31000)
+92ebf000-92ebffff rw- 0 1000
+'''
+
# Example stack section, taken from the same tombstone that _TEST_MEMORY_MAP
# was extracted from. Each line is: <stack-addr> <stored-value> [<location>].
_TEST_STACK = r'''stack:
         bf89a070 b7439468 /system/lib/libc.so
         bf89a074 bf89a1e4 [stack]
         bf89a078 932d4000 /data/app/com.example.app-2/base.apk
         bf89a07c b73bfbc9 /system/lib/libc.so (pthread_mutex_lock+65)
         bf89a080 00000000
         bf89a084 4000671c /dev/ashmem/dalvik-main space 1 (deleted)
         bf89a088 932d1d86 /data/app/com.example.app-2/base.apk
         bf89a08c b743671c /system/lib/libc.so
         bf89a090 b77f8c00 /system/bin/linker
         bf89a094 b743cc90
         bf89a098 932d1d4a /data/app/com.example.app-2/base.apk
         bf89a09c b73bf271 /system/lib/libc.so (__pthread_internal_find(long)+65)
         bf89a0a0 b743cc90
         bf89a0a4 bf89a0b0 [stack]
         bf89a0a8 bf89a0b8 [stack]
         bf89a0ac 00000008
         ........ ........
    #00 bf89a0b0 00000006
         bf89a0b4 00000002
         bf89a0b8 b743671c /system/lib/libc.so
         bf89a0bc b73bf5d9 /system/lib/libc.so (pthread_kill+71)
    #01 bf89a0c0 00006937
         bf89a0c4 00006937
         bf89a0c8 00000006
         bf89a0cc b77fd3a9 /system/bin/app_process32 (sigprocmask+141)
         bf89a0d0 00000002
         bf89a0d4 bf89a0ec [stack]
         bf89a0d8 00000000
         bf89a0dc b743671c /system/lib/libc.so
         bf89a0e0 bf89a12c [stack]
         bf89a0e4 bf89a1e4 [stack]
         bf89a0e8 932d1d4a /data/app/com.example.app-2/base.apk
         bf89a0ec b7365206 /system/lib/libc.so (raise+37)
    #02 bf89a0f0 b77f8c00 /system/bin/linker
         bf89a0f4 00000006
         bf89a0f8 b7439468 /system/lib/libc.so
         bf89a0fc b743671c /system/lib/libc.so
         bf89a100 bf89a12c [stack]
         bf89a104 b743671c /system/lib/libc.so
         bf89a108 bf89a12c [stack]
         bf89a10c b735e9e5 /system/lib/libc.so (abort+81)
    #03 bf89a110 00000006
         bf89a114 bf89a12c [stack]
         bf89a118 00000000
         bf89a11c b55a3d3b /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
         bf89a120 b7439468 /system/lib/libc.so
         bf89a124 b55ba38d /system/lib/libprotobuf-cpp-lite.so
         bf89a128 b55ba408 /system/lib/libprotobuf-cpp-lite.so
         bf89a12c ffffffdf
         bf89a130 0000003d
         bf89a134 adfedf00 [anon:libc_malloc]
         bf89a138 bf89a158 [stack]
    #04 bf89a13c a0cee7f0 /data/app/com.example.app-2/base.apk
         bf89a140 b55c1cb0 /system/lib/libprotobuf-cpp-lite.so
         bf89a144 bf89a1e4 [stack]
'''

# Expected value of _TEST_STACK after translation of addresses in the APK
# into offsets into libraries. NOTE: identical to _TEST_STACK here because
# none of the stack values falls inside an embedded-library file range.
_EXPECTED_STACK = r'''stack:
         bf89a070 b7439468 /system/lib/libc.so
         bf89a074 bf89a1e4 [stack]
         bf89a078 932d4000 /data/app/com.example.app-2/base.apk
         bf89a07c b73bfbc9 /system/lib/libc.so (pthread_mutex_lock+65)
         bf89a080 00000000
         bf89a084 4000671c /dev/ashmem/dalvik-main space 1 (deleted)
         bf89a088 932d1d86 /data/app/com.example.app-2/base.apk
         bf89a08c b743671c /system/lib/libc.so
         bf89a090 b77f8c00 /system/bin/linker
         bf89a094 b743cc90
         bf89a098 932d1d4a /data/app/com.example.app-2/base.apk
         bf89a09c b73bf271 /system/lib/libc.so (__pthread_internal_find(long)+65)
         bf89a0a0 b743cc90
         bf89a0a4 bf89a0b0 [stack]
         bf89a0a8 bf89a0b8 [stack]
         bf89a0ac 00000008
         ........ ........
    #00 bf89a0b0 00000006
         bf89a0b4 00000002
         bf89a0b8 b743671c /system/lib/libc.so
         bf89a0bc b73bf5d9 /system/lib/libc.so (pthread_kill+71)
    #01 bf89a0c0 00006937
         bf89a0c4 00006937
         bf89a0c8 00000006
         bf89a0cc b77fd3a9 /system/bin/app_process32 (sigprocmask+141)
         bf89a0d0 00000002
         bf89a0d4 bf89a0ec [stack]
         bf89a0d8 00000000
         bf89a0dc b743671c /system/lib/libc.so
         bf89a0e0 bf89a12c [stack]
         bf89a0e4 bf89a1e4 [stack]
         bf89a0e8 932d1d4a /data/app/com.example.app-2/base.apk
         bf89a0ec b7365206 /system/lib/libc.so (raise+37)
    #02 bf89a0f0 b77f8c00 /system/bin/linker
         bf89a0f4 00000006
         bf89a0f8 b7439468 /system/lib/libc.so
         bf89a0fc b743671c /system/lib/libc.so
         bf89a100 bf89a12c [stack]
         bf89a104 b743671c /system/lib/libc.so
         bf89a108 bf89a12c [stack]
         bf89a10c b735e9e5 /system/lib/libc.so (abort+81)
    #03 bf89a110 00000006
         bf89a114 bf89a12c [stack]
         bf89a118 00000000
         bf89a11c b55a3d3b /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::DefaultLogHandler(google::protobuf::LogLevel, char const*, int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)+99)
         bf89a120 b7439468 /system/lib/libc.so
         bf89a124 b55ba38d /system/lib/libprotobuf-cpp-lite.so
         bf89a128 b55ba408 /system/lib/libprotobuf-cpp-lite.so
         bf89a12c ffffffdf
         bf89a130 0000003d
         bf89a134 adfedf00 [anon:libc_malloc]
         bf89a138 bf89a158 [stack]
    #04 bf89a13c a0cee7f0 /data/app/com.example.app-2/base.apk
         bf89a140 b55c1cb0 /system/lib/libprotobuf-cpp-lite.so
         bf89a144 bf89a1e4 [stack]
'''
+
# Example backtrace section from a tombstone. APK frames only carry a raw
# file offset (the 'offset 0x...' suffix), not the embedded library name.
_TEST_BACKTRACE = r'''backtrace:
    #00 pc 00084126  /system/lib/libc.so (tgkill+22)
    #01 pc 000815d8  /system/lib/libc.so (pthread_kill+70)
    #02 pc 00027205  /system/lib/libc.so (raise+36)
    #03 pc 000209e4  /system/lib/libc.so (abort+80)
    #04 pc 0000cf73  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
    #05 pc 0000cf8e  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
    #06 pc 0000d27f  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
    #07 pc 007cd236  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #08 pc 000111a9  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
    #09 pc 00013228  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
    #10 pc 000131de  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0xbfc2000)
    #11 pc 007cd2d8  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #12 pc 007cd956  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #13 pc 007c2d4a  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #14 pc 009fc9f1  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #15 pc 009fc8ea  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #16 pc 00561c63  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #17 pc 0106fbdb  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #18 pc 004d7371  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #19 pc 004d8159  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #20 pc 004d7b96  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #21 pc 004da4b6  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #22 pc 005ab66c  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
    #23 pc 005afca2  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
    #24 pc 0000cae8  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x598d000)
    #25 pc 00ce864f  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
    #26 pc 00ce8dfa  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
    #27 pc 00ce74c6  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
    #28 pc 00004616  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x961e000)
    #29 pc 00ce8215  /data/app/com.google.android.apps.chrome-2/base.apk (offset 0x7daa000)
    #30 pc 0013d8c7  /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
    #31 pc 00137c52  /system/lib/libart.so (art_quick_invoke_static_stub+418)
    #32 pc 00143651  /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
    #33 pc 005e06ae  /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
    #34 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #35 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
    #36 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
    #37 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
    #38 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #39 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
    #40 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
    #41 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
    #42 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #43 pc 0032ebf9  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
    #44 pc 000fc955  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
    #45 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
    #46 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #47 pc 0033090c  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
    #48 pc 000fc67f  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
    #49 pc 00300700  /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
    #50 pc 00667c73  /system/lib/libart.so (artQuickToInterpreterBridge+808)
    #51 pc 0013d98d  /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
    #52 pc 7264bc5b  /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
'''

# Expected value of _TEST_BACKTRACE after translation: APK paths gain a
# '!lib/<name>.so' suffix and the file offset is rebased onto the library.
_EXPECTED_BACKTRACE = r'''backtrace:
    #00 pc 00084126  /system/lib/libc.so (tgkill+22)
    #01 pc 000815d8  /system/lib/libc.so (pthread_kill+70)
    #02 pc 00027205  /system/lib/libc.so (raise+36)
    #03 pc 000209e4  /system/lib/libc.so (abort+80)
    #04 pc 0000cf73  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogMessage::Finish()+117)
    #05 pc 0000cf8e  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::LogFinisher::operator=(google::protobuf::internal::LogMessage&)+26)
    #06 pc 0000d27f  /system/lib/libprotobuf-cpp-lite.so (google::protobuf::internal::VerifyVersion(int, int, char const*)+574)
    #07 pc 007cd236  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #08 pc 000111a9  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
    #09 pc 00013228  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
    #10 pc 000131de  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so (offset 0x1c000)
    #11 pc 007cd2d8  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #12 pc 007cd956  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #13 pc 007c2d4a  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #14 pc 009fc9f1  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #15 pc 009fc8ea  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #16 pc 00561c63  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #17 pc 0106fbdb  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #18 pc 004d7371  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #19 pc 004d8159  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #20 pc 004d7b96  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #21 pc 004da4b6  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #22 pc 005ab66c  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
    #23 pc 005afca2  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
    #24 pc 0000cae8  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so (offset 0x90e000)
    #25 pc 00ce864f  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
    #26 pc 00ce8dfa  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
    #27 pc 00ce74c6  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
    #28 pc 00004616  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so (offset 0x28000)
    #29 pc 00ce8215  /data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so (offset 0xc2d000)
    #30 pc 0013d8c7  /system/lib/libart.so (art_quick_generic_jni_trampoline+71)
    #31 pc 00137c52  /system/lib/libart.so (art_quick_invoke_static_stub+418)
    #32 pc 00143651  /system/lib/libart.so (art::ArtMethod::Invoke(art::Thread*, unsigned int*, unsigned int, art::JValue*, char const*)+353)
    #33 pc 005e06ae  /system/lib/libart.so (artInterpreterToCompiledCodeBridge+190)
    #34 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #35 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
    #36 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
    #37 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
    #38 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #39 pc 0032cfc0  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)0, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+160)
    #40 pc 000fc703  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29891)
    #41 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
    #42 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #43 pc 0032ebf9  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)2, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+297)
    #44 pc 000fc955  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+30485)
    #45 pc 00300af7  /system/lib/libart.so (artInterpreterToInterpreterBridge+188)
    #46 pc 00328b5d  /system/lib/libart.so (bool art::interpreter::DoCall<false, false>(art::ArtMethod*, art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+445)
    #47 pc 0033090c  /system/lib/libart.so (bool art::interpreter::DoInvoke<(art::InvokeType)4, false, false>(art::Thread*, art::ShadowFrame&, art::Instruction const*, unsigned short, art::JValue*)+636)
    #48 pc 000fc67f  /system/lib/libart.so (art::JValue art::interpreter::ExecuteGotoImpl<false, false>(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame&, art::JValue)+29759)
    #49 pc 00300700  /system/lib/libart.so (art::interpreter::EnterInterpreterFromEntryPoint(art::Thread*, art::DexFile::CodeItem const*, art::ShadowFrame*)+128)
    #50 pc 00667c73  /system/lib/libart.so (artQuickToInterpreterBridge+808)
    #51 pc 0013d98d  /system/lib/libart.so (art_quick_to_interpreter_bridge+77)
    #52 pc 7264bc5b  /data/dalvik-cache/x86/system@framework@boot.oat (offset 0x1eb2000)
'''

# Library offsets expected from BacktraceTranslator.FindLibraryOffsets() when
# run over _EXPECTED_BACKTRACE: maps each translated library path to the set
# of (map-offset + pc) addresses appearing in its frames.
_EXPECTED_BACKTRACE_OFFSETS_MAP = {
  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libprotobuf_lite.cr.so':
      set([
          0x1c000 + 0x111a9,
          0x1c000 + 0x13228,
          0x1c000 + 0x131de,
      ]),

  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libchrome.cr.so':
      set([
          0x90e000 + 0x7cd236,
          0x90e000 + 0x7cd2d8,
          0x90e000 + 0x7cd956,
          0x90e000 + 0x7c2d4a,
          0x90e000 + 0x9fc9f1,
          0x90e000 + 0x9fc8ea,
          0x90e000 + 0x561c63,
          0x90e000 + 0x106fbdb,
          0x90e000 + 0x4d7371,
          0x90e000 + 0x4d8159,
          0x90e000 + 0x4d7b96,
          0x90e000 + 0x4da4b6,
          0x90e000 + 0xcae8,
      ]),
  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libcontent.cr.so':
      set([
          0xc2d000 + 0x5ab66c,
          0xc2d000 + 0x5afca2,
          0xc2d000 + 0xce864f,
          0xc2d000 + 0xce8dfa,
          0xc2d000 + 0xce74c6,
          0xc2d000 + 0xce8215,
      ]),
  '/data/app/com.google.android.apps.chrome-2/base.apk!lib/libembedder.cr.so':
      set([
          0x28000 + 0x4616,
      ])
}

# pylint: enable=line-too-long

_ONE_MB = 1024 * 1024
# Mock addr2line output, keyed by address, used by the resolver tests below.
_TEST_SYMBOL_DATA = {
  # Regular symbols
  0: 'mock_sym_for_addr_0 [mock_src/libmock1.so.c:0]',
  0x1000: 'mock_sym_for_addr_4096 [mock_src/libmock1.so.c:4096]',

  # Symbols without source file path.
  _ONE_MB: 'mock_sym_for_addr_1048576 [??:0]',
  _ONE_MB + 0x8234: 'mock_sym_for_addr_1081908 [??:0]',

  # Unknown symbol.
  2 * _ONE_MB: '?? [??:0]',

  # Inlined symbol.
  3 * _ONE_MB:
    'mock_sym_for_addr_3145728_inner [mock_src/libmock1.so.c:3145728]',
}
+
@contextlib.contextmanager
def _TempDir():
  """Context manager yielding a fresh temporary directory path.

  The directory and everything inside it is removed when the context exits,
  even on exceptions.
  """
  temp_path = tempfile.mkdtemp()
  try:
    yield temp_path
  finally:
    shutil.rmtree(temp_path)
+
+
def _TouchFile(path):
  """Create an empty file at |path|, like the 'touch' shell command.

  Missing parent directories are created on demand.
  """
  parent_dir = os.path.dirname(path)
  try:
    os.makedirs(parent_dir)
  except OSError:
    # Best effort: the parent directory probably already exists; any real
    # problem will surface through the open() call below.
    pass
  with open(path, 'a'):
    os.utime(path, None)
+
class MockApkTranslator(object):
  """A mock ApkLibraryPathTranslator object used for testing."""

  # Regex that matches one line of an APK native library map file generated
  # with apk_lib_dump.py. Per ReadMapFile() below, each line has the format:
  #   0x<file-start> 0x<file-end> 0x<file-size> <library-path>
  # FIX: the library path is a path, not a hexadecimal number; the previous
  # pattern (r'0x(?P<lib_path>[0-9a-f]+)\s+') could never match a real map
  # line, so ReadMapFile() silently dropped every entry.
  _RE_MAP_FILE = re.compile(
      r'0x(?P<file_start>[0-9a-f]+)\s+' +
      r'0x(?P<file_end>[0-9a-f]+)\s+' +
      r'0x(?P<file_size>[0-9a-f]+)\s+' +
      r'(?P<lib_path>\S+)')

  def __init__(self, test_apk_libs=None):
    """Initialize instance.

    Args:
      test_apk_libs: Optional list of (file_start, file_end, size, lib_path)
        tuples, like _TEST_APK_LIBS for example. This will be used to
        implement TranslatePath().
    """
    self._apk_libs = []
    if test_apk_libs:
      self._AddLibEntries(test_apk_libs)

  def _AddLibEntries(self, entries):
    # Keep entries sorted by file start offset so TranslatePath() can
    # binary-search them. FIX: use key= instead of a cmp-style comparator,
    # which only worked on Python 2 (sorted() no longer accepts cmp).
    self._apk_libs = sorted(self._apk_libs + entries,
                            key=lambda entry: entry[0])

  def ReadMapFile(self, file_path):
    """Read an .apk.native-libs file that was produced with apk_lib_dump.py.

    Args:
      file_path: input path to .apk.native-libs file. Its format is
        essentially: 0x<start> 0x<end> 0x<size> <library-path>
    """
    new_libs = []
    with open(file_path) as f:
      for line in f:
        m = MockApkTranslator._RE_MAP_FILE.match(line)
        if m:
          file_start = int(m.group('file_start'), 16)
          file_end = int(m.group('file_end'), 16)
          file_size = int(m.group('file_size'), 16)
          lib_path = m.group('lib_path')
          # Sanity check: reject entries whose size does not match the range.
          if file_start + file_size != file_end:
            logging.warning('%s: Inconsistent (start, end, size) values '
                            '(0x%x, 0x%x, 0x%x)',
                            file_path, file_start, file_end, file_size)
          else:
            new_libs.append((file_start, file_end, file_size, lib_path))

    self._AddLibEntries(new_libs)

  def TranslatePath(self, lib_path, lib_offset):
    """Translate an APK file path + offset into a library path + offset.

    Args:
      lib_path: Device-side APK file path.
      lib_offset: File offset within the APK.
    Returns:
      A ('<apk>!lib/<library>', new_offset) tuple when |lib_offset| falls
      inside a known embedded library range, otherwise the unmodified
      (lib_path, lib_offset) pair.
    """
    min_pos = 0
    max_pos = len(self._apk_libs)
    while min_pos < max_pos:
      # FIX: floor division; '/' would yield a float index on Python 3.
      mid_pos = (min_pos + max_pos) // 2
      mid_entry = self._apk_libs[mid_pos]
      mid_offset = mid_entry[0]
      mid_size = mid_entry[2]
      if lib_offset < mid_offset:
        max_pos = mid_pos
      elif lib_offset >= mid_offset + mid_size:
        min_pos = mid_pos + 1
      else:
        # Found it
        new_path = '%s!lib/%s' % (lib_path, mid_entry[3])
        new_offset = lib_offset - mid_offset
        return (new_path, new_offset)

    return lib_path, lib_offset
+
+
class HostLibraryFinderTest(unittest.TestCase):
  """Unit tests for symbol_utils.HostLibraryFinder."""

  def testEmpty(self):
    # A finder without any registered search directory never finds anything.
    finder = symbol_utils.HostLibraryFinder()
    self.assertIsNone(finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))
    self.assertIsNone(
        finder.Find('/data/data/com.example.app-1/base.apk!lib/libfoo.so'))


  def testSimpleDirectory(self):
    finder = symbol_utils.HostLibraryFinder()
    with _TempDir() as tmp_dir:
      libfoo_path = os.path.join(tmp_dir, 'libfoo.so')
      libbar_path = os.path.join(tmp_dir, 'libbar.so')
      for host_path in (libfoo_path, libbar_path):
        _TouchFile(host_path)

      finder.AddSearchDir(tmp_dir)

      # Regular library path (extracted at installation by the PackageManager).
      # Note that the extraction path has changed between Android releases,
      # i.e. it can be /data/app/, /data/data/ or /data/app-lib/ depending
      # on the system.
      self.assertEqual(
          libfoo_path,
          finder.Find('/data/app-lib/com.example.app-1/lib/libfoo.so'))

      # Only the library's base name matters, not the device directory.
      self.assertEqual(
          libfoo_path,
          finder.Find('/whatever/what.apk!lib/libfoo.so'))

      self.assertEqual(
          libbar_path,
          finder.Find('/data/data/com.example.app-1/lib/libbar.so'))

      # Libraries that are not present on the host are not found.
      self.assertIsNone(
          finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))


  def testMultipleDirectories(self):
    with _TempDir() as tmp_dir:
      # Layout created below:
      #   <tmp_dir>/aaa/libfoo.so
      #   <tmp_dir>/bbb/libbar.so
      #   <tmp_dir>/bbb/libfoo.so   <- shadowed, because 'aaa' is added
      #                                first to the search path list.
      first_dir = os.path.join(tmp_dir, 'aaa')
      second_dir = os.path.join(tmp_dir, 'bbb')
      os.makedirs(first_dir)
      os.makedirs(second_dir)

      libfoo_path = os.path.join(first_dir, 'libfoo.so')
      libbar_path = os.path.join(second_dir, 'libbar.so')
      shadowed_libfoo_path = os.path.join(second_dir, 'libfoo.so')

      for host_path in (libfoo_path, libbar_path, shadowed_libfoo_path):
        _TouchFile(host_path)

      finder = symbol_utils.HostLibraryFinder()
      finder.AddSearchDir(first_dir)
      finder.AddSearchDir(second_dir)

      # 'aaa' wins for libfoo.so since it was registered first.
      self.assertEqual(
          libfoo_path,
          finder.Find('/data/data/com.example.app-1/lib/libfoo.so'))

      self.assertEqual(
          libfoo_path,
          finder.Find('/data/whatever/base.apk!lib/libfoo.so'))

      self.assertEqual(
          libbar_path,
          finder.Find('/data/data/com.example.app-1/lib/libbar.so'))

      self.assertIsNone(
          finder.Find('/data/data/com.example.app-1/lib/libunknown.so'))
+
+
class ElfSymbolResolverTest(unittest.TestCase):
  """Unit tests for symbol_utils.ElfSymbolResolver, using a mock addr2line."""

  def testCreation(self):
    resolver = symbol_utils.ElfSymbolResolver(
        addr2line_path_for_tests=_MOCK_A2L_PATH)
    self.assertTrue(resolver)

  def testWithSimpleOffsets(self):
    resolver = symbol_utils.ElfSymbolResolver(
        addr2line_path_for_tests=_MOCK_A2L_PATH)
    resolver.SetAndroidAbi('ignored-abi')

    # FIX: items() instead of the Python-2-only iteritems(), so the test
    # also runs under Python 3 (identical behavior on Python 2).
    for addr, expected_sym in _TEST_SYMBOL_DATA.items():
      self.assertEqual(resolver.FindSymbolInfo('/some/path/libmock1.so', addr),
                       expected_sym)

  def testWithPreResolvedSymbols(self):
    resolver = symbol_utils.ElfSymbolResolver(
        addr2line_path_for_tests=_MOCK_A2L_PATH)
    resolver.SetAndroidAbi('ignored-abi')
    # FIX: wrap keys() in list() so this works on Python 3 too (on Python 2,
    # keys() already returned a list).
    resolver.AddLibraryOffsets('/some/path/libmock1.so',
                               list(_TEST_SYMBOL_DATA.keys()))

    # After AddLibraryOffsets(), no further symbolizer calls should be needed.
    resolver.DisallowSymbolizerForTesting()

    for addr, expected_sym in _TEST_SYMBOL_DATA.items():
      sym_info = resolver.FindSymbolInfo('/some/path/libmock1.so', addr)
      self.assertIsNotNone(sym_info, 'None symbol info for addr %x' % addr)
      self.assertEqual(
          sym_info, expected_sym,
          'Invalid symbol info for addr %x [%s] expected [%s]' % (
              addr, sym_info, expected_sym))
+
+
class MemoryMapTest(unittest.TestCase):
  """Unit tests for symbol_utils.MemoryMap."""

  def testCreation(self):
    # A freshly created memory map contains no section at all.
    mem_map = symbol_utils.MemoryMap('test-abi32')
    self.assertIsNone(mem_map.FindSectionForAddress(0))

  def testParseLines(self):
    mem_map = symbol_utils.MemoryMap('test-abi32')
    mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
    for exp_addr, exp_size, exp_path, exp_offset in _TEST_MEMORY_MAP_SECTIONS:
      description = '(addr:%x, size:%x, path:%s, offset=%x)' % (
          exp_addr, exp_size, exp_path, exp_offset)

      section = mem_map.FindSectionForAddress(exp_addr)
      self.assertTrue(section, 'Could not find %s' % description)
      self.assertEqual(section.address, exp_addr)
      self.assertEqual(section.size, exp_size)
      self.assertEqual(section.offset, exp_offset)
      self.assertEqual(section.path, exp_path)

  def testTranslateLine(self):
    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
    mem_map = symbol_utils.MemoryMap('test-abi')
    line_pairs = zip(_TEST_MEMORY_MAP.splitlines(),
                     _EXPECTED_TEST_MEMORY_MAP.splitlines())
    for input_line, expected_line in line_pairs:
      self.assertEqual(mem_map.TranslateLine(input_line, apk_translator),
                       expected_line)
+
class StackTranslatorTest(unittest.TestCase):
  """Unit tests for symbol_utils.StackTranslator."""

  def testSimpleStack(self):
    android_abi = 'test-abi32'
    mem_map = symbol_utils.MemoryMap(android_abi)
    # FIX: pass individual lines rather than the raw string, consistent with
    # MemoryMapTest.testParseLines above (presumably ParseLines() iterates
    # its argument line by line, so a raw string would be walked character
    # by character — TODO confirm against symbol_utils.MemoryMap).
    mem_map.ParseLines(_TEST_MEMORY_MAP.splitlines())
    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
    stack_translator = symbol_utils.StackTranslator(android_abi, mem_map,
                                                    apk_translator)
    input_stack = _TEST_STACK.splitlines()
    expected_stack = _EXPECTED_STACK.splitlines()
    self.assertEqual(len(input_stack), len(expected_stack))
    for stack_line, expected_line in zip(input_stack, expected_stack):
      new_line = stack_translator.TranslateLine(stack_line)
      self.assertEqual(new_line, expected_line)
+
+
class MockSymbolResolver(symbol_utils.SymbolResolver):
  """A SymbolResolver that serves symbols from in-memory test tables."""

  # A regex matching a symbol definition as it appears in a test symbol file.
  # Format is: <hex-offset> <whitespace> <symbol-string>
  _RE_SYMBOL_DEFINITION = re.compile(
      r'(?P<offset>[0-9a-f]+)\s+(?P<symbol>.*)')

  def __init__(self):
    super(MockSymbolResolver, self).__init__()
    # Maps a library name to a {offset: symbol-info-string} dictionary.
    self._map = collections.defaultdict(dict)

  def AddTestLibrarySymbols(self, lib_name, offsets_map):
    """Add a new test entry for a given library name.

    Args:
      lib_name: Library name (e.g. 'libfoo.so')
      offsets_map: A mapping from offsets to symbol info strings.
    """
    self._map[lib_name] = offsets_map

  def ReadTestFile(self, file_path, lib_name):
    """Read a single test symbol file, matching a given library.

    Args:
      file_path: Input file path.
      lib_name: Library name these symbols correspond to (e.g. 'libfoo.so')
    """
    with open(file_path) as f:
      for line in f:
        line = line.rstrip()
        m = MockSymbolResolver._RE_SYMBOL_DEFINITION.match(line)
        if m:
          # FIX: offsets are hexadecimal (see _RE_SYMBOL_DEFINITION above);
          # int() without an explicit base mis-parsed digit-only offsets as
          # decimal and raised ValueError on offsets containing a-f digits.
          offset = int(m.group('offset'), 16)
          symbol = m.group('symbol')
          self._map[lib_name][offset] = symbol

  def ReadTestFilesInDir(self, dir_path, file_suffix):
    """Read all symbol test files in a given directory.

    Args:
      dir_path: Directory path.
      file_suffix: File suffix used to detect test symbol files.
    """
    for filename in os.listdir(dir_path):
      if filename.endswith(file_suffix):
        lib_name = filename[:-len(file_suffix)]
        self.ReadTestFile(os.path.join(dir_path, filename), lib_name)

  def FindSymbolInfo(self, device_path, device_offset):
    """Implement SymbolResolver.FindSymbolInfo.

    Returns the symbol info string registered for the library's base name at
    |device_offset|, or None when the library or offset is unknown.
    """
    lib_name = os.path.basename(device_path)
    offsets = self._map.get(lib_name)
    if not offsets:
      return None

    return offsets.get(device_offset)
+
+
class BacktraceTranslatorTest(unittest.TestCase):
  """Unit tests for symbol_utils.BacktraceTranslator."""

  def testEmpty(self):
    android_abi = 'test-abi'
    apk_translator = MockApkTranslator()
    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
                                                            apk_translator)
    self.assertTrue(backtrace_translator)

  def testFindLibraryOffsets(self):
    android_abi = 'test-abi'
    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
                                                            apk_translator)
    input_backtrace = _EXPECTED_BACKTRACE.splitlines()
    expected_lib_offsets_map = _EXPECTED_BACKTRACE_OFFSETS_MAP
    offset_map = backtrace_translator.FindLibraryOffsets(input_backtrace)
    # FIX: items() instead of the Python-2-only iteritems(), so the test
    # also runs under Python 3 (identical behavior on Python 2). Also use
    # assertIn for a better failure message than assertTrue(x in y).
    for lib_path, offsets in offset_map.items():
      self.assertIn(lib_path, expected_lib_offsets_map,
                    '%s is not in expected library-offsets map!' % lib_path)
      sorted_offsets = sorted(offsets)
      sorted_expected_offsets = sorted(expected_lib_offsets_map[lib_path])
      self.assertEqual(sorted_offsets, sorted_expected_offsets,
                       '%s has invalid offsets %s expected %s' % (
                           lib_path, sorted_offsets, sorted_expected_offsets))

  def testTranslateLine(self):
    android_abi = 'test-abi'
    apk_translator = MockApkTranslator(_TEST_APK_LIBS)
    backtrace_translator = symbol_utils.BacktraceTranslator(android_abi,
                                                            apk_translator)
    input_backtrace = _TEST_BACKTRACE.splitlines()
    expected_backtrace = _EXPECTED_BACKTRACE.splitlines()
    self.assertEqual(len(input_backtrace), len(expected_backtrace))
    for trace_line, expected_line in zip(input_backtrace, expected_backtrace):
      line = backtrace_translator.TranslateLine(trace_line,
                                                MockSymbolResolver())
      self.assertEqual(line, expected_line)
+
+
# Run the whole test suite when this file is invoked directly.
if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/__init__.py b/deps/v8/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/__init__.py
diff --git a/deps/v8/build/android/pylib/utils/app_bundle_utils.py b/deps/v8/build/android/pylib/utils/app_bundle_utils.py
new file mode 100644
index 0000000000..2098f4f35d
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/app_bundle_utils.py
@@ -0,0 +1,140 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import re
+import sys
+import tempfile
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+
+from util import build_utils
+from util import md5_check
+from util import resource_utils
+import bundletool
+
+# List of valid modes for GenerateBundleApks()
+BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed')
+_SYSTEM_MODES = ('system_compressed', 'system')
+
+_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
+
+
+def _CreateDeviceSpec(bundle_path, sdk_version, locales):
+  """Returns a bundletool device-spec dict for |bundle_path|.
+
+  Args:
+    bundle_path: Path to the .aab bundle. When |sdk_version| is falsy, its
+      base AndroidManifest.xml is read to recover minSdkVersion.
+    sdk_version: Optional explicit sdkVersion for the spec.
+    locales: List of locale strings for 'supportedLocales'.
+  """
+  # Could also use "bundletool dump resources", but reading directly is faster.
+  if not sdk_version:
+    with zipfile.ZipFile(bundle_path) as f:
+      manifest_data = f.read('base/manifest/AndroidManifest.xml')
+      sdk_version = int(
+          re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1))
+
+  # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk
+  # files from being created within the .apks file.
+  return {
+      'screenDensity': 1000,  # Ignored since we don't split on density.
+      'sdkVersion': sdk_version,
+      'supportedAbis': _ALL_ABIS,  # Our .aab files are already split on abi.
+      'supportedLocales': locales,
+  }
+
+
+def GenerateBundleApks(bundle_path,
+                       bundle_apks_path,
+                       aapt2_path,
+                       keystore_path,
+                       keystore_password,
+                       keystore_alias,
+                       mode=None,
+                       minimal=False,
+                       minimal_sdk_version=None,
+                       check_for_noop=True,
+                       system_image_locales=None):
+  """Generate an .apks archive from an app bundle if needed.
+
+  Args:
+    bundle_path: Input bundle file path.
+    bundle_apks_path: Output bundle .apks archive path. Name must end with
+      '.apks' or this operation will fail.
+    aapt2_path: Path to aapt2 build tool.
+    keystore_path: Path to keystore.
+    keystore_password: Keystore password, as a string.
+    keystore_alias: Keystore signing key alias.
+    mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
+    minimal: Create the minimal set of apks possible (english-only).
+    minimal_sdk_version: Use this sdkVersion when |minimal| or
+      |system_image_locales| args are present.
+    check_for_noop: Use md5_check to short-circuit when inputs have not changed.
+    system_image_locales: Locales to package in the APK when mode is "system"
+      or "system_compressed".
+  """
+  device_spec = None
+  if minimal:
+    # Measure with one language split installed. Use Hindi because it is
+    # popular. resource_size.py looks for splits/base-hi.apk.
+    # Note: English is always included since it's in base-master.apk.
+    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
+  elif mode in _SYSTEM_MODES:
+    if not system_image_locales:
+      raise Exception('system modes require system_image_locales')
+    # Bundletool doesn't seem to understand device specs with locales in the
+    # form of "<lang>-r<region>", so just provide the language code instead.
+    locales = [
+        resource_utils.ToAndroidLocaleName(l).split('-')[0]
+        for l in system_image_locales
+    ]
+    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)
+
+  # Closure over the arguments above; invoked directly, or lazily through
+  # md5_check.CallAndRecordIfStale() below.
+  def rebuild():
+    logging.info('Building %s', bundle_apks_path)
+    with tempfile.NamedTemporaryFile(suffix='.json') as spec_file, \
+        build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
+      cmd_args = [
+          'build-apks',
+          '--aapt2=%s' % aapt2_path,
+          '--output=%s' % f.name,
+          '--bundle=%s' % bundle_path,
+          '--ks=%s' % keystore_path,
+          '--ks-pass=pass:%s' % keystore_password,
+          '--ks-key-alias=%s' % keystore_alias,
+          '--overwrite',
+      ]
+      if device_spec:
+        json.dump(device_spec, spec_file)
+        spec_file.flush()
+        cmd_args += ['--device-spec=' + spec_file.name]
+      if mode is not None:
+        if mode not in BUILD_APKS_MODES:
+          raise Exception('Invalid mode parameter %s (should be in %s)' %
+                          (mode, BUILD_APKS_MODES))
+        cmd_args += ['--mode=' + mode]
+      bundletool.RunBundleTool(cmd_args)
+
+  if check_for_noop:
+    # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
+    # input_paths, to speed up MD5 computations by about 400ms (the .jar file
+    # contains thousands of class files which are checked independently,
+    # resulting in an .md5.stamp of more than 60000 lines!).
+    input_paths = [bundle_path, aapt2_path, keystore_path]
+    input_strings = [
+        keystore_password,
+        keystore_alias,
+        bundletool.BUNDLETOOL_JAR_PATH,
+        # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
+        # it's simpler to assume that this may not be the case in the future.
+        bundletool.BUNDLETOOL_VERSION,
+        device_spec,
+    ]
+    if mode is not None:
+      input_strings.append(mode)
+
+    md5_check.CallAndRecordIfStale(
+        rebuild,
+        input_paths=input_paths,
+        input_strings=input_strings,
+        output_paths=[bundle_apks_path])
+  else:
+    rebuild()
diff --git a/deps/v8/build/android/pylib/utils/argparse_utils.py b/deps/v8/build/android/pylib/utils/argparse_utils.py
new file mode 100644
index 0000000000..e456d9ddab
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/argparse_utils.py
@@ -0,0 +1,50 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+
+
+class CustomHelpAction(argparse.Action):
+  '''Allows defining custom help actions.
+
+  Help actions can run even when the parser would otherwise fail on missing
+  arguments. The first help or custom help command mentioned on the command
+  line will have its help text displayed.
+
+  Usage:
+      parser = argparse.ArgumentParser(...)
+      CustomHelpAction.EnableFor(parser)
+      parser.add_argument('--foo-help',
+                          action='custom_help',
+                          custom_help_text='this is the help message',
+                          help='What this helps with')
+  '''
+  # Derived from argparse._HelpAction from
+  # https://github.com/python/cpython/blob/master/Lib/argparse.py
+
+  # pylint: disable=redefined-builtin
+  # (complains about 'help' being redefined)
+  def __init__(self,
+               option_strings,
+               dest=argparse.SUPPRESS,
+               default=argparse.SUPPRESS,
+               custom_help_text=None,
+               help=None):
+    # nargs=0: the option consumes no values; it only triggers __call__.
+    super(CustomHelpAction, self).__init__(option_strings=option_strings,
+                                           dest=dest,
+                                           default=default,
+                                           nargs=0,
+                                           help=help)
+
+    if not custom_help_text:
+      raise ValueError('custom_help_text is required')
+    self._help_text = custom_help_text
+
+  def __call__(self, parser, namespace, values, option_string=None):
+    # Python 2 print statement: this script predates the py3 migration.
+    print self._help_text
+    parser.exit()
+
+  @staticmethod
+  def EnableFor(parser):
+    # Registers the 'custom_help' action name used in add_argument() calls.
+    parser.register('action', 'custom_help', CustomHelpAction)
diff --git a/deps/v8/build/android/pylib/utils/decorators.py b/deps/v8/build/android/pylib/utils/decorators.py
new file mode 100644
index 0000000000..8eec1d1e58
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/decorators.py
@@ -0,0 +1,37 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+
+
+def Memoize(f):
+ """Decorator to cache return values of function."""
+ memoize_dict = {}
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ key = repr((args, kwargs))
+ if key not in memoize_dict:
+ memoize_dict[key] = f(*args, **kwargs)
+ return memoize_dict[key]
+ return wrapper
+
+
+def NoRaiseException(default_return_value=None, exception_message=''):
+  """Returns decorator that catches and logs uncaught Exceptions.
+
+  Args:
+    default_return_value: Value to return in the case of uncaught Exception.
+    exception_message: Message for uncaught exceptions.
+
+  Returns:
+    A decorator: the decorated function returns |default_return_value|
+    instead of propagating any Exception it raises.
+  """
+  def decorator(f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+      try:
+        return f(*args, **kwargs)
+      except Exception: # pylint: disable=broad-except
+        # Log with traceback, then swallow: callers rely on these
+        # best-effort semantics (see the helpers in this directory).
+        logging.exception(exception_message)
+        return default_return_value
+    return wrapper
+  return decorator
diff --git a/deps/v8/build/android/pylib/utils/decorators_test.py b/deps/v8/build/android/pylib/utils/decorators_test.py
new file mode 100755
index 0000000000..60f4811b4f
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/decorators_test.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for decorators.py."""
+
+import unittest
+
+from pylib.utils import decorators
+
+
+class NoRaiseExceptionDecoratorTest(unittest.TestCase):
+
+ def testFunctionDoesNotRaiseException(self):
+ """Tests that the |NoRaiseException| decorator catches exception."""
+
+ @decorators.NoRaiseException()
+ def raiseException():
+ raise Exception()
+
+ try:
+ raiseException()
+ except Exception: # pylint: disable=broad-except
+ self.fail('Exception was not caught by |NoRaiseException| decorator')
+
+ def testFunctionReturnsCorrectValues(self):
+ """Tests that the |NoRaiseException| decorator returns correct values."""
+
+ @decorators.NoRaiseException(default_return_value=111)
+ def raiseException():
+ raise Exception()
+
+ @decorators.NoRaiseException(default_return_value=111)
+ def doesNotRaiseException():
+ return 999
+
+ self.assertEquals(raiseException(), 111)
+ self.assertEquals(doesNotRaiseException(), 999)
+
+
+class MemoizeDecoratorTest(unittest.TestCase):
+
+ def testFunctionExceptionNotMemoized(self):
+ """Tests that |Memoize| decorator does not cache exception results."""
+
+ class ExceptionType1(Exception):
+ pass
+
+ class ExceptionType2(Exception):
+ pass
+
+ @decorators.Memoize
+ def raiseExceptions():
+ if raiseExceptions.count == 0:
+ raiseExceptions.count += 1
+ raise ExceptionType1()
+
+ if raiseExceptions.count == 1:
+ raise ExceptionType2()
+ raiseExceptions.count = 0
+
+ with self.assertRaises(ExceptionType1):
+ raiseExceptions()
+ with self.assertRaises(ExceptionType2):
+ raiseExceptions()
+
+ def testFunctionResultMemoized(self):
+ """Tests that |Memoize| decorator caches results."""
+
+ @decorators.Memoize
+ def memoized():
+ memoized.count += 1
+ return memoized.count
+ memoized.count = 0
+
+ def notMemoized():
+ notMemoized.count += 1
+ return notMemoized.count
+ notMemoized.count = 0
+
+ self.assertEquals(memoized(), 1)
+ self.assertEquals(memoized(), 1)
+ self.assertEquals(memoized(), 1)
+
+ self.assertEquals(notMemoized(), 1)
+ self.assertEquals(notMemoized(), 2)
+ self.assertEquals(notMemoized(), 3)
+
+ def testFunctionMemoizedBasedOnArgs(self):
+ """Tests that |Memoize| caches results based on args and kwargs."""
+
+ @decorators.Memoize
+ def returnValueBasedOnArgsKwargs(a, k=0):
+ return a + k
+
+ self.assertEquals(returnValueBasedOnArgsKwargs(1, 1), 2)
+ self.assertEquals(returnValueBasedOnArgsKwargs(1, 2), 3)
+ self.assertEquals(returnValueBasedOnArgsKwargs(2, 1), 3)
+ self.assertEquals(returnValueBasedOnArgsKwargs(3, 3), 6)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/deps/v8/build/android/pylib/utils/device_dependencies.py b/deps/v8/build/android/pylib/utils/device_dependencies.py
new file mode 100644
index 0000000000..bccc1c37a6
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/device_dependencies.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+from pylib import constants
+
+
+# Regular expressions (applied with re.match to absolute host paths) for
+# files that must never be pushed to the device as data dependencies.
+_BLACKLIST = [
+  re.compile(r'.*OWNERS'),  # Should never be included.
+  re.compile(r'.*\.crx'),  # Chrome extension zip files.
+  re.compile(r'.*\.so'),  # Libraries packed into .apk.
+  re.compile(r'.*Mojo.*manifest\.json'),  # Some source_set()s pull these in.
+  re.compile(r'.*\.py'),  # Some test_support targets include python deps.
+  re.compile(r'.*\.stamp'),  # Stamp files should never be included.
+  re.compile(r'.*\.apk'),  # Should be installed separately.
+  re.compile(r'.*lib.java/.*'),  # Never need java intermediates.
+
+  # Chrome external extensions config file.
+  re.compile(r'.*external_extensions\.json'),
+
+  # Exists just to test the compile, not to be run.
+  re.compile(r'.*jni_generator_tests'),
+
+  # v8's blobs and icu data get packaged into APKs.
+  re.compile(r'.*natives_blob.*\.bin'),
+  re.compile(r'.*snapshot_blob.*\.bin'),
+  re.compile(r'.*icudtl.bin'),
+
+  # Scripts that are needed by swarming, but not on devices:
+  re.compile(r'.*llvm-symbolizer'),
+  re.compile(r'.*md5sum_bin'),
+  re.compile(os.path.join('.*', 'development', 'scripts', 'stack')),
+]
+
+
+def _FilterDataDeps(abs_host_files):
+  """Returns |abs_host_files| with blacklisted paths filtered out.
+
+  In addition to _BLACKLIST, anything under the output directory's 'bin'
+  subdirectory is excluded.
+  """
+  blacklist = _BLACKLIST + [
+      re.compile(os.path.join(constants.GetOutDirectory(), 'bin'))]
+  return [p for p in abs_host_files
+          if not any(r.match(p) for r in blacklist)]
+
+
+def DevicePathComponentsFor(host_path, output_directory):
+  """Returns the device path components for a given host path.
+
+  This returns the device path as a list of joinable path components,
+  with None as the first element to indicate that the path should be
+  rooted at $EXTERNAL_STORAGE.
+
+  e.g., given
+
+    '$CHROMIUM_SRC/foo/bar/baz.txt'
+
+  this would return
+
+    [None, 'foo', 'bar', 'baz.txt']
+
+  This handles a couple classes of paths differently than it otherwise would:
+    - All .pak files get mapped to top-level paks/
+    - Anything in the output directory gets mapped relative to the output
+      directory rather than the source directory.
+
+  e.g. given
+
+    '$CHROMIUM_SRC/out/Release/icu_fake_dir/icudtl.dat'
+
+  this would return
+
+    [None, 'icu_fake_dir', 'icudtl.dat']
+
+  Args:
+    host_path: The absolute path to the host file.
+    output_directory: The absolute path to the build output directory;
+      paths under it are mapped relative to it instead of the source root.
+  Returns:
+    A list of device path components.
+  """
+  if host_path.startswith(output_directory):
+    if os.path.splitext(host_path)[1] == '.pak':
+      return [None, 'paks', os.path.basename(host_path)]
+    rel_host_path = os.path.relpath(host_path, output_directory)
+  else:
+    rel_host_path = os.path.relpath(host_path, constants.DIR_SOURCE_ROOT)
+
+  device_path_components = [None]
+  p = rel_host_path
+  # Walk the relative path from leaf to root, inserting each component just
+  # after the leading None marker so the final list is in root-to-leaf order.
+  while p:
+    p, d = os.path.split(p)
+    if d:
+      device_path_components.insert(1, d)
+  return device_path_components
+
+
+def GetDataDependencies(runtime_deps_path):
+ """Returns a list of device data dependencies.
+
+ Args:
+ runtime_deps_path: A str path to the .runtime_deps file.
+ Returns:
+ A list of (host_path, device_path) tuples.
+ """
+ if not runtime_deps_path:
+ return []
+
+ with open(runtime_deps_path, 'r') as runtime_deps_file:
+ rel_host_files = [l.strip() for l in runtime_deps_file if l]
+
+ output_directory = constants.GetOutDirectory()
+ abs_host_files = [
+ os.path.abspath(os.path.join(output_directory, r))
+ for r in rel_host_files]
+ filtered_abs_host_files = _FilterDataDeps(abs_host_files)
+ # TODO(crbug.com/752610): Filter out host executables, and investigate
+ # whether other files could be filtered as well.
+ return [(f, DevicePathComponentsFor(f, output_directory))
+ for f in filtered_abs_host_files]
diff --git a/deps/v8/build/android/pylib/utils/device_dependencies_test.py b/deps/v8/build/android/pylib/utils/device_dependencies_test.py
new file mode 100755
index 0000000000..aaa9ebf68a
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/device_dependencies_test.py
@@ -0,0 +1,56 @@
+#! /usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from pylib import constants
+from pylib.utils import device_dependencies
+
+
+class DevicePathComponentsForTest(unittest.TestCase):
+
+ def testCheckedInFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'foo', 'bar', 'baz.txt'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectoryFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'icudtl.dat')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'icudtl.dat'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectorySubdirFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'test_dir', 'icudtl.dat')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'test_dir', 'icudtl.dat'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectoryPakFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'foo.pak')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEquals(
+ [None, 'paks', 'foo.pak'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/dexdump.py b/deps/v8/build/android/pylib/utils/dexdump.py
new file mode 100644
index 0000000000..2bb11ce198
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/dexdump.py
@@ -0,0 +1,115 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import tempfile
+from xml.etree import ElementTree
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+
+def Dump(apk_path):
+ """Dumps class and method information from a APK into a dict via dexdump.
+
+ Args:
+ apk_path: An absolute path to an APK file to dump.
+ Returns:
+ A dict in the following format:
+ {
+ <package_name>: {
+ 'classes': {
+ <class_name>: {
+ 'methods': [<method_1>, <method_2>]
+ }
+ }
+ }
+ }
+ """
+ # TODO(mikecase): Support multi-dex
+ try:
+ dexfile_dir = tempfile.mkdtemp()
+ # Python zipfile module is unable to unzip APKs.
+ cmd_helper.RunCmd(['unzip', apk_path, 'classes.dex'], cwd=dexfile_dir)
+ dexfile = os.path.join(dexfile_dir, 'classes.dex')
+ output_xml = cmd_helper.GetCmdOutput([DEXDUMP_PATH, '-l', 'xml', dexfile])
+ return _ParseRootNode(ElementTree.fromstring(output_xml))
+ finally:
+ shutil.rmtree(dexfile_dir)
+
+
+def _ParseRootNode(root):
+  """Parses the XML output of dexdump. This output is in the following format.
+
+  This is a subset of the information contained within dexdump output.
+
+  <api>
+    <package name="foo.bar">
+      <class name="Class" extends="foo.bar.SuperClass">
+        <field name="Field">
+        </field>
+        <constructor name="Method">
+          <parameter name="Param" type="int">
+          </parameter>
+        </constructor>
+        <method name="Method">
+          <parameter name="Param" type="int">
+          </parameter>
+        </method>
+      </class>
+    </package>
+  </api>
+
+  Returns:
+    A dict mapping package name to the dict produced by _ParsePackageNode().
+  """
+  results = {}
+  for child in root:
+    if child.tag == 'package':
+      package_name = child.attrib['name']
+      parsed_node = _ParsePackageNode(child)
+      if package_name in results:
+        # A package may appear more than once in the dump; merge its classes
+        # into the existing entry instead of overwriting it.
+        results[package_name]['classes'].update(parsed_node['classes'])
+      else:
+        results[package_name] = parsed_node
+  return results
+
+
+def _ParsePackageNode(package_node):
+ """Parses a <package> node from the dexdump xml output.
+
+ Returns:
+ A dict in the format:
+ {
+ 'classes': {
+ <class_1>: {
+ 'methods': [<method_1>, <method_2>]
+ },
+ <class_2>: {
+ 'methods': [<method_1>, <method_2>]
+ },
+ }
+ }
+ """
+ classes = {}
+ for child in package_node:
+ if child.tag == 'class':
+ classes[child.attrib['name']] = _ParseClassNode(child)
+ return {'classes': classes}
+
+
+def _ParseClassNode(class_node):
+ """Parses a <class> node from the dexdump xml output.
+
+ Returns:
+ A dict in the format:
+ {
+ 'methods': [<method_1>, <method_2>]
+ }
+ """
+ methods = []
+ for child in class_node:
+ if child.tag == 'method':
+ methods.append(child.attrib['name'])
+ return {'methods': methods, 'superclass': class_node.attrib['extends']}
diff --git a/deps/v8/build/android/pylib/utils/dexdump_test.py b/deps/v8/build/android/pylib/utils/dexdump_test.py
new file mode 100755
index 0000000000..6b2c4542f2
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/dexdump_test.py
@@ -0,0 +1,141 @@
+#! /usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from xml.etree import ElementTree
+
+from pylib.utils import dexdump
+
+# pylint: disable=protected-access
+
+
+class DexdumpXMLParseTest(unittest.TestCase):
+
+ def testParseRootXmlNode(self):
+ example_xml_string = (
+ '<api>'
+ '<package name="com.foo.bar1">'
+ '<class'
+ ' name="Class1"'
+ ' extends="java.lang.Object"'
+ ' abstract="false"'
+ ' static="false"'
+ ' final="true"'
+ ' visibility="public">'
+ '<method'
+ ' name="class1Method1"'
+ ' return="java.lang.String"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '<method'
+ ' name="class1Method2"'
+ ' return="viod"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '</class>'
+ '<class'
+ ' name="Class2"'
+ ' extends="java.lang.Object"'
+ ' abstract="false"'
+ ' static="false"'
+ ' final="true"'
+ ' visibility="public">'
+ '<method'
+ ' name="class2Method1"'
+ ' return="java.lang.String"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '</class>'
+ '</package>'
+ '<package name="com.foo.bar2">'
+ '</package>'
+ '<package name="com.foo.bar3">'
+ '</package>'
+ '</api>')
+
+ actual = dexdump._ParseRootNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'com.foo.bar1' : {
+ 'classes': {
+ 'Class1': {
+ 'methods': ['class1Method1', 'class1Method2'],
+ 'superclass': 'java.lang.Object',
+ },
+ 'Class2': {
+ 'methods': ['class2Method1'],
+ 'superclass': 'java.lang.Object',
+ }
+ },
+ },
+ 'com.foo.bar2' : {'classes': {}},
+ 'com.foo.bar3' : {'classes': {}},
+ }
+ self.assertEquals(expected, actual)
+
+ def testParsePackageNode(self):
+ example_xml_string = (
+ '<package name="com.foo.bar">'
+ '<class name="Class1" extends="java.lang.Object">'
+ '</class>'
+ '<class name="Class2" extends="java.lang.Object">'
+ '</class>'
+ '</package>')
+
+
+ actual = dexdump._ParsePackageNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'classes': {
+ 'Class1': {
+ 'methods': [],
+ 'superclass': 'java.lang.Object',
+ },
+ 'Class2': {
+ 'methods': [],
+ 'superclass': 'java.lang.Object',
+ },
+ },
+ }
+ self.assertEquals(expected, actual)
+
+ def testParseClassNode(self):
+ example_xml_string = (
+ '<class name="Class1" extends="java.lang.Object">'
+ '<method name="method1">'
+ '</method>'
+ '<method name="method2">'
+ '</method>'
+ '</class>')
+
+ actual = dexdump._ParseClassNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'methods': ['method1', 'method2'],
+ 'superclass': 'java.lang.Object',
+ }
+ self.assertEquals(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/google_storage_helper.py b/deps/v8/build/android/pylib/utils/google_storage_helper.py
new file mode 100644
index 0000000000..d184810517
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/google_storage_helper.py
@@ -0,0 +1,126 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to Google Storage.
+
+Text data should be streamed to logdog using |logdog_helper| module.
+Due to logdog not having image or HTML viewer, those instead should be uploaded
+to Google Storage directly using this module.
+"""
+
+import logging
+import os
+import sys
+import time
+import urlparse
+
+from pylib.constants import host_paths
+from pylib.utils import decorators
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+_GSUTIL_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'catapult',
+ 'third_party', 'gsutil', 'gsutil.py')
+_PUBLIC_URL = 'https://storage.googleapis.com/%s/'
+_AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/'
+
+
+@decorators.NoRaiseException(default_return_value='')
+def upload(name, filepath, bucket, gs_args=None, command_args=None,
+           content_type=None, authenticated_link=True):
+  """Uploads data to Google Storage.
+
+  Args:
+    name: Name of the file on Google Storage.
+    filepath: Path to file you want to upload.
+    bucket: Bucket to upload file to.
+    gs_args: Optional list of extra top-level gsutil arguments, inserted
+      before the 'cp' command.
+    command_args: Optional list of extra arguments for the 'cp' command
+      itself.
+    content_type: Content type to upload as. If not specified, Google storage
+      will attempt to infer content type from file extension.
+    authenticated_link: Whether to return a link that requires user to
+      authenticate with a Google account. Setting this to false will return
+      a link that does not require user to be signed into Google account but
+      will only work for completely public storage buckets.
+  Returns:
+    Web link to item uploaded to Google Storage bucket, or '' on failure
+    (errors are swallowed by the NoRaiseException decorator).
+  """
+  bucket = _format_bucket_name(bucket)
+
+  gs_path = 'gs://%s/%s' % (bucket, name)
+  logging.info('Uploading %s to %s', filepath, gs_path)
+
+  cmd = [_GSUTIL_PATH, '-q']
+  cmd.extend(gs_args or [])
+  if content_type:
+    cmd.extend(['-h', 'Content-Type:%s' % content_type])
+  cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])
+
+  cmd_helper.RunCmd(cmd)
+
+  return get_url_link(name, bucket, authenticated_link)
+
+
+@decorators.NoRaiseException(default_return_value='')
+def read_from_link(link):
+  """Returns the contents behind a Google Storage web |link| ('' on error)."""
+  # Note that urlparse returns the path with an initial '/', so we only need to
+  # add one more after the 'gs:'.
+  gs_path = 'gs:/%s' % urlparse.urlparse(link).path
+  cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path]
+  return cmd_helper.GetCmdOutput(cmd)
+
+
+@decorators.NoRaiseException(default_return_value=False)
+def exists(name, bucket):
+ bucket = _format_bucket_name(bucket)
+ gs_path = 'gs://%s/%s' % (bucket, name)
+
+ cmd = [_GSUTIL_PATH, '-q', 'stat', gs_path]
+ return_code = cmd_helper.RunCmd(cmd)
+ return return_code == 0
+
+
+# TODO(jbudorick): Delete this function. Only one user of it.
+def unique_name(basename, suffix='', timestamp=True, device=None):
+  """Helper function for creating a unique name for a file to store in GS.
+
+  Args:
+    basename: Base of the unique filename.
+    suffix: Suffix of filename.
+    timestamp: Whether or not to add a timestamp to name.
+    device: Device to add device serial of to name.
+
+  Returns:
+    A name of the form <basename>[_<UTC timestamp>][_<device serial>]<suffix>.
+  """
+  return '%s%s%s%s' % (
+      basename,
+      '_%s' % time.strftime('%Y_%m_%d_T%H_%M_%S-UTC', time.gmtime())
+      if timestamp else '',
+      '_%s' % device.serial if device else '',
+      suffix)
+
+
+def get_url_link(name, bucket, authenticated_link=True):
+  """Get url link before/without uploading.
+
+  Args:
+    name: Name of the file on Google Storage.
+    bucket: Bucket to upload file to.
+    authenticated_link: Whether to return a link that requires user to
+      authenticate with a Google account. Setting this to false will return
+      a link that does not require user to be signed into Google account but
+      will only work for completely public storage buckets.
+  Returns:
+    Web link to item to be uploaded to Google Storage bucket
+  """
+  bucket = _format_bucket_name(bucket)
+  url_template = _AUTHENTICATED_URL if authenticated_link else _PUBLIC_URL
+  # NOTE(review): os.path.join on a URL relies on '/' being the path
+  # separator; this would misbehave on Windows hosts — confirm assumptions.
+  return os.path.join(url_template % bucket, name)
+
+
+def _format_bucket_name(bucket):
+ if bucket.startswith('gs://'):
+ bucket = bucket[len('gs://'):]
+ if bucket.endswith('/'):
+ bucket = bucket[:-1]
+ return bucket
diff --git a/deps/v8/build/android/pylib/utils/instrumentation_tracing.py b/deps/v8/build/android/pylib/utils/instrumentation_tracing.py
new file mode 100644
index 0000000000..f1d03a0dcf
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/instrumentation_tracing.py
@@ -0,0 +1,204 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions to instrument all Python function calls.
+
+This generates a JSON file readable by Chrome's about:tracing. To use it,
+either call start_instrumenting and stop_instrumenting at the appropriate times,
+or use the Instrument context manager.
+
+A function is only traced if it is from a Python module that matches at least
+one regular expression object in to_include, and does not match any in
+to_exclude. In between the start and stop events, every function call of a
+function from such a module will be added to the trace.
+"""
+
+import contextlib
+import functools
+import inspect
+import os
+import re
+import sys
+import threading
+
+from py_trace_event import trace_event
+
+
+# Modules to exclude by default (to avoid problems like infinite loops)
+DEFAULT_EXCLUDE = [r'py_trace_event\..*']
+
+class _TraceArguments(object):
+  """Stringified snapshot of a function call's arguments.
+
+  Holds repr()'d keys and values so rendering the arguments for the trace
+  can never raise.
+  """
+  def __init__(self):
+    """Wraps a dictionary to ensure safe evaluation of repr()."""
+    self._arguments = {}
+
+  @staticmethod
+  def _safeStringify(item):
+    # Fall back from repr() to str() to a placeholder: user-defined
+    # __repr__/__str__ implementations may themselves raise.
+    try:
+      item_str = repr(item)
+    except Exception: # pylint: disable=broad-except
+      try:
+        item_str = str(item)
+      except Exception: # pylint: disable=broad-except
+        item_str = "<ERROR>"
+    return item_str
+
+  def add(self, key, val):
+    # Only the stringified forms are retained, never the objects themselves.
+    key_str = _TraceArguments._safeStringify(key)
+    val_str = _TraceArguments._safeStringify(val)
+
+    self._arguments[key_str] = val_str
+
+  def __repr__(self):
+    return repr(self._arguments)
+
+
+# Thread idents whose names have already been recorded in the trace.
+saved_thread_ids = set()
+
+def _shouldTrace(frame, to_include, to_exclude, included, excluded):
+  """
+  Decides whether or not the function called in frame should be traced.
+
+  Args:
+    frame: The Python frame object of this function call.
+    to_include: Set of regex objects for modules which should be traced.
+    to_exclude: Set of regex objects for modules which should not be traced.
+    included: Set of module names we've determined should be traced.
+    excluded: Set of module names we've determined should not be traced.
+
+  Returns:
+    True if the frame's function should be traced. |included| and |excluded|
+    are updated in place as memoization caches.
+  """
+  if not inspect.getmodule(frame):
+    return False
+
+  module_name = inspect.getmodule(frame).__name__
+
+  if module_name in included:
+    includes = True
+  elif to_include:
+    includes = any([pattern.match(module_name) for pattern in to_include])
+  else:
+    # No include patterns means "trace everything that is not excluded".
+    includes = True
+
+  if includes:
+    included.add(module_name)
+  else:
+    return False
+
+  # Find the modules of every function in the stack trace.
+  frames = inspect.getouterframes(frame)
+  # NOTE(review): inspect.getmodule() can return None for some outer frames,
+  # which would raise AttributeError here — confirm this cannot happen for
+  # the workloads being traced.
+  calling_module_names = [inspect.getmodule(fr[0]).__name__ for fr in frames]
+
+  # Return False for anything with an excluded module's function anywhere in the
+  # stack trace (even if the function itself is in an included module).
+  if to_exclude:
+    for calling_module in calling_module_names:
+      if calling_module in excluded:
+        return False
+      for pattern in to_exclude:
+        if pattern.match(calling_module):
+          excluded.add(calling_module)
+          return False
+
+  return True
+
+def _generate_trace_function(to_include, to_exclude):
+  """Returns a function suitable for sys.settrace()/threading.settrace().
+
+  Args:
+    to_include: Iterable of regex strings for modules that should be traced.
+    to_exclude: Iterable of regex strings for modules that should not be
+      traced; DEFAULT_EXCLUDE is always added to this set.
+  """
+  to_include = {re.compile(item) for item in to_include}
+  to_exclude = {re.compile(item) for item in to_exclude}
+  to_exclude.update({re.compile(item) for item in DEFAULT_EXCLUDE})
+
+  # Memoization caches shared by every invocation of the trace function.
+  included = set()
+  excluded = set()
+
+  tracing_pid = os.getpid()
+
+  def traceFunction(frame, event, arg):
+    del arg
+
+    # Don't try to trace in subprocesses.
+    if os.getpid() != tracing_pid:
+      sys.settrace(None)
+      return None
+
+    # pylint: disable=unused-argument
+    if event not in ("call", "return"):
+      return None
+
+    function_name = frame.f_code.co_name
+    filename = frame.f_code.co_filename
+    line_number = frame.f_lineno
+
+    if _shouldTrace(frame, to_include, to_exclude, included, excluded):
+      if event == "call":
+        # This function is beginning; we save the thread name (if that hasn't
+        # been done), record the Begin event, and return this function to be
+        # used as the local trace function.
+
+        thread_id = threading.current_thread().ident
+
+        if thread_id not in saved_thread_ids:
+          thread_name = threading.current_thread().name
+
+          trace_event.trace_set_thread_name(thread_name)
+
+          saved_thread_ids.add(thread_id)
+
+        arguments = _TraceArguments()
+        # The function's argument values are stored in the frame's
+        # |co_varnames| as the first |co_argcount| elements. (Following that
+        # are local variables.)
+        for idx in range(frame.f_code.co_argcount):
+          arg_name = frame.f_code.co_varnames[idx]
+          arguments.add(arg_name, frame.f_locals[arg_name])
+        trace_event.trace_begin(function_name, arguments=arguments,
+                                module=inspect.getmodule(frame).__name__,
+                                filename=filename, line_number=line_number)
+
+        # Return this function, so it gets used as the "local trace function"
+        # within this function's frame (and in particular, gets called for this
+        # function's "return" event).
+        return traceFunction
+
+      if event == "return":
+        trace_event.trace_end(function_name)
+        return None
+
+  return traceFunction
+
+
+def no_tracing(f):
+  """Decorator that disables tracing for the duration of |f|."""
+  @functools.wraps(f)
+  def wrapper(*args, **kwargs):
+    trace_func = sys.gettrace()
+    try:
+      sys.settrace(None)
+      threading.settrace(None)
+      return f(*args, **kwargs)
+    finally:
+      # Restore whatever trace function was active before the call.
+      sys.settrace(trace_func)
+      threading.settrace(trace_func)
+  return wrapper
+
+
+def start_instrumenting(output_file, to_include=(), to_exclude=()):
+  """Enable tracing of all function calls (from specified modules).
+
+  Args:
+    output_file: Trace output file, passed to trace_event.trace_enable().
+    to_include: Iterable of regex strings for modules to trace.
+    to_exclude: Iterable of regex strings for modules to exclude from
+      tracing (DEFAULT_EXCLUDE is always excluded as well).
+  """
+  trace_event.trace_enable(output_file)
+
+  traceFunc = _generate_trace_function(to_include, to_exclude)
+  sys.settrace(traceFunc)
+  threading.settrace(traceFunc)
+
+
+def stop_instrumenting():
+  """Disable function-call tracing and detach the trace function."""
+  trace_event.trace_disable()
+
+  sys.settrace(None)
+  threading.settrace(None)
+
+
+@contextlib.contextmanager
+def Instrument(output_file, to_include=(), to_exclude=()):
+  """Context-manager form of start/stop_instrumenting.
+
+  Tracing is stopped even if the managed body raises.
+  """
+  try:
+    start_instrumenting(output_file, to_include, to_exclude)
+    yield None
+  finally:
+    stop_instrumenting()
diff --git a/deps/v8/build/android/pylib/utils/logdog_helper.py b/deps/v8/build/android/pylib/utils/logdog_helper.py
new file mode 100644
index 0000000000..68a7ba57ab
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/logdog_helper.py
@@ -0,0 +1,94 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to logdog."""
+
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import decorators
+
+sys.path.insert(0, os.path.abspath(os.path.join(
+ constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client')))
+from libs.logdog import bootstrap # pylint: disable=import-error
+
+
@decorators.NoRaiseException(default_return_value='',
                             exception_message=('Ignore this exception. '
                                                'crbug.com/675666'))
def text(name, data, content_type=None):
  """Uploads text to logdog.

  Any exception is swallowed by the NoRaiseException decorator and the
  default return value ('') is returned instead.

  Args:
    name: Name of the logdog stream.
    data: String with data you want to upload.
    content_type: The optional content type of the stream. If None, a
      default content type will be chosen.

  Returns:
    Link to view uploaded text in logdog viewer.
  """
  logging.info('Writing text to logdog stream, %s', name)
  with get_logdog_client().text(name, content_type=content_type) as stream:
    stream.write(data)
    # get_viewer_url() is called while the stream is still open.
    return stream.get_viewer_url()
+
+
@decorators.NoRaiseException(default_return_value=None,
                             exception_message=('Ignore this exception. '
                                                'crbug.com/675666'))
def open_text(name):
  """Returns a file like object which you can write to.

  Any exception is swallowed by the NoRaiseException decorator and None is
  returned instead; callers must handle a None result.

  Args:
    name: Name of the logdog stream.

  Returns:
    A file like object. close() file when done.
  """
  logging.info('Opening text logdog stream, %s', name)
  return get_logdog_client().open_text(name)
+
+
@decorators.NoRaiseException(default_return_value='',
                             exception_message=('Ignore this exception. '
                                                'crbug.com/675666'))
def binary(name, binary_path):
  """Uploads binary to logdog.

  Any exception is swallowed by the NoRaiseException decorator and the
  default return value ('') is returned instead.

  Args:
    name: Name of the logdog stream.
    binary_path: Path to binary you want to upload.

  Returns:
    Link to view uploaded binary in logdog viewer.
  """
  logging.info('Writing binary to logdog stream, %s', name)
  with get_logdog_client().binary(name) as stream:
    # Open in binary mode ('rb', not 'r'): text mode would translate
    # newlines on some platforms and mangle arbitrary binary payloads.
    with open(binary_path, 'rb') as f:
      stream.write(f.read())
    return stream.get_viewer_url()
+
+
@decorators.NoRaiseException(default_return_value='',
                             exception_message=('Ignore this exception. '
                                                'crbug.com/675666'))
def get_viewer_url(name):
  """Get Logdog viewer URL.

  Any exception is swallowed by the NoRaiseException decorator and the
  default return value ('') is returned instead.

  Args:
    name: Name of the logdog stream.

  Returns:
    Link to view uploaded binary in logdog viewer.
  """
  return get_logdog_client().get_viewer_url(name)
+
+
@decorators.Memoize
def get_logdog_client():
  """Returns a logdog stream client (cached via decorators.Memoize)."""
  logging.info('Getting logdog client.')
  return bootstrap.ButlerBootstrap.probe().stream_client()
diff --git a/deps/v8/build/android/pylib/utils/logging_utils.py b/deps/v8/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000000..9c4eae3fcb
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+
+from pylib.constants import host_paths
+
# Location of the checked-in copy of colorama in the Chromium tree.
_COLORAMA_PATH = os.path.join(
    host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')

# Import colorama from the in-tree copy rather than relying on it being
# installed in the environment.
with host_paths.SysPath(_COLORAMA_PATH, position=0):
  import colorama

# Short aliases for colorama's ANSI format constant groups.
BACK = colorama.Back
FORE = colorama.Fore
STYLE = colorama.Style
+
+
class _ColorFormatter(logging.Formatter):
  """Formatter that delegates to a wrapped formatter and colorizes output.

  The color applied depends on the record's level, per |color_map|;
  levels not in the map are passed through uncolored.
  """
  # pylint does not see members added dynamically in the constructor.
  # pylint: disable=no-member
  color_map = {
    logging.DEBUG: (FORE.CYAN),
    logging.WARNING: (FORE.YELLOW),
    logging.ERROR: (FORE.RED),
    logging.CRITICAL: (BACK.RED),
  }

  def __init__(self, wrapped_formatter=None):
    """Wraps a |logging.Formatter| and adds color."""
    # BUG FIX: the original passed |self| as the |fmt| argument of
    # logging.Formatter.__init__. It was latent only because format() is
    # overridden below; default-initialize the base class instead.
    super(_ColorFormatter, self).__init__()
    self._wrapped_formatter = wrapped_formatter or logging.Formatter()

  #override
  def format(self, record):
    # Let the wrapped formatter produce the text, then add ANSI color.
    message = self._wrapped_formatter.format(record)
    return self.Colorize(message, record.levelno)

  def Colorize(self, message, log_level):
    try:
      return (''.join(self.color_map[log_level]) + message +
              colorama.Style.RESET_ALL)
    except KeyError:
      # Levels without an entry in color_map are returned unmodified.
      return message
+
+
class ColorStreamHandler(logging.StreamHandler):
  """Handler that can be used to colorize logging output.

  Example using a specific logger:

    logger = logging.getLogger('my_logger')
    logger.addHandler(ColorStreamHandler())
    logger.info('message')

  Example using the root logger:

    ColorStreamHandler.MakeDefault()
    logging.info('message')

  """
  def __init__(self, force_color=False):
    """Initializes the handler.

    Args:
      force_color: If True, colorize even when the stream is not a tty.
    """
    super(ColorStreamHandler, self).__init__()
    self.force_color = force_color
    self.setFormatter(logging.Formatter())

  @property
  def is_tty(self):
    # True when the underlying stream reports being an interactive terminal.
    isatty = getattr(self.stream, 'isatty', None)
    return isatty and isatty()

  #override
  def setFormatter(self, formatter):
    # Transparently wrap any formatter in a colorizing one when appropriate.
    if self.force_color or self.is_tty:
      formatter = _ColorFormatter(formatter)
    super(ColorStreamHandler, self).setFormatter(formatter)

  @staticmethod
  def MakeDefault(force_color=False):
    """
    Replaces the default logging handlers with a coloring handler. To use
    a colorizing handler at the same time as others, either register them
    after this call, or add the ColorStreamHandler on the logger using
    Logger.addHandler()

    Args:
      force_color: Set to True to bypass the tty check and always colorize.
    """
    # If the existing handlers aren't removed, messages are duplicated
    logging.getLogger().handlers = []
    logging.getLogger().addHandler(ColorStreamHandler(force_color))
+
+
@contextlib.contextmanager
def OverrideColor(level, color):
  """Temporarily override the logging color for a specified level.

  Only handlers on the root logger whose formatter is a _ColorFormatter
  are affected; their previous color is restored on exit.

  Args:
    level: logging level whose color gets overridden.
    color: tuple of formats to apply to log lines.
  """
  prev_colors = {}
  for handler in logging.getLogger().handlers:
    if isinstance(handler.formatter, _ColorFormatter):
      prev_colors[handler.formatter] = handler.formatter.color_map[level]
      handler.formatter.color_map[level] = color
  try:
    yield
  finally:
    # dict.items() instead of the Python 2-only iteritems(): behaviorally
    # identical on Python 2 and keeps this working on Python 3.
    for formatter, prev_color in prev_colors.items():
      formatter.color_map[level] = prev_color
+
+
@contextlib.contextmanager
def SuppressLogging(level=logging.ERROR):
  """Momentarilly suppress logging events from all loggers.

  TODO(jbudorick): This is not thread safe. Log events from other threads might
  also inadvertently disappear.

  Example:

    with logging_utils.SuppressLogging():
      # all but CRITICAL logging messages are suppressed
      logging.info('just doing some thing') # not shown
      logging.critical('something really bad happened') # still shown

  Args:
    level: logging events with this or lower levels are suppressed.
  """
  logging.disable(level)
  try:
    yield
  finally:
    # Re-enable logging even if the body raised; otherwise an exception
    # inside the with-block would leave logging disabled globally.
    logging.disable(logging.NOTSET)
diff --git a/deps/v8/build/android/pylib/utils/maven_downloader.py b/deps/v8/build/android/pylib/utils/maven_downloader.py
new file mode 100755
index 0000000000..c60b0140ac
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/maven_downloader.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import logging
+import os
+import shutil
+
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+
+
+def _MakeDirsIfAbsent(path):
+ try:
+ os.makedirs(path)
+ except OSError as err:
+ if err.errno != errno.EEXIST or not os.path.isdir(path):
+ raise
+
+
class MavenDownloader(object):
  '''
  Downloads and installs the requested artifacts from the Google Maven repo.
  The artifacts are expected to be specified in the format
  "group_id:artifact_id:version:file_type", as the default file type is JAR
  but most Android libraries are provided as AARs, which would otherwise fail
  downloading. See Install()
  '''

  # Remote repository to download the artifacts from. The support library and
  # Google Play service are only distributed there, but third party libraries
  # could use Maven Central or JCenter for example. The default Maven remote
  # is Maven Central.
  _REMOTE_REPO = 'https://maven.google.com'

  # Default Maven repository.
  _DEFAULT_REPO_PATH = os.path.join(
      os.path.expanduser('~'), '.m2', 'repository')

  def __init__(self, debug=False):
    # When |debug| is True, Install() runs downloads serially and maven
    # output is not discarded (see _SingleArtifactDownloader).
    self._repo_path = MavenDownloader._DEFAULT_REPO_PATH
    self._remote_url = MavenDownloader._REMOTE_REPO
    self._debug = debug

  def Install(self, target_repo, artifacts, include_poms=False):
    """Downloads the artifacts (in parallel unless debugging) to target_repo.

    Args:
      target_repo: Directory to install the artifacts into.
      artifacts: Sequence of "group_id:artifact_id:version:file_type" strings.
      include_poms: If True, also fetch each artifact's POM file.
    """
    logging.info('Installing %d artifacts...', len(artifacts))
    downloaders = [_SingleArtifactDownloader(self, artifact, target_repo)
                   for artifact in artifacts]
    if self._debug:
      for downloader in downloaders:
        downloader.Run(include_poms)
    else:
      parallelizer.SyncParallelizer(downloaders).Run(include_poms)
    logging.info('%d artifacts installed to %s', len(artifacts), target_repo)

  @property
  def repo_path(self):
    # Local Maven repository that mvn downloads into (~/.m2/repository).
    return self._repo_path

  @property
  def remote_url(self):
    # Remote repository URL artifacts are fetched from.
    return self._remote_url

  @property
  def debug(self):
    return self._debug
+
+
+class _SingleArtifactDownloader(object):
+ '''Handles downloading and installing a single Maven artifact.'''
+
+ _POM_FILE_TYPE = 'pom'
+
+ def __init__(self, download_manager, artifact, target_repo):
+ self._download_manager = download_manager
+ self._artifact = artifact
+ self._target_repo = target_repo
+
+ def Run(self, include_pom=False):
+ parts = self._artifact.split(':')
+ if len(parts) != 4:
+ raise Exception('Artifacts expected as '
+ '"group_id:artifact_id:version:file_type".')
+ group_id, artifact_id, version, file_type = parts
+ self._InstallArtifact(group_id, artifact_id, version, file_type)
+
+ if include_pom and file_type != _SingleArtifactDownloader._POM_FILE_TYPE:
+ self._InstallArtifact(group_id, artifact_id, version,
+ _SingleArtifactDownloader._POM_FILE_TYPE)
+
+ def _InstallArtifact(self, group_id, artifact_id, version, file_type):
+ logging.debug('Processing %s', self._artifact)
+
+ download_relpath = self._DownloadArtifact(
+ group_id, artifact_id, version, file_type)
+ logging.debug('Downloaded.')
+
+ install_path = self._ImportArtifact(download_relpath)
+ logging.debug('Installed %s', os.path.relpath(install_path))
+
+ def _DownloadArtifact(self, group_id, artifact_id, version, file_type):
+ '''
+ Downloads the specified artifact using maven, to its standard location, see
+ MavenDownloader._DEFAULT_REPO_PATH.
+ '''
+ cmd = ['mvn',
+ 'org.apache.maven.plugins:maven-dependency-plugin:RELEASE:get',
+ '-DremoteRepositories={}'.format(self._download_manager.remote_url),
+ '-Dartifact={}:{}:{}:{}'.format(group_id, artifact_id, version,
+ file_type)]
+
+ stdout = None if self._download_manager.debug else open(os.devnull, 'wb')
+
+ try:
+ ret_code = cmd_helper.Call(cmd, stdout=stdout)
+ if ret_code != 0:
+ raise Exception('Command "{}" failed'.format(' '.join(cmd)))
+ except OSError as e:
+ if e.errno == os.errno.ENOENT:
+ raise Exception('mvn command not found. Please install Maven.')
+ raise
+
+ return os.path.join(os.path.join(*group_id.split('.')),
+ artifact_id,
+ version,
+ '{}-{}.{}'.format(artifact_id, version, file_type))
+
+ def _ImportArtifact(self, artifact_path):
+ src_dir = os.path.join(self._download_manager.repo_path, artifact_path)
+ dst_dir = os.path.join(self._target_repo, os.path.dirname(artifact_path))
+
+ _MakeDirsIfAbsent(dst_dir)
+ shutil.copy(src_dir, dst_dir)
+
+ return dst_dir
diff --git a/deps/v8/build/android/pylib/utils/proguard.py b/deps/v8/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000000..2d439a52c3
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/proguard.py
@@ -0,0 +1,288 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+
# Regexes recognizing the line kinds of a `proguard -dump` report.
_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*? Superclass:\s*([\S]+)$')
_PROGUARD_SECTION_RE = re.compile(
    r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
    r'\(count = \d+\):$')
_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$')
# Annotation element kinds; used as indexes into the value-constructor
# tuples in _ParseState (_INITIAL_VALUES / _LAZY_INITIAL_VALUES).
_ELEMENT_PRIMITIVE = 0
_ELEMENT_ARRAY = 1
_ELEMENT_ANNOTATION = 2
# (kind, regex) pairs used to classify annotation element value lines.
_PROGUARD_ELEMENT_RES = [
  (_ELEMENT_PRIMITIVE,
   re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')),
  (_ELEMENT_ARRAY,
   re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')),
  (_ELEMENT_ANNOTATION,
   re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$'))
]
# Number of spaces per nesting level in the dump output (see _GetDepth).
_PROGUARD_INDENT_WIDTH = 2
_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? \[(.*)\]$')
+
+
def _GetProguardPath():
  """Returns the path to the in-tree proguard jar under the out directory."""
  # Use the one in lib.java rather than source tree because it is the one that
  # is added to swarming .isolate files.
  return os.path.join(
      constants.GetOutDirectory(), 'lib.java', 'third_party', 'proguard',
      'proguard603.jar')
+
+
def Dump(jar_path):
  """Dumps class and method information from a JAR into a dict via proguard.

  Args:
    jar_path: An absolute path to the JAR file to dump.
  Returns:
    A dict in the following format:
      {
        'classes': [
          {
            'class': '',
            'superclass': '',
            'annotations': {/* dict -- see below */},
            'methods': [
              {
                'method': '',
                'annotations': {/* dict -- see below */},
              },
              ...
            ],
          },
          ...
        ],
      }

    Annotations dict format:
      {
        'empty-annotation-class-name': None,
        'annotation-class-name': {
          'field': 'primitive-value',
          'field': [ 'array-item-1', 'array-item-2', ... ],
          'field': {
            /* Object value */
            'field': 'primitive-value',
            'field': [ 'array-item-1', 'array-item-2', ... ],
            'field': { /* Object value */ }
          }
        }
      }

    Note that for top-level annotations their class names are used for
    identification, whereas for any nested annotations the corresponding
    field names are used.

    One drawback of this approach is that an array containing empty
    annotation classes will be represented as an array of 'None' values,
    thus it will not be possible to find out annotation class names.
    On the other hand, storing both annotation class name and the field name
    would produce a very complex JSON.
  """

  with tempfile.NamedTemporaryFile() as proguard_output:
    # ProGuard writes the dump to |proguard_output.name|; this handle is
    # still at offset 0 afterwards, so Parse() can read the result directly.
    # NOTE(review): presumably relies on POSIX semantics -- another process
    # cannot reopen a NamedTemporaryFile by name on Windows; confirm if this
    # is ever run there.
    cmd_helper.GetCmdStatusAndOutput([
        'java',
        '-jar', _GetProguardPath(),
        '-injars', jar_path,
        '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
        '-dump', proguard_output.name])
    return Parse(proguard_output)
+
+class _AnnotationElement(object):
+ def __init__(self, name, ftype, depth):
+ self.ref = None
+ self.name = name
+ self.ftype = ftype
+ self.depth = depth
+
class _ParseState(object):
  """Mutable state machine used by Parse() while walking the dump lines.

  Tracks the current class, current method, and a stack of nested
  annotation elements (rooted in a synthetic top-level element).
  """
  # Constructors of the initial value container per element kind, indexed by
  # _ELEMENT_PRIMITIVE / _ELEMENT_ARRAY / _ELEMENT_ANNOTATION.
  _INITIAL_VALUES = (lambda: None, list, dict)
  # Empty annotations are represented as 'None', not as an empty dictionary.
  _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None)

  def __init__(self):
    self._class_result = None
    self._method_result = None
    self._parse_annotations = False
    self._annotation_stack = []

  def ResetPerSection(self, section_name):
    """Clears method state; annotations are only parsed in some sections."""
    self.InitMethod(None)
    self._parse_annotations = (
        section_name in ['Class file attributes', 'Methods'])

  def ParseAnnotations(self):
    # True while inside a section whose annotations should be collected.
    return self._parse_annotations

  def CreateAndInitClass(self, class_name):
    """Starts a new class record and makes it current; returns the record."""
    self.InitMethod(None)
    self._class_result = {
        'class': class_name,
        'superclass': '',
        'annotations': {},
        'methods': [],
    }
    return self._class_result

  def HasCurrentClass(self):
    return bool(self._class_result)

  def SetSuperClass(self, superclass):
    assert self.HasCurrentClass()
    self._class_result['superclass'] = superclass

  def InitMethod(self, method_name):
    """Starts a new method record (or clears the current one if None)."""
    self._annotation_stack = []
    if method_name:
      self._method_result = {
          'method': method_name,
          'annotations': {},
      }
      self._class_result['methods'].append(self._method_result)
    else:
      self._method_result = None

  def InitAnnotation(self, annotation, depth):
    """Pushes a (possibly nested) annotation found at the given depth."""
    if not self._annotation_stack:
      # Add a fake parent element comprising 'annotations' dictionary,
      # so we can work uniformly with both top-level and nested annotations.
      annotations = _AnnotationElement(
          '<<<top level>>>', _ELEMENT_ANNOTATION, depth - 1)
      if self._method_result:
        annotations.ref = self._method_result['annotations']
      else:
        annotations.ref = self._class_result['annotations']
      self._annotation_stack = [annotations]
    self._BacktrackAnnotationStack(depth)
    if not self.HasCurrentAnnotation():
      self._annotation_stack.append(
          _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth))
    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)

  def HasCurrentAnnotation(self):
    # The synthetic root element alone does not count as an annotation.
    return len(self._annotation_stack) > 1

  def InitAnnotationField(self, field, field_type, depth):
    """Pushes a field of the current annotation found at the given depth."""
    self._BacktrackAnnotationStack(depth)
    # Create the parent representation, if needed. E.g. annotations
    # are represented with `None`, not with `{}` until they receive the first
    # field.
    self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES)
    if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY:
      # Nested arrays are not allowed in annotations.
      assert not field_type == _ELEMENT_ARRAY
      # Use array index instead of bogus field name.
      field = len(self._annotation_stack[-1].ref)
    self._annotation_stack.append(_AnnotationElement(field, field_type, depth))
    self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)

  def UpdateCurrentAnnotationFieldValue(self, value, depth):
    """Records a constant value for the field at the given depth."""
    self._BacktrackAnnotationStack(depth)
    self._InitOrUpdateCurrentField(value)

  def _CreateAnnotationPlaceHolder(self, constructors):
    # Lazily create the value container for the top-of-stack element and
    # link it into its parent.
    assert self.HasCurrentAnnotation()
    field = self._annotation_stack[-1]
    if field.ref is None:
      field.ref = constructors[field.ftype]()
      self._InitOrUpdateCurrentField(field.ref)

  def _BacktrackAnnotationStack(self, depth):
    # Pop elements that are at or deeper than |depth|; they are finished.
    stack = self._annotation_stack
    while len(stack) > 0 and stack[-1].depth >= depth:
      stack.pop()

  def _InitOrUpdateCurrentField(self, value):
    assert self.HasCurrentAnnotation()
    parent = self._annotation_stack[-2]
    assert not parent.ref is None
    # There can be no nested constant element values.
    assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION]
    field = self._annotation_stack[-1]
    if isinstance(value, str) and not field.ftype == _ELEMENT_PRIMITIVE:
      # The value comes from the output parser via
      # UpdateCurrentAnnotationFieldValue, and should be a value of a constant
      # element. If it isn't, just skip it.
      return
    if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref):
      parent.ref.append(value)
    else:
      parent.ref[field.name] = value
+
+
def _GetDepth(prefix):
  """Maps a dump line's indent prefix to its nesting depth."""
  indent = len(prefix)
  return indent // _PROGUARD_INDENT_WIDTH
+
def Parse(proguard_output):
  """Parses a proguard -dump (iterable of lines) into the Dump() format.

  Args:
    proguard_output: Iterable of dump lines (e.g. an open file or a list).

  Returns:
    A dict with a 'classes' list; see Dump() for the full schema.
  """
  results = {
      'classes': [],
  }

  state = _ParseState()

  for line in proguard_output:
    line = line.strip('\r\n')

    m = _PROGUARD_CLASS_RE.match(line)
    if m:
      # New class: convert JVM-internal slashes to dotted names.
      results['classes'].append(
          state.CreateAndInitClass(m.group(1).replace('/', '.')))
      continue

    if not state.HasCurrentClass():
      # Ignore everything before the first class header.
      continue

    m = _PROGUARD_SUPERCLASS_RE.match(line)
    if m:
      state.SetSuperClass(m.group(1).replace('/', '.'))
      continue

    m = _PROGUARD_SECTION_RE.match(line)
    if m:
      state.ResetPerSection(m.group(1))
      continue

    m = _PROGUARD_METHOD_RE.match(line)
    if m:
      state.InitMethod(m.group(1))
      continue

    if not state.ParseAnnotations():
      continue

    m = _PROGUARD_ANNOTATION_RE.match(line)
    if m:
      # Ignore the annotation package.
      state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1)))
      continue

    if state.HasCurrentAnnotation():
      m = None
      # Classify the line as a primitive/array/annotation element, if any.
      for (element_type, element_re) in _PROGUARD_ELEMENT_RES:
        m = element_re.match(line)
        if m:
          state.InitAnnotationField(
              m.group(2), element_type, _GetDepth(m.group(1)))
          break
      if m:
        continue
      m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
      if m:
        state.UpdateCurrentAnnotationFieldValue(
            m.group(2), _GetDepth(m.group(1)))
      else:
        # Any unrecognized line ends the current method's annotation scope.
        state.InitMethod(None)

  return results
diff --git a/deps/v8/build/android/pylib/utils/proguard_test.py b/deps/v8/build/android/pylib/utils/proguard_test.py
new file mode 100755
index 0000000000..7672476e0a
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/proguard_test.py
@@ -0,0 +1,495 @@
+#! /usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.utils import proguard
+
class TestParse(unittest.TestCase):
  """Tests for proguard.Parse.

  Uses assertEqual throughout: assertEquals is a deprecated alias of
  assertEqual (deprecated since Python 2.7/3.2).
  """

  def setUp(self):
    self.maxDiff = None

  def testClass(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         ' Superclass: java/lang/Object'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': 'java.lang.Object',
                'annotations': {},
                'methods': []
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testMethod(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Methods (count = 1):',
         '- Method: <init>()V'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {},
                'methods': [
                    {
                        'method': '<init>',
                        'annotations': {}
                    }
                ]
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testClassAnnotation(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Class file attributes (count = 3):',
         ' - Annotation [Lorg/example/Annotation;]:',
         ' - Annotation [Lorg/example/AnnotationWithValue;]:',
         ' - Constant element value [attr \'13\']',
         ' - Utf8 [val]',
         ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
         ' - Constant element value [attr1 \'13\']',
         ' - Utf8 [val1]',
         ' - Constant element value [attr2 \'13\']',
         ' - Utf8 [val2]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {
                    'Annotation': None,
                    'AnnotationWithValue': {'attr': 'val'},
                    'AnnotationWithTwoValues': {'attr1': 'val1',
                                                'attr2': 'val2'}
                },
                'methods': []
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testClassAnnotationWithArrays(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Class file attributes (count = 3):',
         ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
         ' - Array element value [arrayAttr]:',
         ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
         ' - Array element value [arrayAttr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val]',
         ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
         ' - Array element value [arrayAttr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val1]',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val2]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {
                    'AnnotationWithEmptyArray': {'arrayAttr': []},
                    'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
                    'AnnotationWithTwoElemArray': {
                        'arrayAttr': ['val1', 'val2']}
                },
                'methods': []
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testNestedClassAnnotations(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Class file attributes (count = 1):',
         ' - Annotation [Lorg/example/OuterAnnotation;]:',
         ' - Constant element value [outerAttr \'13\']',
         ' - Utf8 [outerVal]',
         ' - Array element value [outerArr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [outerArrVal1]',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [outerArrVal2]',
         ' - Annotation element value [emptyAnn]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:',
         ' - Annotation element value [ann]:',
         ' - Annotation [Lorg/example/InnerAnnotation;]:',
         ' - Constant element value [innerAttr \'13\']',
         ' - Utf8 [innerVal]',
         ' - Array element value [innerArr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [innerArrVal1]',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [innerArrVal2]',
         ' - Annotation element value [emptyInnerAnn]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {
                    'OuterAnnotation': {
                        'outerAttr': 'outerVal',
                        'outerArr': ['outerArrVal1', 'outerArrVal2'],
                        'emptyAnn': None,
                        'ann': {
                            'innerAttr': 'innerVal',
                            'innerArr': ['innerArrVal1', 'innerArrVal2'],
                            'emptyInnerAnn': None
                        }
                    }
                },
                'methods': []
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testClassArraysOfAnnotations(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Class file attributes (count = 1):',
         ' - Annotation [Lorg/example/OuterAnnotation;]:',
         ' - Array element value [arrayWithEmptyAnnotations]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:',
         ' - Array element value [outerArray]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/InnerAnnotation;]:',
         ' - Constant element value [innerAttr \'115\']',
         ' - Utf8 [innerVal]',
         ' - Array element value [arguments]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
         ' - Constant element value [arg1Attr \'115\']',
         ' - Utf8 [arg1Val]',
         ' - Array element value [arg1Array]:',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [11]',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [12]',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
         ' - Constant element value [arg2Attr \'115\']',
         ' - Utf8 [arg2Val]',
         ' - Array element value [arg2Array]:',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [21]',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [22]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {
                    'OuterAnnotation': {
                        'arrayWithEmptyAnnotations': [None, None],
                        'outerArray': [
                            {
                                'innerAttr': 'innerVal',
                                'arguments': [
                                    {'arg1Attr': 'arg1Val',
                                     'arg1Array': ['11', '12']},
                                    {'arg2Attr': 'arg2Val',
                                     'arg2Array': ['21', '22']}
                                ]
                            }
                        ]
                    }
                },
                'methods': []
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testReadFullClassFileAttributes(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Class file attributes (count = 3):',
         ' - Source file attribute:',
         ' - Utf8 [Class.java]',
         ' - Runtime visible annotations attribute:',
         ' - Annotation [Lorg/example/IntValueAnnotation;]:',
         ' - Constant element value [value \'73\']',
         ' - Integer [19]',
         ' - Inner classes attribute (count = 1)',
         ' - InnerClassesInfo:',
         ' Access flags: 0x9 = public static',
         ' - Class [org/example/Class1]',
         ' - Class [org/example/Class2]',
         ' - Utf8 [OnPageFinishedHelper]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {
                    'IntValueAnnotation': {
                        'value': '19',
                    }
                },
                'methods': []
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testMethodAnnotation(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Methods (count = 1):',
         '- Method: Test()V',
         ' - Annotation [Lorg/example/Annotation;]:',
         ' - Annotation [Lorg/example/AnnotationWithValue;]:',
         ' - Constant element value [attr \'13\']',
         ' - Utf8 [val]',
         ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
         ' - Constant element value [attr1 \'13\']',
         ' - Utf8 [val1]',
         ' - Constant element value [attr2 \'13\']',
         ' - Utf8 [val2]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {},
                'methods': [
                    {
                        'method': 'Test',
                        'annotations': {
                            'Annotation': None,
                            'AnnotationWithValue': {'attr': 'val'},
                            'AnnotationWithTwoValues': {'attr1': 'val1',
                                                        'attr2': 'val2'}
                        },
                    }
                ]
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testMethodAnnotationWithArrays(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Methods (count = 1):',
         '- Method: Test()V',
         ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
         ' - Array element value [arrayAttr]:',
         ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
         ' - Array element value [arrayAttr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val]',
         ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
         ' - Array element value [arrayAttr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val1]',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val2]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {},
                'methods': [
                    {
                        'method': 'Test',
                        'annotations': {
                            'AnnotationWithEmptyArray': {'arrayAttr': []},
                            'AnnotationWithOneElemArray': {
                                'arrayAttr': ['val']},
                            'AnnotationWithTwoElemArray': {
                                'arrayAttr': ['val1', 'val2']}
                        },
                    }
                ]
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testMethodAnnotationWithPrimitivesAndArrays(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Methods (count = 1):',
         '- Method: Test()V',
         ' - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:',
         ' - Constant element value [attr \'13\']',
         ' - Utf8 [val]',
         ' - Array element value [arrayAttr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val]',
         ' - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:',
         ' - Array element value [arrayAttr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val]',
         ' - Constant element value [attr \'13\']',
         ' - Utf8 [val]',
         ' - Annotation [Lorg/example/AnnotationTwoArrays;]:',
         ' - Array element value [arrayAttr1]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val1]',
         ' - Array element value [arrayAttr2]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [val2]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {},
                'methods': [
                    {
                        'method': 'Test',
                        'annotations': {
                            'AnnotationPrimitiveThenArray': {
                                'attr': 'val',
                                'arrayAttr': ['val']},
                            'AnnotationArrayThenPrimitive': {
                                'arrayAttr': ['val'],
                                'attr': 'val'},
                            'AnnotationTwoArrays': {
                                'arrayAttr1': ['val1'],
                                'arrayAttr2': ['val2']}
                        },
                    }
                ]
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testNestedMethodAnnotations(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Methods (count = 1):',
         '- Method: Test()V',
         ' - Annotation [Lorg/example/OuterAnnotation;]:',
         ' - Constant element value [outerAttr \'13\']',
         ' - Utf8 [outerVal]',
         ' - Array element value [outerArr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [outerArrVal1]',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [outerArrVal2]',
         ' - Annotation element value [emptyAnn]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:',
         ' - Annotation element value [ann]:',
         ' - Annotation [Lorg/example/InnerAnnotation;]:',
         ' - Constant element value [innerAttr \'13\']',
         ' - Utf8 [innerVal]',
         ' - Array element value [innerArr]:',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [innerArrVal1]',
         ' - Constant element value [(default) \'13\']',
         ' - Utf8 [innerArrVal2]',
         ' - Annotation element value [emptyInnerAnn]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {},
                'methods': [
                    {
                        'method': 'Test',
                        'annotations': {
                            'OuterAnnotation': {
                                'outerAttr': 'outerVal',
                                'outerArr': ['outerArrVal1', 'outerArrVal2'],
                                'emptyAnn': None,
                                'ann': {
                                    'innerAttr': 'innerVal',
                                    'innerArr': ['innerArrVal1',
                                                 'innerArrVal2'],
                                    'emptyInnerAnn': None
                                }
                            }
                        },
                    }
                ]
            }
        ]
    }
    self.assertEqual(expected, actual)

  def testMethodArraysOfAnnotations(self):
    actual = proguard.Parse(
        ['- Program class: org/example/Test',
         'Methods (count = 1):',
         '- Method: Test()V',
         ' - Annotation [Lorg/example/OuterAnnotation;]:',
         ' - Array element value [arrayWithEmptyAnnotations]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/EmptyAnnotation;]:',
         ' - Array element value [outerArray]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/InnerAnnotation;]:',
         ' - Constant element value [innerAttr \'115\']',
         ' - Utf8 [innerVal]',
         ' - Array element value [arguments]:',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
         ' - Constant element value [arg1Attr \'115\']',
         ' - Utf8 [arg1Val]',
         ' - Array element value [arg1Array]:',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [11]',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [12]',
         ' - Annotation element value [(default)]:',
         ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
         ' - Constant element value [arg2Attr \'115\']',
         ' - Utf8 [arg2Val]',
         ' - Array element value [arg2Array]:',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [21]',
         ' - Constant element value [(default) \'73\']',
         ' - Integer [22]'])
    expected = {
        'classes': [
            {
                'class': 'org.example.Test',
                'superclass': '',
                'annotations': {},
                'methods': [
                    {
                        'method': 'Test',
                        'annotations': {
                            'OuterAnnotation': {
                                'arrayWithEmptyAnnotations': [None, None],
                                'outerArray': [
                                    {
                                        'innerAttr': 'innerVal',
                                        'arguments': [
                                            {'arg1Attr': 'arg1Val',
                                             'arg1Array': ['11', '12']},
                                            {'arg2Attr': 'arg2Val',
                                             'arg2Array': ['21', '22']}
                                        ]
                                    }
                                ]
                            }
                        }
                    }
                ]
            }
        ]
    }
    self.assertEqual(expected, actual)
+
+
if __name__ == '__main__':
  # Allow running this test file directly.
  unittest.main()
diff --git a/deps/v8/build/android/pylib/utils/repo_utils.py b/deps/v8/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000000..5a0efa8b6e
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from devil.utils import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+ """Returns the git hash tag for the given directory.
+
+ Args:
+ in_directory: The directory where git is to be run.
+ """
+ command_line = ['git', 'log', '-1', '--pretty=format:%H']
+ output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+ return output[0:40]
diff --git a/deps/v8/build/android/pylib/utils/shared_preference_utils.py b/deps/v8/build/android/pylib/utils/shared_preference_utils.py
new file mode 100644
index 0000000000..ae0d31b784
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/shared_preference_utils.py
@@ -0,0 +1,95 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for modifying an app's settings file using JSON."""
+
+import json
+import logging
+
+
+def UnicodeToStr(data):
+ """Recursively converts any Unicode to Python strings.
+
+ Args:
+ data: The data to be converted.
+
+  Returns:
+ A copy of the given data, but with instances of Unicode converted to Python
+ strings.
+ """
+ if isinstance(data, dict):
+ return {UnicodeToStr(key): UnicodeToStr(value)
+ for key, value in data.iteritems()}
+ elif isinstance(data, list):
+ return [UnicodeToStr(element) for element in data]
+ elif isinstance(data, unicode):
+ return data.encode('utf-8')
+ return data
+
+
+def ExtractSettingsFromJson(filepath):
+ """Extracts the settings data from the given JSON file.
+
+ Args:
+ filepath: The path to the JSON file to read.
+
+  Returns:
+ The data read from the JSON file with strings converted to Python strings.
+ """
+ # json.load() loads strings as unicode, which causes issues when trying
+ # to edit string values in preference files, so convert to Python strings
+ with open(filepath) as prefs_file:
+ return UnicodeToStr(json.load(prefs_file))
+
+
+def ApplySharedPreferenceSetting(shared_pref, setting):
+ """Applies the given app settings to the given device.
+
+ Modifies an installed app's settings by modifying its shared preference
+ settings file. Provided settings data must be a settings dictionary,
+  which is in the following format:
+ {
+ "package": "com.example.package",
+ "filename": "AppSettingsFile.xml",
+ "supports_encrypted_path": true,
+ "set": {
+ "SomeBoolToSet": true,
+ "SomeStringToSet": "StringValue",
+ },
+ "remove": [
+ "list",
+ "of",
+ "keys",
+ "to",
+ "remove",
+ ]
+ }
+
+ Example JSON files that can be read with ExtractSettingsFromJson and passed to
+ this function are in //chrome/android/shared_preference_files/test/.
+
+ Args:
+ shared_pref: The devil SharedPrefs object for the device the settings will
+ be applied to.
+ setting: A settings dictionary to apply.
+ """
+ shared_pref.Load()
+ for key in setting.get('remove', []):
+ try:
+ shared_pref.Remove(key)
+ except KeyError:
+ logging.warning("Attempted to remove non-existent key %s", key)
+ for key, value in setting.get('set', {}).iteritems():
+ if isinstance(value, bool):
+ shared_pref.SetBoolean(key, value)
+ elif isinstance(value, basestring):
+ shared_pref.SetString(key, value)
+ elif isinstance(value, long) or isinstance(value, int):
+ shared_pref.SetLong(key, value)
+ elif isinstance(value, list):
+ shared_pref.SetStringSet(key, value)
+ else:
+ raise ValueError("Given invalid value type %s for key %s" % (
+ str(type(value)), key))
+ shared_pref.Commit()
diff --git a/deps/v8/build/android/pylib/utils/simpleperf.py b/deps/v8/build/android/pylib/utils/simpleperf.py
new file mode 100644
index 0000000000..be259d621f
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/simpleperf.py
@@ -0,0 +1,259 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from devil import devil_env
+from devil.android import device_signal
+from devil.android.sdk import version_codes
+
+
+def _ProcessType(proc):
+ _, _, suffix = proc.name.partition(':')
+ if not suffix:
+ return 'browser'
+ if suffix.startswith('sandboxed_process'):
+ return 'renderer'
+ if suffix.startswith('privileged_process'):
+ return 'gpu'
+ return None
+
+
+def _GetSpecifiedPID(device, package_name, process_specifier):
+ if process_specifier is None:
+ return None
+
+ # Check for numeric PID
+ try:
+ pid = int(process_specifier)
+ return pid
+ except ValueError:
+ pass
+
+ # Check for exact process name; can be any of these formats:
+ # <package>:<process name>, i.e. 'org.chromium.chrome:sandboxed_process0'
+ # :<process name>, i.e. ':sandboxed_process0'
+ # <process name>, i.e. 'sandboxed_process0'
+ full_process_name = process_specifier
+ if process_specifier.startswith(':'):
+ full_process_name = package_name + process_specifier
+ elif ':' not in process_specifier:
+ full_process_name = '%s:%s' % (package_name, process_specifier)
+ matching_processes = device.ListProcesses(full_process_name)
+ if len(matching_processes) == 1:
+ return matching_processes[0].pid
+ if len(matching_processes) > 1:
+ raise RuntimeError('Found %d processes with name "%s".' % (
+ len(matching_processes), process_specifier))
+
+ # Check for process type (i.e. 'renderer')
+ package_processes = device.ListProcesses(package_name)
+ matching_processes = [p for p in package_processes if (
+ _ProcessType(p) == process_specifier)]
+ if process_specifier == 'renderer' and len(matching_processes) > 1:
+ raise RuntimeError('Found %d renderer processes; please re-run with only '
+ 'one open tab.' % len(matching_processes))
+ if len(matching_processes) != 1:
+ raise RuntimeError('Found %d processes of type "%s".' % (
+ len(matching_processes), process_specifier))
+ return matching_processes[0].pid
+
+
+def _ThreadsForProcess(device, pid):
+ # The thread list output format for 'ps' is the same regardless of version.
+ # Here's the column headers, and a sample line for a thread belonging to
+ # pid 12345 (note that the last few columns are not aligned with headers):
+ #
+ # USER PID TID PPID VSZ RSS WCHAN ADDR S CMD
+ # u0_i101 12345 24680 567 1357902 97531 futex_wait_queue_me e85acd9c S \
+ # CrRendererMain
+ if device.build_version_sdk >= version_codes.OREO:
+ pid_regex = (
+ r'^[[:graph:]]\{1,\}[[:blank:]]\{1,\}%d[[:blank:]]\{1,\}' % pid)
+ ps_cmd = "ps -T -e | grep '%s'" % pid_regex
+ ps_output_lines = device.RunShellCommand(
+ ps_cmd, shell=True, check_return=True)
+ else:
+ ps_cmd = ['ps', '-p', str(pid), '-t']
+ ps_output_lines = device.RunShellCommand(ps_cmd, check_return=True)
+ result = []
+ for l in ps_output_lines:
+ fields = l.split()
+ # fields[2] is tid, fields[-1] is thread name. Output may include an entry
+ # for the process itself with tid=pid; omit that one.
+ if fields[2] == str(pid):
+ continue
+ result.append((int(fields[2]), fields[-1]))
+ return result
+
+
+def _ThreadType(thread_name):
+ if not thread_name:
+ return 'unknown'
+ if (thread_name.startswith('Chrome_ChildIO') or
+ thread_name.startswith('Chrome_IO')):
+ return 'io'
+ if thread_name.startswith('Compositor'):
+ return 'compositor'
+ if (thread_name.startswith('ChildProcessMai') or
+ thread_name.startswith('CrGpuMain') or
+ thread_name.startswith('CrRendererMain')):
+ return 'main'
+ if thread_name.startswith('RenderThread'):
+ return 'render'
+
+
+def _GetSpecifiedTID(device, pid, thread_specifier):
+ if thread_specifier is None:
+ return None
+
+ # Check for numeric TID
+ try:
+ tid = int(thread_specifier)
+ return tid
+ except ValueError:
+ pass
+
+ # Check for thread type
+ if pid is not None:
+ matching_threads = [t for t in _ThreadsForProcess(device, pid) if (
+ _ThreadType(t[1]) == thread_specifier)]
+ if len(matching_threads) != 1:
+ raise RuntimeError('Found %d threads of type "%s".' % (
+ len(matching_threads), thread_specifier))
+ return matching_threads[0][0]
+
+ return None
+
+
+def PrepareDevice(device):
+ if device.build_version_sdk < version_codes.NOUGAT:
+ raise RuntimeError('Simpleperf profiling is only supported on Android N '
+ 'and later.')
+
+ # Necessary for profiling
+ # https://android-review.googlesource.com/c/platform/system/sepolicy/+/234400
+ device.SetProp('security.perf_harden', '0')
+
+
+def InstallSimpleperf(device, package_name):
+ package_arch = device.GetPackageArchitecture(package_name) or 'armeabi-v7a'
+ host_simpleperf_path = devil_env.config.LocalPath('simpleperf', package_arch)
+ if not host_simpleperf_path:
+ raise Exception('Could not get path to simpleperf executable on host.')
+ device_simpleperf_path = '/'.join(
+ ('/data/local/tmp/profilers', package_arch, 'simpleperf'))
+ device.PushChangedFiles([(host_simpleperf_path, device_simpleperf_path)])
+ return device_simpleperf_path
+
+
+@contextlib.contextmanager
+def RunSimpleperf(device, device_simpleperf_path, package_name,
+ process_specifier, thread_specifier, profiler_args,
+ host_out_path):
+ pid = _GetSpecifiedPID(device, package_name, process_specifier)
+ tid = _GetSpecifiedTID(device, pid, thread_specifier)
+ if pid is None and tid is None:
+ raise RuntimeError('Could not find specified process/thread running on '
+ 'device. Make sure the apk is already running before '
+ 'attempting to profile.')
+ profiler_args = list(profiler_args)
+ if profiler_args and profiler_args[0] == 'record':
+ profiler_args.pop(0)
+ if '--call-graph' not in profiler_args and '-g' not in profiler_args:
+ profiler_args.append('-g')
+ if '-f' not in profiler_args:
+ profiler_args.extend(('-f', '1000'))
+ device_out_path = '/data/local/tmp/perf.data'
+ if '-o' in profiler_args:
+ device_out_path = profiler_args[profiler_args.index('-o') + 1]
+ else:
+ profiler_args.extend(('-o', device_out_path))
+
+ if tid:
+ profiler_args.extend(('-t', str(tid)))
+ else:
+ profiler_args.extend(('-p', str(pid)))
+
+ adb_shell_simpleperf_process = device.adb.StartShell(
+ [device_simpleperf_path, 'record'] + profiler_args)
+
+ completed = False
+ try:
+ yield
+ completed = True
+
+ finally:
+ device.KillAll('simpleperf', signum=device_signal.SIGINT, blocking=True,
+ quiet=True)
+ if completed:
+ adb_shell_simpleperf_process.wait()
+ device.PullFile(device_out_path, host_out_path)
+
+
+def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
+ pprof_out_path):
+ # The simpleperf scripts require the unstripped libs to be installed in the
+ # same directory structure as the libs on the device. Much of the logic here
+ # is just figuring out and creating the necessary directory structure, and
+ # symlinking the unstripped shared libs.
+
+ # Get the set of libs that we can symbolize
+ unstripped_lib_dir = os.path.join(build_directory, 'lib.unstripped')
+ unstripped_libs = set(
+ f for f in os.listdir(unstripped_lib_dir) if f.endswith('.so'))
+
+ # report.py will show the directory structure above the shared libs;
+ # that is the directory structure we need to recreate on the host.
+ script_dir = devil_env.config.LocalPath('simpleperf_scripts')
+ report_path = os.path.join(script_dir, 'report.py')
+ report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path]
+ device_lib_path = None
+ for line in subprocess.check_output(
+ report_cmd, stderr=subprocess.STDOUT).splitlines():
+ fields = line.split()
+ if len(fields) < 5:
+ continue
+ shlib_path = fields[4]
+ shlib_dirname, shlib_basename = shlib_path.rpartition('/')[::2]
+ if shlib_basename in unstripped_libs:
+ device_lib_path = shlib_dirname
+ break
+ if not device_lib_path:
+ raise RuntimeError('No chrome-related symbols in profiling data in %s. '
+ 'Either the process was idle for the entire profiling '
+ 'period, or something went very wrong (and you should '
+ 'file a bug at crbug.com/new with component '
+ 'Speed>Tracing, and assign it to szager@chromium.org).'
+ % simpleperf_out_path)
+
+ # Recreate the directory structure locally, and symlink unstripped libs.
+ processing_dir = tempfile.mkdtemp()
+ try:
+ processing_lib_dir = os.path.join(
+ processing_dir, 'binary_cache', device_lib_path.lstrip('/'))
+ os.makedirs(processing_lib_dir)
+ for lib in unstripped_libs:
+ unstripped_lib_path = os.path.join(unstripped_lib_dir, lib)
+ processing_lib_path = os.path.join(processing_lib_dir, lib)
+ os.symlink(unstripped_lib_path, processing_lib_path)
+
+ # Run the script to annotate symbols and convert from simpleperf format to
+ # pprof format.
+ llvm_symbolizer_path = devil_env.config.LocalPath('llvm-symbolizer')
+ pprof_converter_script = os.path.join(
+ script_dir, 'pprof_proto_generator.py')
+ pprof_converter_cmd = [sys.executable, pprof_converter_script,
+ '-i', simpleperf_out_path,
+ '-o', os.path.abspath(pprof_out_path),
+ '--addr2line', llvm_symbolizer_path]
+ subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
+ cwd=processing_dir)
+ finally:
+ shutil.rmtree(processing_dir, ignore_errors=True)
diff --git a/deps/v8/build/android/pylib/utils/test_filter.py b/deps/v8/build/android/pylib/utils/test_filter.py
new file mode 100644
index 0000000000..430b4c598d
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/test_filter.py
@@ -0,0 +1,139 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+
+_CMDLINE_NAME_SEGMENT_RE = re.compile(
+ r' with(?:out)? \{[^\}]*\}')
+
+class ConflictingPositiveFiltersException(Exception):
+ """Raised when both filter file and filter argument have positive filters."""
+
+
+def ParseFilterFile(input_lines):
+ """Converts test filter file contents to positive and negative pattern lists.
+
+ See //testing/buildbot/filters/README.md for description of the
+ syntax that |input_lines| are expected to follow.
+
+ See
+  https://github.com/google/googletest/blob/master/docs/advanced.md#running-a-subset-of-the-tests
+ for description of the syntax that --gtest_filter argument should follow.
+
+ Args:
+ input_lines: An iterable (e.g. a list or a file) containing input lines.
+ Returns:
+ tuple containing the lists of positive patterns and negative patterns
+ """
+ # Strip comments and whitespace from each line and filter non-empty lines.
+ stripped_lines = (l.split('#', 1)[0].strip() for l in input_lines)
+ filter_lines = [l for l in stripped_lines if l]
+
+ # Split the tests into positive and negative patterns (gtest treats
+ # every pattern after the first '-' sign as an exclusion).
+ positive_patterns = [l for l in filter_lines if l[0] != '-']
+ negative_patterns = [l[1:] for l in filter_lines if l[0] == '-']
+ return positive_patterns, negative_patterns
+
+
+def AddFilterOptions(parser):
+ """Adds filter command-line options to the provided parser.
+
+ Args:
+ parser: an argparse.ArgumentParser instance.
+ """
+ parser.add_argument(
+ # Deprecated argument.
+ '--gtest-filter-file',
+ # New argument.
+ '--test-launcher-filter-file',
+ dest='test_filter_file', type=os.path.realpath,
+ help='Path to file that contains googletest-style filter strings. '
+ 'See also //testing/buildbot/filters/README.md.')
+
+ filter_group = parser.add_mutually_exclusive_group()
+ filter_group.add_argument(
+ '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+ dest='test_filter',
+ help='googletest-style filter string.',
+ default=os.environ.get('GTEST_FILTER'))
+ filter_group.add_argument(
+ '--isolated-script-test-filter',
+ help='isolated script filter string. '
+ 'Like gtest filter strings, but with :: separators instead of :')
+
+
+def AppendPatternsToFilter(test_filter, positive_patterns=None,
+ negative_patterns=None):
+ """Returns a test-filter string with additional patterns.
+
+ Args:
+ test_filter: test filter string
+ positive_patterns: list of positive patterns to add to string
+ negative_patterns: list of negative patterns to add to string
+ """
+ positives = []
+ negatives = []
+ positive = ''
+ negative = ''
+
+ split_filter = test_filter.split('-', 1)
+ if len(split_filter) == 1:
+ positive = split_filter[0]
+ else:
+ positive, negative = split_filter
+
+ positives += [f for f in positive.split(':') if f]
+ negatives += [f for f in negative.split(':') if f]
+
+ positives += positive_patterns if positive_patterns else []
+ negatives += negative_patterns if negative_patterns else []
+
+ final_filter = ':'.join([p.replace('#', '.') for p in positives])
+ if negatives:
+ final_filter += '-' + ':'.join([n.replace('#', '.') for n in negatives])
+ return final_filter
+
+
+def HasPositivePatterns(test_filter):
+  """Returns True if test_filter contains a positive pattern, else False.
+
+ Args:
+ test_filter: test-filter style string
+ """
+ return bool(len(test_filter) > 0 and test_filter[0] != '-')
+
+
+def InitializeFilterFromArgs(args):
+ """Returns a filter string from the command-line option values.
+
+ Args:
+ args: an argparse.Namespace instance resulting from a using parser
+ to which the filter options above were added.
+
+ Raises:
+ ConflictingPositiveFiltersException if both filter file and command line
+ specify positive filters.
+ """
+ test_filter = ''
+ if args.isolated_script_test_filter:
+ args.test_filter = args.isolated_script_test_filter.replace('::', ':')
+ if args.test_filter:
+ test_filter = _CMDLINE_NAME_SEGMENT_RE.sub(
+ '', args.test_filter.replace('#', '.'))
+
+ if args.test_filter_file:
+ with open(args.test_filter_file, 'r') as f:
+ positive_file_patterns, negative_file_patterns = ParseFilterFile(f)
+ if positive_file_patterns and HasPositivePatterns(test_filter):
+ raise ConflictingPositiveFiltersException(
+ 'Cannot specify positive pattern in both filter file and ' +
+ 'filter command line argument')
+ test_filter = AppendPatternsToFilter(test_filter,
+ positive_patterns=positive_file_patterns,
+ negative_patterns=negative_file_patterns)
+
+ return test_filter
diff --git a/deps/v8/build/android/pylib/utils/test_filter_test.py b/deps/v8/build/android/pylib/utils/test_filter_test.py
new file mode 100755
index 0000000000..1ae5a7ebe0
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/test_filter_test.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env vpython
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+import tempfile
+import unittest
+
+from pylib.utils import test_filter
+
+class ParseFilterFileTest(unittest.TestCase):
+
+ def testParseFilterFile_commentsAndBlankLines(self):
+ input_lines = [
+ 'positive1',
+ '# comment',
+ 'positive2 # Another comment',
+ ''
+ 'positive3'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2', 'positive3'], []
+ self.assertEquals(expected, actual)
+
+ def testParseFilterFile_onlyPositive(self):
+ input_lines = [
+ 'positive1',
+ 'positive2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2'], []
+ self.assertEquals(expected, actual)
+
+ def testParseFilterFile_onlyNegative(self):
+ input_lines = [
+ '-negative1',
+ '-negative2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = [], ['negative1', 'negative2']
+ self.assertEquals(expected, actual)
+
+ def testParseFilterFile_positiveAndNegative(self):
+ input_lines = [
+ 'positive1',
+ 'positive2',
+ '-negative1',
+ '-negative2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2'], ['negative1', 'negative2']
+ self.assertEquals(expected, actual)
+
+
+class InitializeFilterFromArgsTest(unittest.TestCase):
+
+ def testInitializeBasicFilter(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--test-filter',
+ 'FooTest.testFoo:BarTest.testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testInitializeJavaStyleFilter(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--test-filter',
+ 'FooTest#testFoo:BarTest#testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testInitializeBasicIsolatedScript(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--isolated-script-test-filter',
+ 'FooTest.testFoo::BarTest.testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testFilterArgWithPositiveFilterInFilterFile(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter=-negative1',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = 'positive1:positive2-negative1:negative2:negative3'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testFilterFileWithPositiveFilterInFilterArg(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('-negative2\n-negative3\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter',
+ 'positive1:positive2-negative1',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = 'positive1:positive2-negative1:negative2:negative3'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+ def testPositiveFilterInBothFileAndArg(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('positive1\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter',
+ 'positive2',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ with self.assertRaises(test_filter.ConflictingPositiveFiltersException):
+ test_filter.InitializeFilterFromArgs(args)
+
+ def testFilterArgWithFilterFileAllNegative(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile() as tmp_file:
+ tmp_file.write('-negative3\n-negative4\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter=-negative1:negative2',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = '-negative1:negative2:negative3:negative4'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEquals(actual, expected)
+
+
+class AppendPatternsToFilter(unittest.TestCase):
+ def testAllEmpty(self):
+ expected = ''
+ actual = test_filter.AppendPatternsToFilter('', [], [])
+ self.assertEquals(actual, expected)
+ def testAppendOnlyPositiveToEmptyFilter(self):
+ expected = 'positive'
+ actual = test_filter.AppendPatternsToFilter('', ['positive'])
+ self.assertEquals(actual, expected)
+ def testAppendOnlyNegativeToEmptyFilter(self):
+ expected = '-negative'
+ actual = test_filter.AppendPatternsToFilter('',
+ negative_patterns=['negative'])
+ self.assertEquals(actual, expected)
+ def testAppendToEmptyFilter(self):
+ expected = 'positive-negative'
+ actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative'])
+ self.assertEquals(actual, expected)
+ def testAppendToPositiveOnlyFilter(self):
+ expected = 'positive1:positive2-negative'
+ actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'],
+ ['negative'])
+ self.assertEquals(actual, expected)
+ def testAppendToNegativeOnlyFilter(self):
+ expected = 'positive-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'],
+ ['negative2'])
+ self.assertEquals(actual, expected)
+ def testAppendPositiveToFilter(self):
+ expected = 'positive1:positive2-negative1'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2'])
+ self.assertEquals(actual, expected)
+ def testAppendNegativeToFilter(self):
+ expected = 'positive1-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ negative_patterns=['negative2'])
+ self.assertEquals(actual, expected)
+ def testAppendBothToFilter(self):
+ expected = 'positive1:positive2-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ positive_patterns=['positive2'],
+ negative_patterns=['negative2'])
+ self.assertEquals(actual, expected)
+ def testAppendMultipleToFilter(self):
+ expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2', 'positive3'],
+ ['negative2', 'negative3'])
+ self.assertEquals(actual, expected)
+ def testRepeatedAppendToFilter(self):
+ expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+ filter_string = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2'],
+ ['negative2'])
+ actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'],
+ ['negative3'])
+ self.assertEquals(actual, expected)
+ def testAppendHashSeparatedPatternsToFilter(self):
+ expected = 'positive.test1:positive.test2-negative.test1:negative.test2'
+ actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1',
+ ['positive#test2'],
+ ['negative#test2'])
+ self.assertEquals(actual, expected)
+
+
+class HasPositivePatterns(unittest.TestCase):
+ def testEmpty(self):
+ expected = False
+ actual = test_filter.HasPositivePatterns('')
+ self.assertEquals(actual, expected)
+ def testHasOnlyPositive(self):
+ expected = True
+ actual = test_filter.HasPositivePatterns('positive')
+ self.assertEquals(actual, expected)
+ def testHasOnlyNegative(self):
+ expected = False
+ actual = test_filter.HasPositivePatterns('-negative')
+ self.assertEquals(actual, expected)
+ def testHasBoth(self):
+ expected = True
+ actual = test_filter.HasPositivePatterns('positive-negative')
+ self.assertEquals(actual, expected)
+
+
+if __name__ == '__main__':
+ sys.exit(unittest.main())
diff --git a/deps/v8/build/android/pylib/utils/time_profile.py b/deps/v8/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000000..094799c4f2
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+ """Class for simple profiling of action, with logging of cost."""
+
+ def __init__(self, description='operation'):
+ self._starttime = None
+ self._endtime = None
+ self._description = description
+ self.Start()
+
+ def Start(self):
+ self._starttime = time.time()
+ self._endtime = None
+
+ def GetDelta(self):
+ """Returns the rounded delta.
+
+ Also stops the timer if Stop() has not already been called.
+ """
+ if self._endtime is None:
+ self.Stop(log=False)
+ delta = self._endtime - self._starttime
+ delta = round(delta, 2) if delta < 10 else round(delta, 1)
+ return delta
+
+ def LogResult(self):
+ """Logs the result."""
+ logging.info('%s seconds to perform %s', self.GetDelta(), self._description)
+
+ def Stop(self, log=True):
+ """Stop profiling.
+
+ Args:
+ log: Log the delta (defaults to true).
+ """
+ self._endtime = time.time()
+ if log:
+ self.LogResult()
diff --git a/deps/v8/build/android/pylib/utils/xvfb.py b/deps/v8/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000000..cb9d50e8fd
--- /dev/null
+++ b/deps/v8/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+ """Return True if on Linux; else False."""
+ return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+ """Class to start and stop Xvfb if relevant. Nop if not Linux."""
+
+ def __init__(self):
+ self._pid = 0
+
+ def Start(self):
+ """Start Xvfb and set an appropriate DISPLAY environment. Linux only.
+
+ Copied from tools/code_coverage/coverage_posix.py
+ """
+ if not _IsLinux():
+ return
+ proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+ '-ac'],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ self._pid = proc.pid
+ if not self._pid:
+ raise Exception('Could not start Xvfb')
+ os.environ['DISPLAY'] = ':9'
+
+ # Now confirm, giving a chance for it to start if needed.
+ for _ in range(10):
+ proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+ _, retcode = os.waitpid(proc.pid, 0)
+ if retcode == 0:
+ break
+ time.sleep(0.25)
+ if retcode != 0:
+ raise Exception('Could not confirm Xvfb happiness')
+
+ def Stop(self):
+ """Stop Xvfb if needed. Linux only."""
+ if self._pid:
+ try:
+ os.kill(self._pid, signal.SIGKILL)
+ except:
+ pass
+ del os.environ['DISPLAY']
+ self._pid = 0
diff --git a/deps/v8/build/android/pylib/valgrind_tools.py b/deps/v8/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000000..4c27b083b7
--- /dev/null
+++ b/deps/v8/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,129 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=R0201
+
+import glob
+import logging
+import os.path
+import subprocess
+import sys
+
+from devil.android import device_errors
+from devil.android.valgrind_tools import base_tool
+from pylib.constants import DIR_SOURCE_ROOT
+
+
+def SetChromeTimeoutScale(device, scale):
+ """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+ path = '/data/local/tmp/chrome_timeout_scale'
+ if not scale or scale == 1.0:
+ # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+ device.RemovePath(path, force=True, as_root=True)
+ else:
+ device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+
+class AddressSanitizerTool(base_tool.BaseTool):
+ """AddressSanitizer tool."""
+
+ WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable memcmp overlap check. There are blobs (GL drivers)
+ # on some android devices that use memcmp on overlapping regions,
+ # nothing we can do about that.
+ EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+ def __init__(self, device):
+ super(AddressSanitizerTool, self).__init__()
+ self._device = device
+
+ @classmethod
+ def CopyFiles(cls, device):
+ """Copies ASan tools to the device."""
+ libs = glob.glob(os.path.join(DIR_SOURCE_ROOT,
+ 'third_party/llvm-build/Release+Asserts/',
+ 'lib/clang/*/lib/linux/',
+ 'libclang_rt.asan-arm-android.so'))
+ assert len(libs) == 1
+ subprocess.call(
+ [os.path.join(
+ DIR_SOURCE_ROOT,
+ 'tools/android/asan/third_party/asan_device_setup.sh'),
+ '--device', str(device),
+ '--lib', libs[0],
+ '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS])
+ device.WaitUntilFullyBooted()
+
+ def GetTestWrapper(self):
+ return AddressSanitizerTool.WRAPPER_NAME
+
+ def GetUtilWrapper(self):
+ """Returns the wrapper for utilities, such as forwarder.
+
+ AddressSanitizer wrapper must be added to all instrumented binaries,
+ including forwarder and the like. This can be removed if such binaries
+ were built without instrumentation. """
+ return self.GetTestWrapper()
+
+ def SetupEnvironment(self):
+ try:
+ self._device.EnableRoot()
+ except device_errors.CommandFailedError as e:
+ # Try to set the timeout scale anyway.
+      # TODO(jbudorick): Handle this exception appropriately after interface
+ # conversions are finished.
+ logging.error(str(e))
+ SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+ def CleanUpEnvironment(self):
+ SetChromeTimeoutScale(self._device, None)
+
+ def GetTimeoutScale(self):
+ # Very slow startup.
+ return 20.0
+
+
+TOOL_REGISTRY = {
+ 'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+ """Creates a tool with the specified tool name.
+
+ Args:
+ tool_name: Name of the tool to create.
+ device: A DeviceUtils instance.
+ Returns:
+ A tool for the specified tool_name.
+ """
+ if not tool_name:
+ return base_tool.BaseTool()
+
+ ctor = TOOL_REGISTRY.get(tool_name)
+ if ctor:
+ return ctor(device)
+ else:
+ print 'Unknown tool %s, available tools: %s' % (
+ tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+ sys.exit(1)
+
+def PushFilesForTool(tool_name, device):
+ """Pushes the files required for |tool_name| to |device|.
+
+ Args:
+ tool_name: Name of the tool to create.
+ device: A DeviceUtils instance.
+ """
+ if not tool_name:
+ return
+
+ clazz = TOOL_REGISTRY.get(tool_name)
+ if clazz:
+ clazz.CopyFiles(device)
+ else:
+ print 'Unknown tool %s, available tools: %s' % (
+ tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+ sys.exit(1)
diff --git a/deps/v8/build/android/pylintrc b/deps/v8/build/android/pylintrc
new file mode 100644
index 0000000000..2a721bf270
--- /dev/null
+++ b/deps/v8/build/android/pylintrc
@@ -0,0 +1,15 @@
+[FORMAT]
+
+max-line-length=80
+
+[MESSAGES CONTROL]
+
+disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
+
+[REPORTS]
+
+reports=no
+
+[VARIABLES]
+
+dummy-variables-rgx=^_.*$|dummy
diff --git a/deps/v8/build/android/resource_sizes.gni b/deps/v8/build/android/resource_sizes.gni
new file mode 100644
index 0000000000..a22b7a23b8
--- /dev/null
+++ b/deps/v8/build/android/resource_sizes.gni
@@ -0,0 +1,39 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/internal_rules.gni")
+
+# Generates a script in the bin directory that runs
+# //build/android/resource_sizes.py against the provided apk.
+#
+# Variables:
+# apk: The APK target against which resource_sizes.py should run.
template("android_resource_sizes_test") {
  generate_android_wrapper(target_name) {
    executable = "//build/android/resource_sizes.py"
    wrapper_script = "$root_out_dir/bin/${target_name}"

    # Getting the _apk_path below at build time requires the APK's
    # build config.
    deps = [
      invoker.apk,
    ]

    data_deps = [
      "//build/android:resource_sizes_py",
    ]

    # The APK path is resolved at build time out of the APK target's
    # .build_config file (deps_info:apk_path).
    _apk_build_config = get_label_info(invoker.apk, "target_gen_dir") + "/" +
                        get_label_info(invoker.apk, "name") + ".build_config"
    _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir)
    _apk_path =
        "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))"
    executable_args = [
      "--chartjson",
      _apk_path,
      "--chromium-output-directory",
      "@WrappedPath(.)",
    ]
  }
}
diff --git a/deps/v8/build/android/resource_sizes.py b/deps/v8/build/android/resource_sizes.py
new file mode 100755
index 0000000000..8d763b41be
--- /dev/null
+++ b/deps/v8/build/android/resource_sizes.py
@@ -0,0 +1,769 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Reports binary size metrics for an APK.
+
+More information at //docs/speed/binary_size/metrics.md.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+from contextlib import contextmanager
+import json
+import logging
+import os
+import posixpath
+import re
+import struct
+import sys
+import tempfile
+import zipfile
+import zlib
+
+from binary_size import apk_downloader
+import devil_chromium
+from devil.android.sdk import build_tools
+from devil.utils import cmd_helper
+from devil.utils import lazy
+import method_count
+from pylib import constants
+from pylib.constants import host_paths
+
+_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
+_BUILD_UTILS_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gyp')
+_APK_PATCH_SIZE_ESTIMATOR_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'apk-patch-size-estimator')
+
+with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
+ import perf_tests_results_helper # pylint: disable=import-error
+
+with host_paths.SysPath(host_paths.TRACING_PATH):
+ from tracing.value import convert_chart_json # pylint: disable=import-error
+
+with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
+ from util import build_utils # pylint: disable=import-error
+
+with host_paths.SysPath(_APK_PATCH_SIZE_ESTIMATOR_PATH):
+ import apk_patch_size_estimator # pylint: disable=import-error
+
+
+# Python had a bug in zipinfo parsing that triggers on ChromeModern.apk
+# https://bugs.python.org/issue14315
+def _PatchedDecodeExtra(self):
+ # Try to decode the extra field.
+ extra = self.extra
+ unpack = struct.unpack
+ while len(extra) >= 4:
+ tp, ln = unpack('<HH', extra[:4])
+ if tp == 1:
+ if ln >= 24:
+ counts = unpack('<QQQ', extra[4:28])
+ elif ln == 16:
+ counts = unpack('<QQ', extra[4:20])
+ elif ln == 8:
+ counts = unpack('<Q', extra[4:12])
+ elif ln == 0:
+ counts = ()
+ else:
+ raise RuntimeError, "Corrupt extra field %s"%(ln,)
+
+ idx = 0
+
+ # ZIP64 extension (large files and/or large archives)
+ if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
+ self.file_size = counts[idx]
+ idx += 1
+
+ if self.compress_size == 0xFFFFFFFFL:
+ self.compress_size = counts[idx]
+ idx += 1
+
+ if self.header_offset == 0xffffffffL:
+ self.header_offset = counts[idx]
+ idx += 1
+
+ extra = extra[ln + 4:]
+
# Install the patched parser on ZipInfo so zipfile can read APKs whose extra
# fields trip https://bugs.python.org/issue14315 (e.g. ChromeModern.apk).
zipfile.ZipInfo._decodeExtra = (  # pylint: disable=protected-access
    _PatchedDecodeExtra)
+
# Captures an entire config from aapt output.
_AAPT_CONFIG_PATTERN = r'config %s:(.*?)config [a-zA-Z-]+:'
# Matches string resource entries from aapt output.
_AAPT_ENTRY_RE = re.compile(
    r'resource (?P<id>\w{10}) [\w\.]+:string/.*?"(?P<val>.+?)"', re.DOTALL)
# Skeleton chart-json document; ResourceSizes copies it and fills 'charts'.
_BASE_CHART = {
    'format_version': '0.1',
    'benchmark_name': 'resource_sizes',
    'benchmark_description': 'APK resource size information.',
    'trace_rerun_options': [],
    'charts': {}
}
# Macro definitions look like (something, 123) when
# enable_resource_whitelist_generation=true.
_RC_HEADER_RE = re.compile(r'^#define (?P<name>\w+).* (?P<id>\d+)\)?$')
# Non-locale .pak assets (e.g. resources.pak, chrome_100_percent.pak).
_RE_NON_LANGUAGE_PAK = re.compile(r'^assets/.*(resources|percent)\.pak$')
# Maps reported metric name -> ELF section names that contribute to it.
_READELF_SIZES_METRICS = {
    'text': ['.text'],
    'data': ['.data', '.rodata', '.data.rel.ro', '.data.rel.ro.local'],
    'relocations': ['.rel.dyn', '.rel.plt', '.rela.dyn', '.rela.plt'],
    'unwind': [
        '.ARM.extab', '.ARM.exidx', '.eh_frame', '.eh_frame_hdr',
        '.ARM.exidxsentinel_section_after_text'
    ],
    'symbols': [
        '.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
        '.got.plt', '.hash', '.gnu.hash'
    ],
    'bss': ['.bss'],
    'other': [
        '.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
        '.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
        '.ARM.attributes', '.note.gnu.build-id', '.gnu.version',
        '.gnu.version_d', '.gnu.version_r', '.interp', '.gcc_except_table'
    ]
}
+
+
+def _PercentageDifference(a, b):
+ if a == 0:
+ return 0
+ return float(b - a) / a
+
+
def _RunReadelf(so_path, options, tool_prefix=''):
  """Runs (tool_prefix + )readelf with |options| on |so_path|; returns stdout."""
  cmd = [tool_prefix + 'readelf']
  cmd.extend(options)
  cmd.append(so_path)
  return cmd_helper.GetCmdOutput(cmd)
+
+
def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix):
  """Sums ELF section sizes of |lib_path| inside |apk_path|, grouped by metric.

  Returns:
    defaultdict mapping group name (keys of _READELF_SIZES_METRICS) -> bytes.
  """
  with Unzip(apk_path, filename=lib_path) as extracted_lib_path:
    grouped_section_sizes = collections.defaultdict(int)
    section_sizes = _CreateSectionNameSizeMap(extracted_lib_path, tool_prefix)
    # .items() rather than Python-2-only .iteritems(): same behavior on
    # Python 2, required for Python 3.
    for group_name, section_names in _READELF_SIZES_METRICS.items():
      for section_name in section_names:
        if section_name in section_sizes:
          grouped_section_sizes[group_name] += section_sizes.pop(section_name)

    # Group any unknown section headers into the "other" group.
    for section_header, section_size in section_sizes.items():
      print('Unknown elf section header: %s' % section_header)
      grouped_section_sizes['other'] += section_size

    return grouped_section_sizes
+
+
def _CreateSectionNameSizeMap(so_path, tool_prefix):
  """Parses `readelf -S --wide` output into a {section name: size} dict."""
  stdout = _RunReadelf(so_path, ['-S', '--wide'], tool_prefix)
  # Matches [ 2] .hash HASH 00000000006681f0 0001f0 003154 04 A 3 0 8
  sizes = {}
  for match in re.finditer(r'\[[\s\d]+\] (\..*)$', stdout, re.MULTILINE):
    fields = match.group(1).split()
    # Column 4 is the section size, printed in hex.
    sizes[fields[0]] = int(fields[4], 16)
  return sizes
+
+
def _ParseManifestAttributes(apk_path):
  """Extracts attributes from the APK manifest via `aapt d xmltree`.

  Returns:
    (sdk_version, skip_extract_lib): minSdkVersion as an int (aapt prints it
    in hex), and whether the manifest disables native-lib extraction.
  """
  # Check if the manifest specifies whether or not to extract native libs.
  skip_extract_lib = False
  output = cmd_helper.GetCmdOutput([
      _AAPT_PATH.read(), 'd', 'xmltree', apk_path, 'AndroidManifest.xml'])
  m = re.search(r'extractNativeLibs\(.*\)=\(.*\)(\w)', output)
  if m:
    skip_extract_lib = not bool(int(m.group(1)))

  # Dex decompression overhead varies by Android version.
  # NOTE(review): assumes minSdkVersion always appears in aapt's output;
  # m is None (AttributeError below) otherwise — confirm for analyzed APKs.
  m = re.search(r'android:minSdkVersion\(\w+\)=\(type \w+\)(\w+)', output)
  sdk_version = int(m.group(1), 16)

  return sdk_version, skip_extract_lib
+
+
+def _NormalizeLanguagePaks(translations, factor):
+ english_pak = translations.FindByPattern(r'.*/en[-_][Uu][Ss]\.l?pak')
+ num_translations = translations.GetNumEntries()
+ ret = 0
+ if english_pak:
+ ret -= translations.ComputeZippedSize()
+ ret += int(english_pak.compress_size * num_translations * factor)
+ return ret
+
+
def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations,
                            out_dir):
  """Estimates the expected overhead of untranslated strings in resources.arsc.

  See http://crbug.com/677966 for why this is necessary.

  Args:
    apk_path: Path of the APK to inspect.
    num_arsc_files: Number of .arsc entries in the APK.
    num_translations: Number of locales present in the APK.
    out_dir: Build output directory; required when num_arsc_files > 1.
  Returns:
    Estimated overhead in bytes (float; the 1.5 factor makes it fractional).
  """
  # If there are multiple .arsc files, use the resource packaged APK instead.
  if num_arsc_files > 1:
    if not out_dir:
      print('Skipping resources.arsc normalization (output directory required)')
      return 0
    ap_name = os.path.basename(apk_path).replace('.apk', '.intermediate.ap_')
    ap_path = os.path.join(out_dir, 'arsc/apks', ap_name)
    if not os.path.exists(ap_path):
      raise Exception('Missing expected file: %s, try rebuilding.' % ap_path)
    apk_path = ap_path

  aapt_output = _RunAaptDumpResources(apk_path)
  # en-rUS is in the default config and may be cluttered with non-translatable
  # strings, so en-rGB is a better baseline for finding missing translations.
  en_strings = _CreateResourceIdValueMap(aapt_output, 'en-rGB')
  fr_strings = _CreateResourceIdValueMap(aapt_output, 'fr')

  # en-US and en-GB will never be translated.
  config_count = num_translations - 2

  size = 0
  # .items() rather than Python-2-only .iteritems().
  for res_id, string_val in en_strings.items():
    # NOTE(review): assumes every en-rGB id also exists in the fr config;
    # a missing id raises KeyError — confirm against aapt output.
    if string_val == fr_strings[res_id]:
      string_size = len(string_val)
      # 7 bytes is the per-entry overhead (not specific to any string). See
      # https://android.googlesource.com/platform/frameworks/base.git/+/android-4.2.2_r1/tools/aapt/StringPool.cpp#414.
      # The 1.5 factor was determined experimentally and is meant to account for
      # other languages generally having longer strings than english.
      size += config_count * (7 + string_size * 1.5)

  return size
+
+
def _CreateResourceIdValueMap(aapt_output, lang):
  """Return a map of resource ids to string values for the given |lang|."""
  config_re = _AAPT_CONFIG_PATTERN % lang
  id_to_value = {}
  for config_section in re.finditer(config_re, aapt_output, re.DOTALL):
    for entry in re.finditer(_AAPT_ENTRY_RE, config_section.group(0)):
      id_to_value[entry.group('id')] = entry.group('val')
  return id_to_value
+
+
def _RunAaptDumpResources(apk_path):
  """Runs `aapt dump --values resources` on |apk_path| and returns its stdout.

  Raises:
    Exception: when aapt exits with a non-zero status.
  """
  cmd = [_AAPT_PATH.read(), 'dump', '--values', 'resources', apk_path]
  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
  if status != 0:
    raise Exception('Failed running aapt command: "%s" with output "%s".' %
                    (' '.join(cmd), output))
  return output
+
+
+class _FileGroup(object):
+ """Represents a category that apk files can fall into."""
+
+ def __init__(self, name):
+ self.name = name
+ self._zip_infos = []
+ self._extracted_multipliers = []
+
+ def AddZipInfo(self, zip_info, extracted_multiplier=0):
+ self._zip_infos.append(zip_info)
+ self._extracted_multipliers.append(extracted_multiplier)
+
+ def AllEntries(self):
+ return iter(self._zip_infos)
+
+ def GetNumEntries(self):
+ return len(self._zip_infos)
+
+ def FindByPattern(self, pattern):
+ return next((i for i in self._zip_infos if re.match(pattern, i.filename)),
+ None)
+
+ def FindLargest(self):
+ if not self._zip_infos:
+ return None
+ return max(self._zip_infos, key=lambda i: i.file_size)
+
+ def ComputeZippedSize(self):
+ return sum(i.compress_size for i in self._zip_infos)
+
+ def ComputeUncompressedSize(self):
+ return sum(i.file_size for i in self._zip_infos)
+
+ def ComputeExtractedSize(self):
+ ret = 0
+ for zi, multiplier in zip(self._zip_infos, self._extracted_multipliers):
+ ret += zi.file_size * multiplier
+ return ret
+
+ def ComputeInstallSize(self):
+ return self.ComputeExtractedSize() + self.ComputeZippedSize()
+
+
def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func):
  """Analyse APK to determine size contributions of different file classes.

  Args:
    apk_filename: Path of the .apk to analyze.
    apks_path: Path of the enclosing .apks bundle, or None.
    tool_prefix: Toolchain binary prefix for readelf.
    out_dir: Build output directory (may be falsy).
    report_func: Callable(chart, label, value, units) that records one metric.
  """
  file_groups = []

  def make_group(name):
    # Registers the group so the totals loop below sees all of them.
    group = _FileGroup(name)
    file_groups.append(group)
    return group

  native_code = make_group('Native code')
  java_code = make_group('Java code')
  native_resources_no_translations = make_group('Native resources (no l10n)')
  translations = make_group('Native resources (l10n)')
  stored_translations = make_group('Native resources stored (l10n)')
  icu_data = make_group('ICU (i18n library) data')
  v8_snapshots = make_group('V8 Snapshots')
  png_drawables = make_group('PNG drawables')
  res_directory = make_group('Non-compiled Android resources')
  arsc = make_group('Compiled Android resources')
  metadata = make_group('Package metadata')
  unknown = make_group('Unknown files')
  notices = make_group('licenses.notice file')
  unwind_cfi = make_group('unwind_cfi (dev and canary only)')

  with zipfile.ZipFile(apk_filename, 'r') as apk:
    apk_contents = apk.infolist()

  sdk_version, skip_extract_lib = _ParseManifestAttributes(apk_filename)

  # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
  # L, M: ART - .odex file is compiled version of the dex file (~4x).
  # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
  # shared apps (~4x).
  # Actual multipliers calculated using "apk_operations.py disk-usage".
  # Will need to update multipliers once apk obfuscation is enabled.
  # E.g. with obfuscation, the 4.04 changes to 4.46.
  speed_profile_dex_multiplier = 1.17
  orig_filename = apks_path or apk_filename
  is_monochrome = 'Monochrome' in orig_filename
  is_webview = 'WebView' in orig_filename
  is_shared_apk = sdk_version >= 24 and (is_monochrome or is_webview)
  if sdk_version < 21:
    # JellyBean & KitKat
    dex_multiplier = 1.16
  elif sdk_version < 24:
    # Lollipop & Marshmallow
    dex_multiplier = 4.04
  elif is_shared_apk:
    # Oreo and above, compilation_filter=speed
    dex_multiplier = 4.04
  else:
    # Oreo and above, compilation_filter=speed-profile
    dex_multiplier = speed_profile_dex_multiplier

  total_apk_size = os.path.getsize(apk_filename)
  for member in apk_contents:
    filename = member.filename
    if filename.endswith('/'):
      continue
    if filename.endswith('.so'):
      basename = posixpath.basename(filename)
      should_extract_lib = not skip_extract_lib and basename.startswith('lib')
      native_code.AddZipInfo(
          member, extracted_multiplier=int(should_extract_lib))
    elif filename.endswith('.dex'):
      java_code.AddZipInfo(member, extracted_multiplier=dex_multiplier)
    elif re.search(_RE_NON_LANGUAGE_PAK, filename):
      native_resources_no_translations.AddZipInfo(member)
    elif filename.endswith('.pak') or filename.endswith('.lpak'):
      compressed = member.compress_type != zipfile.ZIP_STORED
      bucket = translations if compressed else stored_translations
      extracted_multiplier = 0
      if compressed:
        extracted_multiplier = int('en_' in filename or 'en-' in filename)
      bucket.AddZipInfo(member, extracted_multiplier=extracted_multiplier)
    elif filename == 'assets/icudtl.dat':
      icu_data.AddZipInfo(member)
    elif filename.endswith('.bin'):
      v8_snapshots.AddZipInfo(member)
    elif filename.endswith('.png') or filename.endswith('.webp'):
      png_drawables.AddZipInfo(member)
    elif filename.startswith('res/'):
      res_directory.AddZipInfo(member)
    elif filename.endswith('.arsc'):
      arsc.AddZipInfo(member)
    elif filename.startswith('META-INF') or filename == 'AndroidManifest.xml':
      metadata.AddZipInfo(member)
    elif filename.endswith('.notice'):
      notices.AddZipInfo(member)
    elif filename.startswith('assets/unwind_cfi'):
      unwind_cfi.AddZipInfo(member)
    else:
      unknown.AddZipInfo(member)

  if apks_path:
    # We're mostly focused on size of Chrome for non-English locales, so assume
    # Hindi (arbitrarily chosen) locale split is installed.
    with zipfile.ZipFile(apks_path) as z:
      hindi_apk_info = z.getinfo('splits/base-hi.apk')
      total_apk_size += hindi_apk_info.file_size

  total_install_size = total_apk_size
  total_install_size_android_go = total_apk_size
  zip_overhead = total_apk_size

  for group in file_groups:
    actual_size = group.ComputeZippedSize()
    install_size = group.ComputeInstallSize()
    uncompressed_size = group.ComputeUncompressedSize()
    extracted_size = group.ComputeExtractedSize()
    total_install_size += extracted_size
    zip_overhead -= actual_size

    report_func('Breakdown', group.name + ' size', actual_size, 'bytes')
    report_func('InstallBreakdown', group.name + ' size', int(install_size),
                'bytes')
    # Only a few metrics are compressed in the first place.
    # To avoid over-reporting, track uncompressed size only for compressed
    # entries.
    if uncompressed_size != actual_size:
      report_func('Uncompressed', group.name + ' size', uncompressed_size,
                  'bytes')

    if group is java_code and is_shared_apk:
      # Updates are compiled using quicken, but system image uses speed-profile.
      extracted_size = int(uncompressed_size * speed_profile_dex_multiplier)
      total_install_size_android_go += extracted_size
      report_func('InstallBreakdownGo', group.name + ' size',
                  actual_size + extracted_size, 'bytes')
    elif group is translations and apks_path:
      # Assume Hindi rather than English (accounted for above in total_apk_size)
      total_install_size_android_go += actual_size
    else:
      total_install_size_android_go += extracted_size

  # Per-file zip overhead is caused by:
  # * 30 byte entry header + len(file name)
  # * 46 byte central directory entry + len(file name)
  # * 0-3 bytes for zipalign.
  report_func('Breakdown', 'Zip Overhead', zip_overhead, 'bytes')
  report_func('InstallSize', 'APK size', total_apk_size, 'bytes')
  report_func('InstallSize', 'Estimated installed size',
              int(total_install_size), 'bytes')
  if is_shared_apk:
    report_func('InstallSize', 'Estimated installed size (Android Go)',
                int(total_install_size_android_go), 'bytes')
  transfer_size = _CalculateCompressedSize(apk_filename)
  report_func('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes')

  # Size of main dex vs remaining.
  main_dex_info = java_code.FindByPattern('classes.dex')
  if main_dex_info:
    main_dex_size = main_dex_info.file_size
    report_func('Specifics', 'main dex size', main_dex_size, 'bytes')
    secondary_size = java_code.ComputeUncompressedSize() - main_dex_size
    report_func('Specifics', 'secondary dex size', secondary_size, 'bytes')

  main_lib_info = native_code.FindLargest()
  native_code_unaligned_size = 0
  for lib_info in native_code.AllEntries():
    section_sizes = _ExtractLibSectionSizesFromApk(
        apk_filename, lib_info.filename, tool_prefix)
    # .items() rather than Python-2-only .iteritems() (here and below).
    native_code_unaligned_size += sum(
        v for k, v in section_sizes.items() if k != 'bss')
    # Size of main .so vs remaining.
    if lib_info == main_lib_info:
      main_lib_size = lib_info.file_size
      report_func('Specifics', 'main lib size', main_lib_size, 'bytes')
      secondary_size = native_code.ComputeUncompressedSize() - main_lib_size
      report_func('Specifics', 'other lib size', secondary_size, 'bytes')

      for metric_name, size in section_sizes.items():
        report_func('MainLibInfo', metric_name, size, 'bytes')

  # Main metric that we want to monitor for jumps.
  normalized_apk_size = total_apk_size
  # unwind_cfi exists only in dev, canary, and non-channel builds.
  normalized_apk_size -= unwind_cfi.ComputeZippedSize()
  # Sections within .so files get 4kb aligned, so use section sizes rather than
  # file size. Also gets rid of compression.
  normalized_apk_size -= native_code.ComputeZippedSize()
  normalized_apk_size += native_code_unaligned_size
  # Unaligned size should be ~= uncompressed size or something is wrong.
  # As of now, padding_fraction ~= .007
  padding_fraction = -_PercentageDifference(
      native_code.ComputeUncompressedSize(), native_code_unaligned_size)
  assert 0 <= padding_fraction < .02, 'Padding was: {}'.format(padding_fraction)
  # Normalized dex size: size within the zip + size on disk for Android Go
  # devices (which ~= uncompressed dex size).
  normalized_apk_size += java_code.ComputeUncompressedSize()
  if apks_path:
    # Locale normalization not needed when measuring only one locale.
    # E.g. a change that adds 300 chars of unstranslated strings would cause the
    # metric to be off by only 390 bytes (assuming a multiplier of 2.3 for
    # Hindi).
    pass
  else:
    # Avoid noise caused when strings change and translations haven't yet been
    # updated.
    num_translations = translations.GetNumEntries()
    num_stored_translations = stored_translations.GetNumEntries()

    if num_translations > 1:
      # Multipliers found by looking at MonochromePublic.apk and seeing how much
      # smaller en-US.pak is relative to the average locale.pak.
      normalized_apk_size += _NormalizeLanguagePaks(translations, 1.17)
    if num_stored_translations > 1:
      normalized_apk_size += _NormalizeLanguagePaks(stored_translations, 1.43)
    if num_translations + num_stored_translations > 1:
      if num_translations == 0:
        # WebView stores all locale paks uncompressed.
        num_arsc_translations = num_stored_translations
      else:
        # Monochrome has more configurations than Chrome since it includes
        # WebView (which supports more locales), but these should mostly be
        # empty so ignore them here.
        num_arsc_translations = num_translations
      normalized_apk_size += int(
          _NormalizeResourcesArsc(apk_filename, arsc.GetNumEntries(),
                                  num_arsc_translations, out_dir))

  report_func('Specifics', 'normalized apk size', normalized_apk_size, 'bytes')
  # The "file count" metric cannot be grouped with any other metrics when the
  # end result is going to be uploaded to the perf dashboard in the HistogramSet
  # format due to mixed units (bytes vs. zip entries) causing malformed
  # summaries to be generated.
  # TODO(https://crbug.com/903970): Remove this workaround if unit mixing is
  # ever supported.
  report_func('FileCount', 'file count', len(apk_contents), 'zip entries')

  for info in unknown.AllEntries():
    sys.stderr.write(
        'Unknown entry: %s %d\n' % (info.filename, info.compress_size))
+
+
def _AnnotatePakResources(out_dir):
  """Returns a pair of maps: id_name_map, id_header_map."""
  print('Looking at resources in: %s' % out_dir)

  grit_headers = []
  for root, _, files in os.walk(out_dir):
    if root.endswith('grit'):
      for f in files:
        if f.endswith('.h'):
          grit_headers.append(os.path.join(root, f))
  assert grit_headers, 'Failed to find grit headers in %s' % out_dir

  id_name_map = {}
  id_header_map = {}
  for header in grit_headers:
    with open(header, 'r') as f:
      for line in f.readlines():
        m = _RC_HEADER_RE.match(line.strip())
        if not m:
          continue
        i = int(m.group('id'))
        name = m.group('name')
        # Warn (but keep the later definition) when two headers disagree.
        if i in id_name_map and name != id_name_map[i]:
          print('WARNING: Resource ID conflict %s (%s vs %s)' % (
              i, id_name_map[i], name))
        id_name_map[i] = name
        id_header_map[i] = os.path.relpath(header, out_dir)
  return id_name_map, id_header_map
+
+
+def _CalculateCompressedSize(file_path):
+ CHUNK_SIZE = 256 * 1024
+ compressor = zlib.compressobj()
+ total_size = 0
+ with open(file_path, 'rb') as f:
+ for chunk in iter(lambda: f.read(CHUNK_SIZE), ''):
+ total_size += len(compressor.compress(chunk))
+ total_size += len(compressor.flush())
+ return total_size
+
+
def _DoDexAnalysis(apk_filename, report_func):
  """Reports per-metric dex method counts and the total DexCache size."""
  sizes, total_size = method_count.ExtractSizesFromZip(apk_filename)

  dex_metrics = method_count.CONTRIBUTORS_TO_DEX_CACHE
  cumulative_sizes = collections.defaultdict(int)
  for classes_dex_sizes in sizes.values():
    for key in dex_metrics:
      cumulative_sizes[key] += classes_dex_sizes[key]
  # .items() rather than Python-2-only .iteritems(): same behavior on
  # Python 2, required for Python 3.
  for key, label in dex_metrics.items():
    report_func('Dex', label, cumulative_sizes[key], 'entries')

  report_func('DexCache', 'DexCache', total_size, 'bytes')
+
+
def _PrintPatchSizeEstimate(new_apk, builder, bucket, report_func):
  """Reports estimated update-patch sizes against a downloaded reference APK.

  Silently does nothing when no reference APK can be downloaded.
  """
  apk_name = os.path.basename(new_apk)
  # Reference APK paths have spaces replaced by underscores.
  builder = builder.replace(' ', '_')
  old_apk = apk_downloader.MaybeDownloadApk(
      builder, apk_downloader.CURRENT_MILESTONE, apk_name,
      apk_downloader.DEFAULT_DOWNLOAD_PATH, bucket)
  if old_apk:
    # Use a temp dir in case patch size functions fail to clean up temp files.
    with build_utils.TempDir() as tmp:
      tmp_name = os.path.join(tmp, 'patch.tmp')
      bsdiff = apk_patch_size_estimator.calculate_bsdiff(
          old_apk, new_apk, None, tmp_name)
      report_func('PatchSizeEstimate', 'BSDiff (gzipped)', bsdiff, 'bytes')
      fbf = apk_patch_size_estimator.calculate_filebyfile(
          old_apk, new_apk, None, tmp_name)
      report_func('PatchSizeEstimate', 'FileByFile (gzipped)', fbf, 'bytes')
+
+
@contextmanager
def Unzip(zip_file, filename=None):
  """Utility for temporary use of a single file in a zip archive."""
  with build_utils.TempDir() as unzipped_dir:
    unzipped_files = build_utils.ExtractAll(
        zip_file, unzipped_dir, True, pattern=filename)
    if not unzipped_files:
      raise Exception(
          '%s not found in %s' % (filename, zip_file))
    yield unzipped_files[0]
+
+
def _ConfigOutDirAndToolsPrefix(out_dir):
  """Resolves the build output directory and the toolchain binary prefix.

  Returns:
    (out_dir, tool_prefix); tool_prefix is '' when no output directory can
    be determined.
  """
  if out_dir:
    constants.SetOutputDirectory(out_dir)
  else:
    # Fall back to any globally-configured output directory.
    out_dir = constants.GetOutDirectory()
  if out_dir:
    build_vars = build_utils.ReadBuildVars(
        os.path.join(out_dir, "build_vars.txt"))
    tool_prefix = os.path.join(out_dir, build_vars['android_tool_prefix'])
  else:
    tool_prefix = ''
  return out_dir, tool_prefix
+
+
def _Analyze(apk_path, chartjson, args):
  """Runs all size analyses on |apk_path|, recording metrics into |chartjson|."""

  def report_func(*args):
    # Do not add any new metrics without also documenting them in:
    # //docs/speed/binary_size/metrics.md.
    perf_tests_results_helper.ReportPerfResult(chartjson, *args)

  out_dir, tool_prefix = _ConfigOutDirAndToolsPrefix(args.out_dir)
  apks_path = args.input if args.input.endswith('.apks') else None
  _DoApkAnalysis(apk_path, apks_path, tool_prefix, out_dir, report_func)
  _DoDexAnalysis(apk_path, report_func)
  if args.estimate_patch_size:
    _PrintPatchSizeEstimate(apk_path, args.reference_apk_builder,
                            args.reference_apk_bucket, report_func)
+
+
def ResourceSizes(args):
  """Measures |args.input| and optionally writes chartjson/histogram output.

  Returns:
    0 on success, 1 when chartjson -> histogram conversion fails.
  """
  # Function-scope import keeps the (new) stdlib dependency local.
  import copy
  # deepcopy rather than dict.copy(): a shallow copy shares the nested
  # 'charts' dict, so metrics would be written into the module-level
  # _BASE_CHART template as a side effect.
  chartjson = copy.deepcopy(_BASE_CHART) if args.output_format else None

  if args.input.endswith('.apk'):
    _Analyze(args.input, chartjson, args)
  elif args.input.endswith('.apks'):
    with tempfile.NamedTemporaryFile(suffix='.apk') as f:
      with zipfile.ZipFile(args.input) as z:
        # Currently bundletool is creating two apks when .apks is created
        # without specifying an sdkVersion. Always measure the one with an
        # uncompressed shared library.
        try:
          info = z.getinfo('splits/base-master_2.apk')
        except KeyError:
          info = z.getinfo('splits/base-master.apk')
        f.write(z.read(info))
        f.flush()
      _Analyze(f.name, chartjson, args)
  else:
    raise Exception('Unknown file type: ' + args.input)

  if chartjson:
    results_path = os.path.join(args.output_dir, 'results-chart.json')
    logging.critical('Dumping chartjson to %s', results_path)
    with open(results_path, 'w') as json_file:
      json.dump(chartjson, json_file)

    # We would ideally generate a histogram set directly instead of generating
    # chartjson then converting. However, perf_tests_results_helper is in
    # //build, which doesn't seem to have any precedent for depending on
    # anything in Catapult. This can probably be fixed, but since this doesn't
    # need to be super fast or anything, converting is a good enough solution
    # for the time being.
    if args.output_format == 'histograms':
      histogram_result = convert_chart_json.ConvertChartJson(results_path)
      if histogram_result.returncode != 0:
        logging.error('chartjson conversion failed with error: %s',
                      histogram_result.stdout)
        return 1

      histogram_path = os.path.join(args.output_dir, 'perf_results.json')
      logging.critical('Dumping histograms to %s', histogram_path)
      with open(histogram_path, 'w') as json_file:
        json_file.write(histogram_result.stdout)

  return 0
+
+
def main():
  """Parses arguments, runs the analysis, writes isolated-script output.

  Returns:
    The result of ResourceSizes() (0 on success).
  """
  argparser = argparse.ArgumentParser(description='Print APK size metrics.')
  argparser.add_argument(
      '--min-pak-resource-size',
      type=int,
      default=20 * 1024,
      help='Minimum byte size of displayed pak resources.')
  argparser.add_argument(
      '--chromium-output-directory',
      dest='out_dir',
      type=os.path.realpath,
      help='Location of the build artifacts.')
  argparser.add_argument(
      '--chartjson',
      action='store_true',
      help='DEPRECATED. Use --output-format=chartjson '
      'instead.')
  argparser.add_argument(
      '--output-format',
      choices=['chartjson', 'histograms'],
      help='Output the results to a file in the given '
      'format instead of printing the results.')
  argparser.add_argument(
      '--output-dir', default='.', help='Directory to save chartjson to.')
  argparser.add_argument('--loadable_module', help='Obsolete (ignored).')
  argparser.add_argument(
      '--estimate-patch-size',
      action='store_true',
      help='Include patch size estimates. Useful for perf '
      'builders where a reference APK is available but adds '
      '~3 mins to run time.')
  argparser.add_argument(
      '--reference-apk-builder',
      default=apk_downloader.DEFAULT_BUILDER,
      help='Builder name to use for reference APK for patch '
      'size estimates.')
  argparser.add_argument(
      '--reference-apk-bucket',
      default=apk_downloader.DEFAULT_BUCKET,
      help='Storage bucket holding reference APKs.')

  # Accepted to conform to the isolated script interface, but ignored.
  argparser.add_argument(
      '--isolated-script-test-filter', help=argparse.SUPPRESS)
  argparser.add_argument(
      '--isolated-script-test-output',
      type=os.path.realpath,
      help='File to which results will be written in the '
      'simplified JSON output format.')

  argparser.add_argument('input', help='Path to .apk or .apks file to measure.')
  args = argparser.parse_args()

  devil_chromium.Initialize(output_directory=args.out_dir)

  # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
  if args.chartjson:
    args.output_format = 'chartjson'

  isolated_script_output = {'valid': False, 'failures': []}

  try:
    result = ResourceSizes(args)
    isolated_script_output = {
        'valid': True,
        'failures': ['resource_sizes'] if result else [],
    }
  finally:
    # Always emit the isolated-script JSON; if ResourceSizes raised, the
    # 'valid': False placeholder above is what gets written.
    if args.isolated_script_test_output:
      with open(args.isolated_script_test_output, 'w') as output_file:
        json.dump(isolated_script_output, output_file)

  return result
+
+
# Script entry point; exit status propagates from main().
if __name__ == '__main__':
  sys.exit(main())
diff --git a/deps/v8/build/android/resource_sizes.pydeps b/deps/v8/build/android/resource_sizes.pydeps
new file mode 100644
index 0000000000..7c075c2477
--- /dev/null
+++ b/deps/v8/build/android/resource_sizes.pydeps
@@ -0,0 +1,63 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/resource_sizes.pydeps build/android/resource_sizes.py
+../../third_party/apk-patch-size-estimator/apk_patch_size_estimator.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/dexdump.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/third_party/vinn/vinn/__init__.py
+../../third_party/catapult/third_party/vinn/vinn/_vinn.py
+../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../third_party/catapult/tracing/tracing/__init__.py
+../../third_party/catapult/tracing/tracing/value/__init__.py
+../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
+../../third_party/catapult/tracing/tracing_project.py
+../../third_party/depot_tools/download_from_google_storage.py
+../../third_party/depot_tools/subprocess2.py
+../../third_party/depot_tools/upload_to_google_storage.py
+../gn_helpers.py
+../util/lib/common/perf_result_data_type.py
+../util/lib/common/perf_tests_results_helper.py
+binary_size/__init__.py
+binary_size/apk_downloader.py
+devil_chromium.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+method_count.py
+pylib/__init__.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+resource_sizes.py
diff --git a/deps/v8/build/android/screenshot.py b/deps/v8/build/android/screenshot.py
new file mode 100755
index 0000000000..6ab906086d
--- /dev/null
+++ b/deps/v8/build/android/screenshot.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import devil_chromium
+from devil.android.tools import screenshot
+
+if __name__ == '__main__':
+ devil_chromium.Initialize()
+ sys.exit(screenshot.main())
diff --git a/deps/v8/build/android/stacktrace/BUILD.gn b/deps/v8/build/android/stacktrace/BUILD.gn
new file mode 100644
index 0000000000..a3957fec3d
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/BUILD.gn
@@ -0,0 +1,17 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+java_binary("java_deobfuscate") {
+ main_class = "org.chromium.build.FlushingReTrace"
+ java_files = [ "java/org/chromium/build/FlushingReTrace.java" ]
+ deps = [
+ "//third_party/proguard:retrace_java",
+ ]
+ data = [
+ "$root_build_dir/lib.java/build/android/stacktrace/java_deobfuscate.jar",
+ "$root_build_dir/bin/java_deobfuscate",
+ ]
+}
diff --git a/deps/v8/build/android/stacktrace/README.md b/deps/v8/build/android/stacktrace/README.md
new file mode 100644
index 0000000000..bfa537c5ad
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/README.md
@@ -0,0 +1,23 @@
+# java_deobfuscate
+
+A wrapper around ProGuard's ReTrace tool, which:
+
+1) Updates the regular expression used to identify stack lines, and
+2) Streams its output.
+
+The second point here is what allows you to run:
+
+ adb logcat | out/Default/bin/java_deobfuscate out/Default/apks/ChromePublic.apk.mapping
+
+And have it actually show output without logcat terminating.
+
+
+# stackwalker.py
+
+Extracts Breakpad microdumps from a log file and uses `stackwalker` to symbolize
+them.
+
+
+# crashpad_stackwalker.py
+
+Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
diff --git a/deps/v8/build/android/stacktrace/crashpad_stackwalker.py b/deps/v8/build/android/stacktrace/crashpad_stackwalker.py
new file mode 100755
index 0000000000..a538105be4
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/crashpad_stackwalker.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
+# All the non-trivial operations are performed by generate_breakpad_symbols.py,
+# dump_syms, minidump_dump and minidump_stackwalk.
+
+import argparse
+import logging
+import os
+import posixpath
+import re
+import sys
+import shutil
+import subprocess
+import tempfile
+
+_BUILD_ANDROID_PATH = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(_BUILD_ANDROID_PATH)
+import devil_chromium
+from devil.android import device_utils
+from devil.utils import timeout_retry
+
+
+def _CreateSymbolsDir(build_path, dynamic_library_names):
+ generator = os.path.normpath(
+ os.path.join(_BUILD_ANDROID_PATH, '..', '..', 'components', 'crash',
+ 'content', 'tools', 'generate_breakpad_symbols.py'))
+ syms_dir = os.path.join(build_path, 'crashpad_syms')
+ shutil.rmtree(syms_dir, ignore_errors=True)
+ os.mkdir(syms_dir)
+ for lib in dynamic_library_names:
+ unstripped_library_path = os.path.join(build_path, 'lib.unstripped', lib)
+ if not os.path.exists(unstripped_library_path):
+ continue
+ logging.info('Generating symbols for: %s', unstripped_library_path)
+ cmd = [
+ generator,
+ '--symbols-dir',
+ syms_dir,
+ '--build-dir',
+ build_path,
+ '--binary',
+ unstripped_library_path,
+ ]
+ return_code = subprocess.call(cmd)
+ if return_code != 0:
+ logging.error('Could not extract symbols, command failed: %s',
+ ' '.join(cmd))
+ return syms_dir
+
+
+def _ChooseLatestCrashpadDump(device, crashpad_dump_path):
+ if not device.PathExists(crashpad_dump_path):
+ logging.warning('Crashpad dump directory does not exist: %s',
+ crashpad_dump_path)
+ return None
+ latest = None
+ latest_timestamp = 0
+ for crashpad_file in device.ListDirectory(crashpad_dump_path):
+ if crashpad_file.endswith('.dmp'):
+ stat = device.StatPath(posixpath.join(crashpad_dump_path, crashpad_file))
+ current_timestamp = stat['st_mtime']
+ if current_timestamp > latest_timestamp:
+ latest_timestamp = current_timestamp
+ latest = crashpad_file
+ return latest
+
+
+def _ExtractLibraryNamesFromDump(build_path, dump_path):
+ default_library_name = 'libmonochrome.so'
+ dumper_path = os.path.join(build_path, 'minidump_dump')
+ if not os.access(dumper_path, os.X_OK):
+ logging.warning(
+ 'Cannot extract library name from dump because %s is not found, '
+ 'default to: %s', dumper_path, default_library_name)
+ return [default_library_name]
+ p = subprocess.Popen([dumper_path, dump_path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ # Dumper errors often do not affect stack walkability, just a warning.
+ logging.warning('Reading minidump failed with output:\n%s', stderr)
+
+ library_names = []
+ module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
+ r'"(?P<library_name>lib[^. ]+.so)"')
+ in_module = False
+ for line in stdout.splitlines():
+ line = line.lstrip().rstrip('\n')
+ if line == 'MDRawModule':
+ in_module = True
+ continue
+ if line == '':
+ in_module = False
+ continue
+ if in_module:
+ m = module_library_line_re.match(line)
+ if m:
+ library_names.append(m.group('library_name'))
+ if not library_names:
+ logging.warning(
+ 'Could not find any library name in the dump, '
+ 'default to: %s', default_library_name)
+ return [default_library_name]
+ return library_names
+
+
+def main():
+ logging.basicConfig(level=logging.INFO)
+ parser = argparse.ArgumentParser(
+ description='Fetches Crashpad dumps from a given device, '
+ 'walks and symbolizes the stacks.')
+ parser.add_argument('--device', required=True, help='Device serial number')
+ parser.add_argument(
+ '--adb-path', required=True, help='Path to the "adb" command')
+ parser.add_argument(
+ '--build-path',
+ required=True,
+ help='Build output directory, equivalent to CHROMIUM_OUTPUT_DIR')
+ parser.add_argument(
+ '--chrome-cache-path',
+ required=True,
+ help='Directory on the device where Chrome stores cached files,'
+ ' crashpad stores dumps in a subdirectory of it')
+ args = parser.parse_args()
+
+ stackwalk_path = os.path.join(args.build_path, 'minidump_stackwalk')
+ if not os.path.exists(stackwalk_path):
+ logging.error('Missing minidump_stackwalk executable')
+ return 1
+
+ devil_chromium.Initialize(adb_path=args.adb_path)
+ device = device_utils.DeviceUtils(args.device)
+
+ device_crashpad_path = posixpath.join(args.chrome_cache_path, 'Crashpad',
+ 'pending')
+
+ def CrashpadDumpExists():
+ return _ChooseLatestCrashpadDump(device, device_crashpad_path)
+
+ crashpad_file = timeout_retry.WaitFor(
+ CrashpadDumpExists, wait_period=1, max_tries=9)
+ if not crashpad_file:
+ logging.error('Could not locate a crashpad dump')
+ return 1
+
+ dump_dir = tempfile.mkdtemp()
+ symbols_dir = None
+ try:
+ device.PullFile(
+ device_path=posixpath.join(device_crashpad_path, crashpad_file),
+ host_path=dump_dir)
+ dump_full_path = os.path.join(dump_dir, crashpad_file)
+ library_names = _ExtractLibraryNamesFromDump(args.build_path,
+ dump_full_path)
+ symbols_dir = _CreateSymbolsDir(args.build_path, library_names)
+ stackwalk_cmd = [stackwalk_path, dump_full_path, symbols_dir]
+ subprocess.call(stackwalk_cmd)
+ finally:
+ shutil.rmtree(dump_dir, ignore_errors=True)
+ if symbols_dir:
+ shutil.rmtree(symbols_dir, ignore_errors=True)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java b/deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
new file mode 100644
index 0000000000..baa931328b
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
@@ -0,0 +1,116 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+
+import proguard.retrace.ReTrace;
+
+/**
+ * A wrapper around ReTrace that:
+ * 1. Hardcodes a more useful line regular expression
+ * 2. Disables output buffering
+ */
+public class FlushingReTrace {
+ // E.g.: D/ConnectivityService(18029): Message
+ // E.g.: W/GCM ( 151): Message
+ // E.g.: 09-08 14:22:59.995 18029 18055 I ProcessStatsService: Message
+ // E.g.: 09-08 14:30:59.145 17731 18020 D MDnsDS : Message
+ private static final String LOGCAT_PREFIX =
+ "(?:[VDIWEF]/.*?\\( *\\d+\\): |\\d\\d-\\d\\d [0-9:. ]+[VDIWEF] .*?: )?";
+
+ // Note: Order of these sub-patterns defines their precedence.
+    // Note: Deobfuscation of methods without the presence of line numbers basically never works.
+    // There is a test for these patterns at //build/android/stacktrace/java_deobfuscate_test.py
+ private static final String LINE_PARSE_REGEX =
+ // Eagerly match logcat prefix to avoid conflicting with the patterns below.
+ LOGCAT_PREFIX
+ + "(?:"
+ // Based on default ReTrace regex, but with whitespaces allowed in file:line parentheses
+    // and "at" changed to "(?::|\bat)" to also allow ':'
+ // E.g.: 06-22 13:58:02.895 4674 4674 E THREAD_STATE: bLA.a( PG : 173 )
+ // Normal stack trace lines look like:
+ // \tat org.chromium.chrome.browser.tab.Tab.handleJavaCrash(Tab.java:682)
+ + "(?:.*?(?::|\\bat)\\s+%c\\.%m\\s*\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\))|"
+ // E.g.: Caused by: java.lang.NullPointerException: Attempt to read from field 'int bLA'
+ // on a null object reference
+ + "(?:.*java\\.lang\\.NullPointerException.*[\"']%t\\s*%c\\.(?:%f|%m\\(%a\\))[\"'].*)|"
+ // E.g.: java.lang.VerifyError: bLA
+ + "(?:java\\.lang\\.VerifyError: %c)|"
+ // E.g.: java.lang.NoSuchFieldError: No instance field e of type L...; in class LbxK;
+ + "(?:java\\.lang\\.NoSuchFieldError: No instance field %f of type .*? in class L%C;)|"
+ // E.g.: Object of type Clazz was not destroyed... (See LifetimeAssert.java)
+ + "(?:.*?Object of type %c .*)|"
+ // E.g.: VFY: unable to resolve new-instance 3810 (LSome/Framework/Class;) in Lfoo/Bar;
+ + "(?:.*L%C;.*)|"
+ // E.g.: END SomeTestClass#someMethod
+ + "(?:.*?%c#%m.*?)|"
+ // Special-case for a common junit logcat message:
+ // E.g.: java.lang.NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+ + "(?:.* isTestClass for %c)|"
+ // E.g.: Caused by: java.lang.RuntimeException: Intentional Java Crash
+ + "(?:Caused by: %c:.*)|"
+ // Quoted values and lines that end with a class / class+method:
+ // E.g.: The class: Foo
+ // E.g.: INSTRUMENTATION_STATUS: class=Foo
+ // E.g.: NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
+ // E.g.: Could not find class 'SomeFrameworkClass', referenced from method Foo.bar
+ // E.g.: Could not find method SomeFrameworkMethod, referenced from method Foo.bar
+ // E.g.: The member "Foo.bar"
+ // E.g.: The class "Foobar"
+ // Be careful about matching %c without %m since language tags look like class names.
+ + "(?:.*?%c\\.%m)|"
+ + "(?:.*?\"%c\\.%m\".*)|"
+ + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?\"%c\".*)|"
+ + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?%c)|"
+ // E.g.: java.lang.RuntimeException: Intentional Java Crash
+ + "(?:%c:.*)|"
+ // See if entire line matches a class name (e.g. for manual deobfuscation)
+ + "(?:%c)"
+ + ")";
+
+ private static void usage() {
+ System.err.println("Usage: echo $OBFUSCATED_CLASS | java_deobfuscate Foo.apk.mapping");
+ System.err.println("Usage: java_deobfuscate Foo.apk.mapping < foo.log");
+ System.err.println("Note: Deobfuscation of symbols outside the context of stack "
+ + "traces will work only when lines match the regular expression defined "
+ + "in FlushingReTrace.java.");
+ System.err.println("Also: Deobfuscation of method names without associated line "
+ + "numbers does not seem to work.");
+ System.exit(1);
+ }
+
+ public static void main(String[] args) {
+ if (args.length != 1 || args[0].startsWith("-")) {
+ usage();
+ }
+
+ File mappingFile = new File(args[0]);
+ try {
+ LineNumberReader reader = new LineNumberReader(
+ new BufferedReader(new InputStreamReader(System.in, "UTF-8")));
+
+ // Enabling autoFlush is the main difference from ReTrace.main().
+ boolean autoFlush = true;
+ PrintWriter writer =
+ new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"), autoFlush);
+
+ boolean verbose = false;
+ new ReTrace(LINE_PARSE_REGEX, verbose, mappingFile).retrace(reader, writer);
+ } catch (IOException ex) {
+ // Print a verbose stack trace.
+ ex.printStackTrace();
+ System.exit(1);
+ }
+
+ System.exit(0);
+ }
+}
diff --git a/deps/v8/build/android/stacktrace/java_deobfuscate_test.py b/deps/v8/build/android/stacktrace/java_deobfuscate_test.py
new file mode 100755
index 0000000000..98b66dd02e
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/java_deobfuscate_test.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_deobfuscate."""
+
+import argparse
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+# Set by command-line argument.
+_JAVA_DEOBFUSCATE_PATH = None
+
+LINE_PREFIXES = [
+ '',
+ # logcat -v threadtime
+ '09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ',
+ # logcat
+ 'W/GCM (15158): ',
+ 'W/GCM ( 158): ',
+]
+
+TEST_MAP = """\
+this.was.Deobfuscated -> FOO:
+ int[] mFontFamily -> a
+ 1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+never.Deobfuscated -> NOTFOO:
+ int[] mFontFamily -> a
+ 1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
+"""
+
+TEST_DATA = [
+ '',
+ 'FOO',
+ 'FOO.bar',
+ 'Here is a FOO',
+ 'Here is a class FOO',
+ 'Here is a class FOO baz',
+ 'Here is a "FOO" baz',
+ 'Here is a type "FOO" baz',
+ 'Here is a "FOO.bar" baz',
+ 'SomeError: SomeFrameworkClass in isTestClass for FOO',
+ 'Here is a FOO.bar',
+ 'Here is a FOO.bar baz',
+ 'END FOO#bar',
+ 'new-instance 3810 (LSome/Framework/Class;) in LFOO;',
+ 'FOO: Error message',
+ 'Caused by: FOO: Error message',
+ '\tat FOO.bar(PG:1)',
+ '\t at\t FOO.bar\t (\t PG:\t 1\t )',
+ ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+ ' java.lang.NullPointerException: Attempt to invoke interface method'
+ ' \'void FOO.bar(int,android.os.Bundle)\' on a null object reference'),
+ ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+ ' \'int[] FOO.a\' on a null object reference'),
+ 'java.lang.VerifyError: FOO',
+ ('java.lang.NoSuchFieldError: No instance field a of type '
+ 'Ljava/lang/Class; in class LFOO;'),
+ 'NOTFOO: Object of type FOO was not destroyed...',
+]
+
+EXPECTED_OUTPUT = [
+ '',
+ 'this.was.Deobfuscated',
+ 'this.was.Deobfuscated.someMethod',
+ 'Here is a FOO',
+ 'Here is a class this.was.Deobfuscated',
+ 'Here is a class FOO baz',
+ 'Here is a "FOO" baz',
+ 'Here is a type "this.was.Deobfuscated" baz',
+ 'Here is a "this.was.Deobfuscated.someMethod" baz',
+ 'SomeError: SomeFrameworkClass in isTestClass for this.was.Deobfuscated',
+ 'Here is a this.was.Deobfuscated.someMethod',
+ 'Here is a FOO.bar baz',
+ 'END this.was.Deobfuscated#someMethod',
+ 'new-instance 3810 (LSome/Framework/Class;) in Lthis/was/Deobfuscated;',
+ 'this.was.Deobfuscated: Error message',
+ 'Caused by: this.was.Deobfuscated: Error message',
+ '\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)',
+ ('\t at\t this.was.Deobfuscated.someMethod\t '
+ '(\t Deobfuscated.java:\t 65\t )'),
+ ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
+ ' java.lang.NullPointerException: Attempt to invoke interface method'
+ ' \'void this.was.Deobfuscated.someMethod(int,android.os.Bundle)\' on a'
+ ' null object reference'),
+ ('Caused by: java.lang.NullPointerException: Attempt to read from field'
+ ' \'int[] this.was.Deobfuscated.mFontFamily\' on a null object reference'),
+ 'java.lang.VerifyError: this.was.Deobfuscated',
+ ('java.lang.NoSuchFieldError: No instance field mFontFamily of type '
+ 'Ljava/lang/Class; in class Lthis/was/Deobfuscated;'),
+ 'NOTFOO: Object of type this.was.Deobfuscated was not destroyed...',
+]
+TEST_DATA = [s + '\n' for s in TEST_DATA]
+EXPECTED_OUTPUT = [s + '\n' for s in EXPECTED_OUTPUT]
+
+
+class JavaDeobfuscateTest(unittest.TestCase):
+
+ def __init__(self, *args, **kwargs):
+ super(JavaDeobfuscateTest, self).__init__(*args, **kwargs)
+ self._map_file = None
+
+ def setUp(self):
+ self._map_file = tempfile.NamedTemporaryFile()
+ self._map_file.write(TEST_MAP)
+ self._map_file.flush()
+
+ def tearDown(self):
+ if self._map_file:
+ self._map_file.close()
+
+ def _testImpl(self, input_lines=None, expected_output_lines=None,
+ prefix=''):
+ self.assertTrue(bool(input_lines) == bool(expected_output_lines))
+
+ if not input_lines:
+ input_lines = [prefix + x for x in TEST_DATA]
+ if not expected_output_lines:
+ expected_output_lines = [prefix + x for x in EXPECTED_OUTPUT]
+
+ cmd = [_JAVA_DEOBFUSCATE_PATH, self._map_file.name]
+ proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ proc_output, _ = proc.communicate(''.join(input_lines))
+ actual_output_lines = proc_output.splitlines(True)
+ for actual, expected in zip(actual_output_lines, expected_output_lines):
+ self.assertTrue(
+ actual == expected or actual.replace('bar', 'someMethod') == expected,
+ msg=''.join([
+ 'Deobfuscation failed.\n',
+ ' actual: %s' % actual,
+ ' expected: %s' % expected]))
+
+ def testNoPrefix(self):
+ self._testImpl(prefix='')
+
+ def testThreadtimePrefix(self):
+ self._testImpl(prefix='09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ')
+
+ def testStandardPrefix(self):
+ self._testImpl(prefix='W/GCM (15158): ')
+
+ def testStandardPrefixWithPadding(self):
+ self._testImpl(prefix='W/GCM ( 158): ')
+
+ @unittest.skip('causes java_deobfuscate to hang, see crbug.com/876539')
+ def testIndefiniteHang(self):
+ # Test for crbug.com/876539.
+ self._testImpl(
+ input_lines=[
+ 'VFY: unable to resolve virtual method 2: LFOO;'
+ + '.onDescendantInvalidated '
+ + '(Landroid/view/View;Landroid/view/View;)V',
+ ],
+ expected_output_lines=[
+ 'VFY: unable to resolve virtual method 2: Lthis.was.Deobfuscated;'
+ + '.onDescendantInvalidated '
+ + '(Landroid/view/View;Landroid/view/View;)V',
+ ])
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--java-deobfuscate-path', type=os.path.realpath,
+ required=True)
+ known_args, unittest_args = parser.parse_known_args()
+ _JAVA_DEOBFUSCATE_PATH = known_args.java_deobfuscate_path
+ unittest_args = [sys.argv[0]] + unittest_args
+ unittest.main(argv=unittest_args)
diff --git a/deps/v8/build/android/stacktrace/stackwalker.py b/deps/v8/build/android/stacktrace/stackwalker.py
new file mode 100755
index 0000000000..db54354e3a
--- /dev/null
+++ b/deps/v8/build/android/stacktrace/stackwalker.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import tempfile
+
+if __name__ == '__main__':
+ sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+
+_MICRODUMP_BEGIN = re.compile(
+ '.*google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----')
+_MICRODUMP_END = re.compile(
+ '.*google-breakpad: -----END BREAKPAD MICRODUMP-----')
+
+""" Example Microdump
+<timestamp> 6270 6131 F google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----
+<timestamp> 6270 6131 F google-breakpad: V Chrome_Android:54.0.2790.0
+...
+<timestamp> 6270 6131 F google-breakpad: -----END BREAKPAD MICRODUMP-----
+
+"""
+
+
+def GetMicroDumps(dump_path):
+  """Returns all microdumps found in the given log file.
+
+ Args:
+ dump_path: Path to the log file.
+
+ Returns:
+ List of all microdumps as lists of lines.
+ """
+ with open(dump_path, 'r') as d:
+ data = d.read()
+ all_dumps = []
+ current_dump = None
+ for line in data.splitlines():
+ if current_dump is not None:
+ if _MICRODUMP_END.match(line):
+ current_dump.append(line)
+ all_dumps.append(current_dump)
+ current_dump = None
+ else:
+ current_dump.append(line)
+ elif _MICRODUMP_BEGIN.match(line):
+ current_dump = []
+ current_dump.append(line)
+ return all_dumps
+
+
+def SymbolizeMicroDump(stackwalker_binary_path, dump, symbols_path):
+ """Runs stackwalker on microdump.
+
+ Runs the stackwalker binary at stackwalker_binary_path on a given microdump
+ using the symbols at symbols_path.
+
+ Args:
+ stackwalker_binary_path: Path to the stackwalker binary.
+ dump: The microdump to run the stackwalker on.
+    symbols_path: Path to the symbols file to use.
+
+ Returns:
+ Output from stackwalker tool.
+ """
+ with tempfile.NamedTemporaryFile() as tf:
+ for l in dump:
+ tf.write('%s\n' % l)
+ cmd = [stackwalker_binary_path, tf.name, symbols_path]
+ return cmd_helper.GetCmdOutput(cmd)
+
+
+def AddArguments(parser):
+ parser.add_argument('--stackwalker-binary-path', required=True,
+ help='Path to stackwalker binary.')
+ parser.add_argument('--stack-trace-path', required=True,
+ help='Path to stacktrace containing microdump.')
+ parser.add_argument('--symbols-path', required=True,
+ help='Path to symbols file.')
+ parser.add_argument('--output-file',
+ help='Path to dump stacktrace output to')
+
+
+def _PrintAndLog(line, fp):
+ if fp:
+ fp.write('%s\n' % line)
+ print line
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ AddArguments(parser)
+ args = parser.parse_args()
+
+ micro_dumps = GetMicroDumps(args.stack_trace_path)
+ if not micro_dumps:
+ print 'No microdump found. Exiting.'
+ return 0
+
+ symbolized_dumps = []
+ for micro_dump in micro_dumps:
+ symbolized_dumps.append(SymbolizeMicroDump(
+ args.stackwalker_binary_path, micro_dump, args.symbols_path))
+
+ try:
+ fp = open(args.output_file, 'w') if args.output_file else None
+ _PrintAndLog('%d microdumps found.' % len(micro_dumps), fp)
+ _PrintAndLog('---------- Start output from stackwalker ----------', fp)
+ for index, symbolized_dump in list(enumerate(symbolized_dumps)):
+ _PrintAndLog(
+ '------------------ Start dump %d ------------------' % index, fp)
+ _PrintAndLog(symbolized_dump, fp)
+ _PrintAndLog(
+ '------------------- End dump %d -------------------' % index, fp)
+ _PrintAndLog('----------- End output from stackwalker -----------', fp)
+ except Exception:
+ if fp:
+ fp.close()
+ raise
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/test_runner.py b/deps/v8/build/android/test_runner.py
new file mode 100755
index 0000000000..b26bade595
--- /dev/null
+++ b/deps/v8/build/android/test_runner.py
@@ -0,0 +1,1065 @@
+#!/usr/bin/env vpython
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
+import argparse
+import collections
+import contextlib
+import itertools
+import logging
+import os
+import shutil
+import signal
+import sys
+import tempfile
+import threading
+import traceback
+import unittest
+
+# Import _strptime before threaded code. datetime.datetime.strptime is
+# threadsafe except for the initial import of the _strptime module.
+# See http://crbug.com/724524 and https://bugs.python.org/issue7980.
+import _strptime # pylint: disable=unused-import
+
+# pylint: disable=ungrouped-imports
+from pylib.constants import host_paths
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+
+from devil import base_error
+from devil.utils import reraiser_thread
+from devil.utils import run_tests_helper
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import environment_factory
+from pylib.base import output_manager
+from pylib.base import output_manager_factory
+from pylib.base import test_instance_factory
+from pylib.base import test_run_factory
+from pylib.results import json_results
+from pylib.results import report_results
+from pylib.results.presentation import test_results_presentation
+from pylib.utils import logdog_helper
+from pylib.utils import logging_utils
+from pylib.utils import test_filter
+
+from py_utils import contextlib_ext
+
+# Absolute path to the static devil configuration checked into the tree.
+_DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join(
+    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json'))
+
+
+def _RealPath(arg):
+  """Resolves a path argument, expanding a '//' source-root prefix.
+
+  Args:
+    arg: Path string; '//foo/bar' is interpreted relative to the source root
+        (with '/' converted to the host path separator).
+
+  Returns:
+    The os.path.realpath of the (possibly expanded) path.
+  """
+  if arg.startswith('//'):
+    arg = os.path.abspath(os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                       arg[2:].replace('/', os.sep)))
+  return os.path.realpath(arg)
+
+
+def AddTestLauncherOptions(parser):
+  """Adds arguments mirroring //base/test/launcher.
+
+  Args:
+    parser: The parser to which arguments should be added.
+  Returns:
+    The given parser.
+  """
+  parser.add_argument(
+      '--test-launcher-retry-limit',
+      '--test_launcher_retry_limit',
+      '--num_retries', '--num-retries',
+      '--isolated-script-test-launcher-retry-limit',
+      dest='num_retries', type=int, default=2,
+      help='Number of retries for a test before '
+           'giving up (default: %(default)s).')
+  parser.add_argument(
+      '--test-launcher-summary-output',
+      '--json-results-file',
+      dest='json_results_file', type=os.path.realpath,
+      help='If set, will dump results in JSON form to the specified file. '
+           'Note that this will also trigger saving per-test logcats to '
+           'logdog.')
+  # Shard defaults are taken from the GTEST_* environment variables when
+  # those are present.
+  parser.add_argument(
+      '--test-launcher-shard-index',
+      type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
+      help='Index of the external shard to run.')
+  parser.add_argument(
+      '--test-launcher-total-shards',
+      type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
+      help='Total number of external shards.')
+
+  test_filter.AddFilterOptions(parser)
+
+  return parser
+
+
+def AddCommandLineOptions(parser):
+  """Adds arguments to support passing command-line flags to the device."""
+  parser.add_argument(
+      '--device-flags-file',
+      type=os.path.realpath,
+      help='The relative filepath to a file containing '
+           'command-line flags to set on the device')
+  # NOTE(review): 'Wether' below is a typo for 'Whether' in the user-visible
+  # help text; left untouched here since it is a runtime string.
+  parser.add_argument(
+      '--use-apk-under-test-flags-file',
+      action='store_true',
+      help='Wether to use the flags file for the apk under test. If set, '
+           "the filename will be looked up in the APK's PackageInfo.")
+  # allow_unknown lets main() forward unrecognized command-line arguments to
+  # the device as flags (see parse_known_args handling in main()).
+  parser.set_defaults(allow_unknown=True)
+  parser.set_defaults(command_line_flags=None)
+
+
+def AddTracingOptions(parser):
+  """Adds test-runner tracing options to |parser|."""
+  # TODO(shenghuazhang): Move this into AddCommonOptions once it's supported
+  # for all test types.
+  parser.add_argument(
+      '--trace-output',
+      metavar='FILENAME', type=os.path.realpath,
+      help='Path to save test_runner trace json output to.')
+
+  parser.add_argument(
+      '--trace-all',
+      action='store_true',
+      help='Whether to trace all function calls.')
+
+
+def AddCommonOptions(parser):
+  """Adds all common options to |parser|."""
+
+  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+
+  debug_or_release_group = parser.add_mutually_exclusive_group()
+  debug_or_release_group.add_argument(
+      '--debug',
+      action='store_const', const='Debug', dest='build_type',
+      default=default_build_type,
+      help='If set, run test suites under out/Debug. '
+           'Default is env var BUILDTYPE or Debug.')
+  debug_or_release_group.add_argument(
+      '--release',
+      action='store_const', const='Release', dest='build_type',
+      help='If set, run test suites under out/Release. '
+           'Default is env var BUILDTYPE or Debug.')
+
+  parser.add_argument(
+      '--break-on-failure', '--break_on_failure',
+      dest='break_on_failure', action='store_true',
+      help='Whether to break on failure.')
+
+  # TODO(jbudorick): Remove this once everything has switched to platform
+  # mode.
+  parser.add_argument(
+      '--enable-platform-mode',
+      action='store_true',
+      help='Run the test scripts in platform mode, which '
+           'conceptually separates the test runner from the '
+           '"device" (local or remote, real or emulated) on '
+           'which the tests are running. [experimental]')
+
+  parser.add_argument(
+      '-e', '--environment',
+      default='local', choices=constants.VALID_ENVIRONMENTS,
+      help='Test environment to run in (default: %(default)s).')
+
+  parser.add_argument(
+      '--local-output',
+      action='store_true',
+      help='Whether to archive test output locally and generate '
+           'a local results detail page.')
+
+  # Custom argparse Action that expands --fast-local-dev into the
+  # combination of flags listed in its help text below.
+  class FastLocalDevAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+      namespace.verbose_count = max(namespace.verbose_count, 1)
+      namespace.num_retries = 0
+      namespace.enable_device_cache = True
+      namespace.enable_concurrent_adb = True
+      namespace.skip_clear_data = True
+      namespace.extract_test_list_from_filter = True
+
+  parser.add_argument(
+      '--fast-local-dev',
+      type=bool, nargs=0, action=FastLocalDevAction,
+      help='Alias for: --verbose --num-retries=0 '
+           '--enable-device-cache --enable-concurrent-adb '
+           '--skip-clear-data --extract-test-list-from-filter')
+
+  # TODO(jbudorick): Remove this once downstream bots have switched to
+  # api.test_results.
+  parser.add_argument(
+      '--flakiness-dashboard-server',
+      dest='flakiness_dashboard_server',
+      help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--gs-results-bucket',
+      help='Google Storage bucket to upload results to.')
+
+  parser.add_argument(
+      '--output-directory',
+      dest='output_directory', type=os.path.realpath,
+      help='Path to the directory in which build files are'
+           ' located (must include build type). This will take'
+           ' precedence over --debug and --release')
+  parser.add_argument(
+      '-v', '--verbose',
+      dest='verbose_count', default=0, action='count',
+      help='Verbose level (multiple times for more)')
+
+  parser.add_argument(
+      '--repeat', '--gtest_repeat', '--gtest-repeat',
+      '--isolated-script-test-repeat',
+      dest='repeat', type=int, default=0,
+      help='Number of times to repeat the specified set of tests.')
+  # This is currently only implemented for gtests and instrumentation tests.
+  parser.add_argument(
+      '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
+      '--isolated-script-test-also-run-disabled-tests',
+      dest='run_disabled', action='store_true',
+      help='Also run disabled tests if applicable.')
+
+  AddTestLauncherOptions(parser)
+
+
+def ProcessCommonOptions(args):
+  """Processes and handles all common options.
+
+  Args:
+    args: argparse.Namespace containing the parsed common options.
+  """
+  run_tests_helper.SetLogLevel(args.verbose_count, add_handler=False)
+  # pylint: disable=redefined-variable-type
+  # Use colorized logging only when verbose; otherwise plain stdout logging.
+  if args.verbose_count > 0:
+    handler = logging_utils.ColorStreamHandler()
+  else:
+    handler = logging.StreamHandler(sys.stdout)
+  # pylint: enable=redefined-variable-type
+  handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(handler)
+
+  constants.SetBuildType(args.build_type)
+  # An explicit --output-directory takes precedence over --debug/--release.
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+
+
+def AddDeviceOptions(parser):
+  """Adds device options to |parser|."""
+
+  # All device arguments live in a dedicated argument group.
+  parser = parser.add_argument_group('device arguments')
+
+  parser.add_argument(
+      '--adb-path',
+      type=os.path.realpath,
+      help='Specify the absolute path of the adb binary that '
+           'should be used.')
+  parser.add_argument(
+      '--blacklist-file',
+      type=os.path.realpath,
+      help='Device blacklist file.')
+  parser.add_argument(
+      '-d', '--device', nargs='+',
+      dest='test_devices',
+      help='Target device(s) for the test suite to run on.')
+  parser.add_argument(
+      '--enable-concurrent-adb',
+      action='store_true',
+      help='Run multiple adb commands at the same time, even '
+           'for the same device.')
+  parser.add_argument(
+      '--enable-device-cache',
+      action='store_true',
+      help='Cache device state to disk between runs')
+  parser.add_argument(
+      '--skip-clear-data',
+      action='store_true',
+      help='Do not wipe app data between tests. Use this to '
+           'speed up local development and never on bots '
+           '(increases flakiness)')
+  parser.add_argument(
+      '--recover-devices',
+      action='store_true',
+      help='Attempt to recover devices prior to the final retry. Warning: '
+           'this will cause all devices to reboot.')
+  parser.add_argument(
+      '--tool',
+      dest='tool',
+      help='Run the test under a tool '
+           '(use --tool help to list them)')
+
+  parser.add_argument(
+      '--upload-logcats-file',
+      action='store_true',
+      dest='upload_logcats_file',
+      help='Whether to upload logcat file to logdog.')
+
+  # A run either dumps per-device logcats to a directory or merges them into
+  # a single file -- never both.
+  logcat_output_group = parser.add_mutually_exclusive_group()
+  logcat_output_group.add_argument(
+      '--logcat-output-dir', type=os.path.realpath,
+      help='If set, will dump logcats recorded during test run to directory. '
+           'File names will be the device ids with timestamps.')
+  logcat_output_group.add_argument(
+      '--logcat-output-file', type=os.path.realpath,
+      help='If set, will merge logcats recorded during test run and dump them '
+           'to the specified file.')
+
+
+def AddGTestOptions(parser):
+  """Adds gtest options to |parser|."""
+
+  parser = parser.add_argument_group('gtest arguments')
+
+  parser.add_argument(
+      '--app-data-file',
+      action='append', dest='app_data_files',
+      help='A file path relative to the app data directory '
+           'that should be saved to the host.')
+  parser.add_argument(
+      '--app-data-file-dir',
+      help='Host directory to which app data files will be'
+           ' saved. Used with --app-data-file.')
+  parser.add_argument(
+      '--isolated-script-test-perf-output',
+      help='If present, store chartjson results on this path.')
+  parser.add_argument(
+      '--delete-stale-data',
+      dest='delete_stale_data', action='store_true',
+      help='Delete stale test data on the device.')
+  parser.add_argument(
+      '--enable-xml-result-parsing',
+      action='store_true', help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--executable-dist-dir',
+      type=os.path.realpath,
+      help="Path to executable's dist directory for native"
+           " (non-apk) tests.")
+  parser.add_argument(
+      '--extract-test-list-from-filter',
+      action='store_true',
+      help='When a test filter is specified, and the list of '
+           'tests can be determined from it, skip querying the '
+           'device for the list of all tests. Speeds up local '
+           'development, but is not safe to use on bots ('
+           'http://crbug.com/549214')
+  parser.add_argument(
+      '--gs-test-artifacts-bucket',
+      help=('If present, test artifacts will be uploaded to this Google '
+            'Storage bucket.'))
+  parser.add_argument(
+      '--runtime-deps-path',
+      dest='runtime_deps_path', type=os.path.realpath,
+      help='Runtime data dependency file from GN.')
+  parser.add_argument(
+      '-t', '--shard-timeout',
+      dest='shard_timeout', type=int, default=120,
+      help='Timeout to wait for each test (default: %(default)s).')
+  parser.add_argument(
+      '--store-tombstones',
+      dest='store_tombstones', action='store_true',
+      help='Add tombstones in results if crash.')
+  parser.add_argument(
+      '-s', '--suite',
+      dest='suite_name', nargs='+', metavar='SUITE_NAME', required=True,
+      help='Executable name of the test suite to run.')
+  parser.add_argument(
+      '--test-apk-incremental-install-json',
+      type=os.path.realpath,
+      help='Path to install json for the test apk.')
+  parser.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_true',
+      help='Wait for java debugger to attach before running any application '
+           'code. Also disables test timeouts and sets retries=0.')
+
+
+def AddInstrumentationTestOptions(parser):
+  """Adds Instrumentation test options to |parser|."""
+
+  # NOTE(review): the group returned here is discarded (unlike most other
+  # Add*Options functions), so these arguments land on the top-level parser
+  # rather than in the 'instrumentation arguments' group -- confirm intent.
+  parser.add_argument_group('instrumentation arguments')
+
+  parser.add_argument(
+      '--additional-apk',
+      action='append', dest='additional_apks', default=[],
+      type=_RealPath,
+      help='Additional apk that must be installed on '
+           'the device when the tests are run')
+  parser.add_argument(
+      '-A', '--annotation',
+      dest='annotation_str',
+      help='Comma-separated list of annotations. Run only tests with any of '
+           'the given annotations. An annotation can be either a key or a '
+           'key-values pair. A test that has no annotation is considered '
+           '"SmallTest".')
+  # TODO(jbudorick): Remove support for name-style APK specification once
+  # bots are no longer doing it.
+  parser.add_argument(
+      '--apk-under-test',
+      help='Path or name of the apk under test.')
+  parser.add_argument(
+      '--coverage-dir',
+      type=os.path.realpath,
+      help='Directory in which to place all generated '
+           'EMMA coverage files.')
+  parser.add_argument(
+      '--delete-stale-data',
+      action='store_true', dest='delete_stale_data',
+      help='Delete stale test data on the device.')
+  parser.add_argument(
+      '--disable-dalvik-asserts',
+      dest='set_asserts', action='store_false', default=True,
+      help='Removes the dalvik.vm.enableassertions property')
+  parser.add_argument(
+      '--enable-java-deobfuscation',
+      action='store_true',
+      help='Deobfuscate java stack traces in test output and logcat.')
+  parser.add_argument(
+      '-E', '--exclude-annotation',
+      dest='exclude_annotation_str',
+      help='Comma-separated list of annotations. Exclude tests with these '
+           'annotations.')
+
+  def package_replacement(arg):
+    # argparse 'type' callable: parses 'package,apk_path' into a
+    # PackageReplacement namedtuple, resolving the APK path via _RealPath.
+    split_arg = arg.split(',')
+    if len(split_arg) != 2:
+      raise argparse.ArgumentError(
+          arg,
+          'Expected two comma-separated strings for --replace-system-package, '
+          'received %d' % len(split_arg))
+    PackageReplacement = collections.namedtuple('PackageReplacement',
+                                                ['package', 'replacement_apk'])
+    return PackageReplacement(package=split_arg[0],
+                              replacement_apk=_RealPath(split_arg[1]))
+  parser.add_argument(
+      '--replace-system-package',
+      type=package_replacement, default=None,
+      help='Specifies a system package to replace with a given APK for the '
+           'duration of the test. Given as a comma-separated pair of strings, '
+           'the first element being the package and the second the path to the '
+           'replacement APK. Only supports replacing one package. Example: '
+           '--replace-system-package com.example.app,path/to/some.apk')
+
+  parser.add_argument(
+      '--use-webview-provider',
+      type=_RealPath, default=None,
+      help='Use this apk as the webview provider during test. '
+           'The original provider will be restored if possible, '
+           "on Nougat the provider can't be determined and so "
+           'the system will choose the default provider.')
+  parser.add_argument(
+      '--runtime-deps-path',
+      dest='runtime_deps_path', type=os.path.realpath,
+      help='Runtime data dependency file from GN.')
+  parser.add_argument(
+      '--screenshot-directory',
+      dest='screenshot_dir', type=os.path.realpath,
+      help='Capture screenshots of test failures')
+  parser.add_argument(
+      '--shared-prefs-file',
+      dest='shared_prefs_file', type=_RealPath,
+      help='The relative path to a file containing JSON list of shared '
+           'preference files to edit and how to do so. Example list: '
+           '[{'
+           '  "package": "com.package.example",'
+           '  "filename": "ExampleSettings.xml",'
+           '  "set": {'
+           '    "boolean_key_in_xml": true,'
+           '    "string_key_in_xml": "string_value"'
+           '  },'
+           '  "remove": ['
+           '    "key_in_xml_to_remove"'
+           '  ]'
+           '}]')
+  parser.add_argument(
+      '--store-tombstones',
+      action='store_true', dest='store_tombstones',
+      help='Add tombstones in results if crash.')
+  parser.add_argument(
+      '--strict-mode',
+      dest='strict_mode', default='testing',
+      help='StrictMode command-line flag set on the device, '
+           'death/testing to kill the process, off to stop '
+           'checking, flash to flash only. (default: %(default)s)')
+  parser.add_argument(
+      '--test-apk',
+      required=True,
+      help='Path or name of the apk containing the tests.')
+  parser.add_argument(
+      '--test-jar',
+      help='Path of jar containing test java files.')
+  parser.add_argument(
+      '--timeout-scale',
+      type=float,
+      help='Factor by which timeouts should be scaled.')
+  parser.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_true',
+      help='Wait for java debugger to attach before running any application '
+           'code. Also disables test timeouts and sets retries=0.')
+
+  # These arguments are suppressed from the help text because they should
+  # only ever be specified by an intermediate script.
+  parser.add_argument(
+      '--apk-under-test-incremental-install-json',
+      help=argparse.SUPPRESS)
+  parser.add_argument(
+      '--test-apk-incremental-install-json',
+      type=os.path.realpath,
+      help=argparse.SUPPRESS)
+
+
+def AddJUnitTestOptions(parser):
+  """Adds junit test options to |parser|."""
+
+  parser = parser.add_argument_group('junit arguments')
+
+  parser.add_argument(
+      '--jacoco', action='store_true',
+      help='Generate jacoco report.')
+  parser.add_argument(
+      '--coverage-dir', type=os.path.realpath,
+      help='Directory to store coverage info.')
+  parser.add_argument(
+      '--package-filter',
+      help='Filters tests by package.')
+  parser.add_argument(
+      '--runner-filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  parser.add_argument(
+      '-s', '--test-suite', required=True,
+      help='JUnit test suite to run.')
+  # -w is shorthand for --debug-socket with the default port.
+  debug_group = parser.add_mutually_exclusive_group()
+  debug_group.add_argument(
+      '-w', '--wait-for-java-debugger', action='store_const', const='8701',
+      dest='debug_socket', help='Alias for --debug-socket=8701')
+  debug_group.add_argument(
+      '--debug-socket',
+      help='Wait for java debugger to attach at specified socket address '
+           'before running any application code. Also disables test timeouts '
+           'and sets retries=0.')
+
+  # These arguments are for Android Robolectric tests.
+  parser.add_argument(
+      '--android-manifest-path',
+      help='Path to Android Manifest to configure Robolectric.')
+  parser.add_argument(
+      '--package-name',
+      help='Default app package name for Robolectric tests.')
+  parser.add_argument(
+      '--resource-zip',
+      action='append', dest='resource_zips', default=[],
+      help='Path to resource zips to configure Robolectric.')
+  parser.add_argument(
+      '--robolectric-runtime-deps-dir',
+      help='Path to runtime deps for Robolectric.')
+
+
+def AddLinkerTestOptions(parser):
+  """Adds linker test options to |parser|."""
+
+  # NOTE(review): the returned group is discarded here, so --test-apk is
+  # added to the top-level parser, not the 'linker arguments' group.
+  parser.add_argument_group('linker arguments')
+
+  parser.add_argument(
+      '--test-apk',
+      type=os.path.realpath,
+      help='Path to the linker test APK.')
+
+
+def AddMonkeyTestOptions(parser):
+  """Adds monkey test options to |parser|."""
+
+  parser = parser.add_argument_group('monkey arguments')
+
+  parser.add_argument(
+      '--browser',
+      required=True, choices=constants.PACKAGE_INFO.keys(),
+      metavar='BROWSER', help='Browser under test.')
+  parser.add_argument(
+      '--category',
+      nargs='*', dest='categories', default=[],
+      help='A list of allowed categories. Monkey will only visit activities '
+           'that are listed with one of the specified categories.')
+  parser.add_argument(
+      '--event-count',
+      default=10000, type=int,
+      help='Number of events to generate (default: %(default)s).')
+  parser.add_argument(
+      '--seed',
+      type=int,
+      help='Seed value for pseudo-random generator. Same seed value generates '
+           'the same sequence of events. Seed is randomized by default.')
+  parser.add_argument(
+      '--throttle',
+      default=100, type=int,
+      help='Delay between events (ms) (default: %(default)s). ')
+
+
+def AddPerfTestOptions(parser):
+  """Adds perf test options to |parser|."""
+
+  parser = parser.add_argument_group('perf arguments')
+
+  # Validates that a trailing command is given iff --single-step is set.
+  class SingleStepAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+      if values and not namespace.single_step:
+        parser.error('single step command provided, '
+                     'but --single-step not specified.')
+      elif namespace.single_step and not values:
+        parser.error('--single-step specified, '
+                     'but no single step command provided.')
+      setattr(namespace, self.dest, values)
+
+  step_group = parser.add_mutually_exclusive_group(required=True)
+  # TODO(jbudorick): Revise --single-step to use argparse.REMAINDER.
+  # This requires removing "--" from client calls.
+  step_group.add_argument(
+      '--print-step',
+      help='The name of a previously executed perf step to print.')
+  step_group.add_argument(
+      '--single-step',
+      action='store_true',
+      help='Execute the given command with retries, but only print the result '
+           'for the "most successful" round.')
+  step_group.add_argument(
+      '--steps',
+      help='JSON file containing the list of commands to run.')
+
+  parser.add_argument(
+      '--collect-chartjson-data',
+      action='store_true',
+      help='Cache the telemetry chartjson output from each step for later use.')
+  parser.add_argument(
+      '--dry-run',
+      action='store_true',
+      help='Just print the steps without executing.')
+  # TODO(rnephew): Remove this when everything moves to new option in platform
+  # mode.
+  parser.add_argument(
+      '--get-output-dir-archive',
+      metavar='FILENAME', type=os.path.realpath,
+      help='Write the cached output directory archived by a step into the'
+           ' given ZIP file.')
+  parser.add_argument(
+      '--known-devices-file',
+      help='Path to known device list.')
+  # Uses 0.1 degrees C because that's what Android does.
+  parser.add_argument(
+      '--max-battery-temp',
+      type=int,
+      help='Only start tests when the battery is at or below the given '
+           'temperature (0.1 C)')
+  parser.add_argument(
+      '--min-battery-level',
+      type=int,
+      help='Only starts tests when the battery is charged above '
+           'given level.')
+  parser.add_argument(
+      '--no-timeout',
+      action='store_true',
+      help='Do not impose a timeout. Each perf step is responsible for '
+           'implementing the timeout logic.')
+  parser.add_argument(
+      '--output-chartjson-data',
+      type=os.path.realpath,
+      help='Writes telemetry chartjson formatted output into the given file.')
+  parser.add_argument(
+      '--output-dir-archive-path',
+      metavar='FILENAME', type=os.path.realpath,
+      help='Write the cached output directory archived by a step into the'
+           ' given ZIP file.')
+  parser.add_argument(
+      '--output-json-list',
+      type=os.path.realpath,
+      help='Writes a JSON list of information for each --steps into the given '
+           'file. Information includes runtime and device affinity for each '
+           '--steps.')
+  parser.add_argument(
+      '--write-buildbot-json',
+      action='store_true',
+      help='Whether to output buildbot json.')
+
+  # Positional remainder; validated against --single-step by SingleStepAction.
+  parser.add_argument(
+      'single_step_command',
+      nargs='*', action=SingleStepAction,
+      help='If --single-step is specified, the command to run.')
+
+
+def AddPythonTestOptions(parser):
+  """Adds python test options to |parser|."""
+
+  parser = parser.add_argument_group('python arguments')
+
+  parser.add_argument(
+      '-s', '--suite',
+      dest='suite_name', metavar='SUITE_NAME',
+      choices=constants.PYTHON_UNIT_TEST_SUITES.keys(),
+      help='Name of the test suite to run.')
+
+
+def _RunPythonTests(args):
+  """Subcommand of RunTestsCommand which runs python unit tests.
+
+  Args:
+    args: argparse.Namespace; reads args.suite_name and args.verbose_count.
+
+  Returns:
+    0 if the suite passed, 1 otherwise.
+  """
+  suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
+  suite_path = suite_vars['path']
+  suite_test_modules = suite_vars['test_modules']
+
+  # Temporarily prepend the suite's path so its test modules can be imported
+  # by name; restored in the finally block.
+  sys.path = [suite_path] + sys.path
+  try:
+    suite = unittest.TestSuite()
+    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
+                   for m in suite_test_modules)
+    runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
+    return 0 if runner.run(suite).wasSuccessful() else 1
+  finally:
+    sys.path = sys.path[1:]
+
+
+# Test types that run in platform mode even without --enable-platform-mode.
+_DEFAULT_PLATFORM_MODE_TESTS = ['gtest', 'instrumentation', 'junit',
+                                'linker', 'monkey', 'perf']
+
+
+def RunTestsCommand(args):
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    Integer indicated exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+      individual test runner.
+  """
+  command = args.command
+
+  ProcessCommonOptions(args)
+  logging.info('command: %s', ' '.join(sys.argv))
+  # Everything except 'python' tests currently goes through platform mode.
+  if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
+    return RunTestsInPlatformMode(args)
+
+  if command == 'python':
+    return _RunPythonTests(args)
+  else:
+    raise Exception('Unknown test type.')
+
+
+# Test types that RunTestsInPlatformMode can currently handle.
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'junit',
+  'linker',
+  'monkey',
+  'perf',
+]
+
+
+def RunTestsInPlatformMode(args):
+  """Runs a platform-mode test type and handles results and reporting.
+
+  Args:
+    args: argparse.Namespace with the parsed options for a test type listed
+        in _SUPPORTED_IN_PLATFORM_MODE.
+
+  Returns:
+    0 if every iteration passed (or for self-reporting perf step runs),
+    constants.ERROR_EXIT_CODE otherwise. Exits with
+    constants.INFRA_EXIT_CODE on infrastructure errors.
+  """
+
+  def infra_error(message):
+    # Fatal infrastructure problem: log and exit with the infra exit code.
+    logging.fatal(message)
+    sys.exit(constants.INFRA_EXIT_CODE)
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    infra_error('%s is not yet supported in platform mode' % args.command)
+
+  ### Set up sigterm handler.
+
+  contexts_to_notify_on_sigterm = []
+  def unexpected_sigterm(_signum, _frame):
+    msg = [
+        'Received SIGTERM. Shutting down.',
+    ]
+    # Include the stack of every live thread to aid debugging the shutdown.
+    for live_thread in threading.enumerate():
+      # pylint: disable=protected-access
+      thread_stack = ''.join(traceback.format_stack(
+          sys._current_frames()[live_thread.ident]))
+      msg.extend([
+          'Thread "%s" (ident: %s) is currently running:' % (
+              live_thread.name, live_thread.ident),
+          thread_stack])
+
+    for context in contexts_to_notify_on_sigterm:
+      context.ReceivedSigterm()
+
+    infra_error('\n'.join(msg))
+
+  signal.signal(signal.SIGTERM, unexpected_sigterm)
+
+  ### Set up results handling.
+  # TODO(jbudorick): Rewrite results handling.
+
+  # all_raw_results is a list of lists of
+  # base_test_result.TestRunResults objects. Each instance of
+  # TestRunResults contains all test results produced by a single try,
+  # while each list of TestRunResults contains all tries in a single
+  # iteration.
+  all_raw_results = []
+
+  # all_iteration_results is a list of base_test_result.TestRunResults
+  # objects. Each instance of TestRunResults contains the last test
+  # result for each test run in that iteration.
+  all_iteration_results = []
+
+  global_results_tags = set()
+
+  # Results are written to a temp file first and moved to the requested
+  # --json-results-file (or deleted) by json_finalizer below.
+  json_file = tempfile.NamedTemporaryFile(delete=False)
+  json_file.close()
+
+  @contextlib.contextmanager
+  def json_finalizer():
+    try:
+      yield
+    finally:
+      if args.json_results_file and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.json_results_file)
+      else:
+        os.remove(json_file.name)
+
+  @contextlib.contextmanager
+  def json_writer():
+    try:
+      yield
+    except Exception:
+      # Tag the results as unreliable, but re-raise so the failure surfaces.
+      global_results_tags.add('UNRELIABLE_RESULTS')
+      raise
+    finally:
+      json_results.GenerateJsonResultsFile(
+          all_raw_results, json_file.name,
+          global_tags=list(global_results_tags),
+          indent=2)
+
+  @contextlib.contextmanager
+  def upload_logcats_file():
+    try:
+      yield
+    finally:
+      # After the run, upload the merged logcat (if any) to logdog.
+      if not args.logcat_output_file:
+        logging.critical('Cannot upload logcat file: no file specified.')
+      elif not os.path.exists(args.logcat_output_file):
+        logging.critical("Cannot upload logcat file: file doesn't exist.")
+      else:
+        with open(args.logcat_output_file) as src:
+          dst = logdog_helper.open_text('unified_logcats')
+          if dst:
+            shutil.copyfileobj(src, dst)
+            dst.close()
+            logging.critical(
+                'Logcat: %s', logdog_helper.get_viewer_url('unified_logcats'))
+
+  # Only upload when the option exists for this test type and is set.
+  logcats_uploader = contextlib_ext.Optional(
+      upload_logcats_file(),
+      'upload_logcats_file' in args and args.upload_logcats_file)
+
+  ### Set up test objects.
+
+  out_manager = output_manager_factory.CreateOutputManager(args)
+  env = environment_factory.CreateEnvironment(
+      args, out_manager, infra_error)
+  test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
+  test_run = test_run_factory.CreateTestRun(
+      args, env, test_instance, infra_error)
+
+  contexts_to_notify_on_sigterm.append(env)
+  contexts_to_notify_on_sigterm.append(test_run)
+
+  ### Run.
+  with out_manager, json_finalizer():
+    with json_writer(), logcats_uploader, env, test_instance, test_run:
+
+      # A negative --repeat means repeat indefinitely.
+      repetitions = (xrange(args.repeat + 1) if args.repeat >= 0
+                     else itertools.count())
+      result_counts = collections.defaultdict(
+          lambda: collections.defaultdict(int))
+      iteration_count = 0
+      for _ in repetitions:
+        # raw_results will be populated with base_test_result.TestRunResults by
+        # test_run.RunTests(). It is immediately added to all_raw_results so
+        # that in the event of an exception, all_raw_results will already have
+        # the up-to-date results and those can be written to disk.
+        raw_results = []
+        all_raw_results.append(raw_results)
+
+        test_run.RunTests(raw_results)
+        if not raw_results:
+          all_raw_results.pop()
+          continue
+
+        # The last result for each test wins within an iteration.
+        iteration_results = base_test_result.TestRunResults()
+        for r in reversed(raw_results):
+          iteration_results.AddTestRunResults(r)
+        all_iteration_results.append(iteration_results)
+
+        iteration_count += 1
+        for r in iteration_results.GetAll():
+          result_counts[r.GetName()][r.GetType()] += 1
+        report_results.LogFull(
+            results=iteration_results,
+            test_type=test_instance.TestType(),
+            test_package=test_run.TestPackage(),
+            annotation=getattr(args, 'annotations', None),
+            flakiness_server=getattr(args, 'flakiness_dashboard_server',
+                                     None))
+        if args.break_on_failure and not iteration_results.DidRunPass():
+          break
+
+      if iteration_count > 1:
+        # display summary results
+        # only display results for a test if at least one test did not pass
+        all_pass = 0
+        tot_tests = 0
+        for test_name in result_counts:
+          tot_tests += 1
+          if any(result_counts[test_name][x] for x in (
+              base_test_result.ResultType.FAIL,
+              base_test_result.ResultType.CRASH,
+              base_test_result.ResultType.TIMEOUT,
+              base_test_result.ResultType.UNKNOWN)):
+            logging.critical(
+                '%s: %s',
+                test_name,
+                ', '.join('%s %s' % (str(result_counts[test_name][i]), i)
+                          for i in base_test_result.ResultType.GetTypes()))
+          else:
+            all_pass += 1
+
+        logging.critical('%s of %s tests passed in all %s runs',
+                         str(all_pass),
+                         str(tot_tests),
+                         str(iteration_count))
+
+    # Optionally render a local HTML results page from the JSON results.
+    if args.local_output:
+      with out_manager.ArchivedTempfile(
+          'test_results_presentation.html',
+          'test_results_presentation',
+          output_manager.Datatype.HTML) as results_detail_file:
+        result_html_string, _, _ = test_results_presentation.result_details(
+            json_path=json_file.name,
+            test_name=args.command,
+            cs_base_url='http://cs.chromium.org',
+            local_output=True)
+        results_detail_file.write(result_html_string)
+        results_detail_file.flush()
+      logging.critical('TEST RESULTS: %s', results_detail_file.Link())
+
+      ui_screenshots = test_results_presentation.ui_screenshot_set(
+          json_file.name)
+      if ui_screenshots:
+        with out_manager.ArchivedTempfile(
+            'ui_screenshots.json',
+            'ui_capture',
+            output_manager.Datatype.JSON) as ui_screenshot_file:
+          ui_screenshot_file.write(ui_screenshots)
+        logging.critical('UI Screenshots: %s', ui_screenshot_file.Link())
+
+  # Perf step runs report their own status; treat the run as successful.
+  if args.command == 'perf' and (args.steps or args.single_step):
+    return 0
+
+  return (0 if all(r.DidRunPass() for r in all_iteration_results)
+          else constants.ERROR_EXIT_CODE)
+
+
+def DumpThreadStacks(_signal, _frame):
+  """SIGUSR1 handler: logs the stack of every live thread."""
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  """Entry point: builds the per-test-type CLI and dispatches the run.
+
+  Returns:
+    The exit code from the selected test command, or an error/infra exit
+    code from constants on failure.
+  """
+  # SIGUSR1 dumps all thread stacks, useful for diagnosing hangs.
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
+
+  parser = argparse.ArgumentParser()
+  command_parsers = parser.add_subparsers(
+      title='test types', dest='command')
+
+  subp = command_parsers.add_parser(
+      'gtest',
+      help='googletest-based C++ tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddGTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'instrumentation',
+      help='InstrumentationTestCase-based Java tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddInstrumentationTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'junit',
+      help='JUnit4-based Java tests')
+  AddCommonOptions(subp)
+  AddJUnitTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'linker',
+      help='linker tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddLinkerTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'monkey',
+      help="tests based on Android's monkey command")
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddMonkeyTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'perf',
+      help='performance tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddPerfTestOptions(subp)
+  AddTracingOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'python',
+      help='python tests based on unittest.TestCase')
+  AddCommonOptions(subp)
+  AddPythonTestOptions(subp)
+
+  # Test types that set allow_unknown (see AddCommandLineOptions) forward
+  # unrecognized arguments to the device as command-line flags; for all
+  # other test types unrecognized arguments are an error.
+  args, unknown_args = parser.parse_known_args()
+  if unknown_args:
+    if hasattr(args, 'allow_unknown') and args.allow_unknown:
+      args.command_line_flags = unknown_args
+    else:
+      parser.error('unrecognized arguments: %s' % ' '.join(unknown_args))
+
+  # --replace-system-package has the potential to cause issues if
+  # --enable-concurrent-adb is set, so disallow that combination
+  if (hasattr(args, 'replace_system_package') and
+      hasattr(args, 'enable_concurrent_adb') and args.replace_system_package and
+      args.enable_concurrent_adb):
+    parser.error('--replace-system-package and --enable-concurrent-adb cannot '
+                 'be used together')
+
+  # --use-webview-provider has the potential to cause issues if
+  # --enable-concurrent-adb is set, so disallow that combination
+  if (hasattr(args, 'use_webview_provider') and
+      hasattr(args, 'enable_concurrent_adb') and args.use_webview_provider and
+      args.enable_concurrent_adb):
+    parser.error('--use-webview-provider and --enable-concurrent-adb cannot '
+                 'be used together')
+
+  if (getattr(args, 'jacoco', False) and
+      not getattr(args, 'coverage_dir', '')):
+    parser.error('--jacoco requires --coverage-dir')
+
+  # Debugger-attached runs disable retries so the session is not restarted.
+  if (hasattr(args, 'debug_socket') or
+      (hasattr(args, 'wait_for_java_debugger') and
+       args.wait_for_java_debugger)):
+    args.num_retries = 0
+
+  try:
+    return RunTestsCommand(args)
+  except base_error.BaseError as e:
+    logging.exception('Error occurred.')
+    if e.is_infra_error:
+      return constants.INFRA_EXIT_CODE
+    return constants.ERROR_EXIT_CODE
+  except:  # pylint: disable=W0702
+    logging.exception('Unrecognized error occurred.')
+    return constants.ERROR_EXIT_CODE
+
+
+# Allow use both as a standalone script and as an importable module.
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/deps/v8/build/android/test_runner.pydeps b/deps/v8/build/android/test_runner.pydeps
new file mode 100644
index 0000000000..ac5be4658e
--- /dev/null
+++ b/deps/v8/build/android/test_runner.pydeps
@@ -0,0 +1,210 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/test_runner.pydeps build/android/test_runner.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_proto_classes.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_time.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/battery_utils.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/crash_handler.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_blacklist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_list.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/forwarder.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/ports.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/device_recovery.py
+../../third_party/catapult/devil/devil/android/tools/device_status.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/android/tools/system_app.py
+../../third_party/catapult/devil/devil/android/tools/webview_app.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/__init__.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/base_tool.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/file_utils.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py
+../../third_party/colorama/src/colorama/__init__.py
+../../third_party/colorama/src/colorama/ansi.py
+../../third_party/colorama/src/colorama/ansitowin32.py
+../../third_party/colorama/src/colorama/initialise.py
+../../third_party/colorama/src/colorama/win32.py
+../../third_party/colorama/src/colorama/winterm.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_compat.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../../tools/swarming_client/libs/__init__.py
+../../tools/swarming_client/libs/logdog/__init__.py
+../../tools/swarming_client/libs/logdog/bootstrap.py
+../../tools/swarming_client/libs/logdog/stream.py
+../../tools/swarming_client/libs/logdog/streamname.py
+../../tools/swarming_client/libs/logdog/varint.py
+../gn_helpers.py
+../util/lib/common/chrome_test_server_spawner.py
+../util/lib/common/unittest_util.py
+devil_chromium.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/base/__init__.py
+pylib/base/base_test_result.py
+pylib/base/environment.py
+pylib/base/environment_factory.py
+pylib/base/output_manager.py
+pylib/base/output_manager_factory.py
+pylib/base/test_collection.py
+pylib/base/test_exception.py
+pylib/base/test_instance.py
+pylib/base/test_instance_factory.py
+pylib/base/test_run.py
+pylib/base/test_run_factory.py
+pylib/base/test_server.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/gtest/__init__.py
+pylib/gtest/gtest_test_instance.py
+pylib/instrumentation/__init__.py
+pylib/instrumentation/instrumentation_parser.py
+pylib/instrumentation/instrumentation_test_instance.py
+pylib/instrumentation/test_result.py
+pylib/junit/__init__.py
+pylib/junit/junit_test_instance.py
+pylib/linker/__init__.py
+pylib/linker/linker_test_instance.py
+pylib/linker/test_case.py
+pylib/local/__init__.py
+pylib/local/device/__init__.py
+pylib/local/device/local_device_environment.py
+pylib/local/device/local_device_gtest_run.py
+pylib/local/device/local_device_instrumentation_test_run.py
+pylib/local/device/local_device_linker_test_run.py
+pylib/local/device/local_device_monkey_test_run.py
+pylib/local/device/local_device_perf_test_run.py
+pylib/local/device/local_device_test_run.py
+pylib/local/local_test_server_spawner.py
+pylib/local/machine/__init__.py
+pylib/local/machine/local_machine_environment.py
+pylib/local/machine/local_machine_junit_test_run.py
+pylib/monkey/__init__.py
+pylib/monkey/monkey_test_instance.py
+pylib/output/__init__.py
+pylib/output/local_output_manager.py
+pylib/output/noop_output_manager.py
+pylib/output/remote_output_manager.py
+pylib/perf/__init__.py
+pylib/perf/perf_test_instance.py
+pylib/results/__init__.py
+pylib/results/flakiness_dashboard/__init__.py
+pylib/results/flakiness_dashboard/json_results_generator.py
+pylib/results/flakiness_dashboard/results_uploader.py
+pylib/results/json_results.py
+pylib/results/presentation/__init__.py
+pylib/results/presentation/standard_gtest_merge.py
+pylib/results/presentation/test_results_presentation.py
+pylib/results/report_results.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/symbols/stack_symbolizer.py
+pylib/utils/__init__.py
+pylib/utils/decorators.py
+pylib/utils/device_dependencies.py
+pylib/utils/dexdump.py
+pylib/utils/google_storage_helper.py
+pylib/utils/instrumentation_tracing.py
+pylib/utils/logdog_helper.py
+pylib/utils/logging_utils.py
+pylib/utils/proguard.py
+pylib/utils/repo_utils.py
+pylib/utils/shared_preference_utils.py
+pylib/utils/test_filter.py
+pylib/utils/time_profile.py
+pylib/valgrind_tools.py
+test_runner.py
+tombstones.py
diff --git a/deps/v8/build/android/test_wrapper/logdog_wrapper.py b/deps/v8/build/android/test_wrapper/logdog_wrapper.py
new file mode 100755
index 0000000000..fda9f147d5
--- /dev/null
+++ b/deps/v8/build/android/test_wrapper/logdog_wrapper.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for adding logdog streaming support to swarming tasks."""
+
+import argparse
+import contextlib
+import logging
+import os
+import signal
+import subprocess
+import sys
+
+_SRC_PATH = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..', '..'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
+sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common',
+ 'py_utils'))
+
+from devil.utils import signal_handler
+from devil.utils import timeout_retry
+from py_utils import tempfile_ext
+
+PROJECT = 'chromium'
+OUTPUT = 'logdog'
+COORDINATOR_HOST = 'luci-logdog.appspot.com'
+SERVICE_ACCOUNT_JSON = ('/creds/service_accounts'
+ '/service-account-luci-logdog-publisher.json')
+LOGDOG_TERMINATION_TIMEOUT = 30
+
+
def CommandParser():
  """Constructs the command-line parser for this wrapper.

  Returns:
    An argparse.ArgumentParser accepting --target and --logdog-bin-cmd;
    unknown arguments are left for the caller to treat as the test command.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--target',
      help=('The test target to be run. If not set, any extra '
            'args passed to this script are assumed to be the '
            'full test command to run.'))
  arg_parser.add_argument(
      '--logdog-bin-cmd', required=True,
      help='The logdog bin cmd.')
  return arg_parser
+
+
def CreateStopTestsMethod(proc):
  """Returns a signal handler that forwards the received signal to |proc|."""
  def _ForwardSignal(signum, _frame):
    logging.error('Forwarding signal %s to test process', str(signum))
    proc.send_signal(signum)
  return _ForwardSignal
+
+
@contextlib.contextmanager
def NoLeakingProcesses(popen):
  """Context manager yielding |popen| and killing it on exit if still alive.

  A |popen| of None is accepted: it is yielded unchanged and no cleanup
  happens. Kill failures are logged, not raised (best-effort cleanup).
  """
  try:
    yield popen
  finally:
    if popen is not None:
      try:
        still_running = popen.poll() is None
        if still_running:
          popen.kill()
      except OSError:
        logging.warning('Failed to kill %s. Process may be leaked.',
                        str(popen.pid))
+
+
def main():
  """Runs the wrapped test command, streaming its logs through logdog.

  Returns:
    The exit status of the wrapped test process.
  """
  parser = CommandParser()
  args, extra_cmd_args = parser.parse_known_args(sys.argv[1:])

  logging.basicConfig(level=logging.INFO)
  # --target maps to a generated bin/run_<target> script; otherwise the
  # leftover arguments are taken verbatim as the full test command.
  if args.target:
    test_cmd = [os.path.join('bin', 'run_%s' % args.target), '-v']
    test_cmd += extra_cmd_args
  else:
    test_cmd = extra_cmd_args

  test_env = dict(os.environ)
  logdog_cmd = []

  with tempfile_ext.NamedTemporaryDirectory(
      prefix='tmp_android_logdog_wrapper') as temp_directory:
    if not os.path.exists(args.logdog_bin_cmd):
      # Missing butler binary is non-fatal: the test still runs, only
      # without logdog streaming.
      logging.error(
          'Logdog binary %s unavailable. Unable to create logdog client',
          args.logdog_bin_cmd)
    else:
      streamserver_uri = 'unix:%s' % os.path.join(temp_directory,
                                                  'butler.sock')
      # NOTE(review): os.environ.get('SWARMING_TASK_ID') may be None when not
      # running under swarming, which would make os.path.join raise
      # TypeError -- confirm this script only runs on swarming bots.
      prefix = os.path.join('android', 'swarming', 'logcats',
                            os.environ.get('SWARMING_TASK_ID'))

      logdog_cmd = [
          args.logdog_bin_cmd,
          '-project', PROJECT,
          '-output', OUTPUT,
          '-prefix', prefix,
          '--service-account-json', SERVICE_ACCOUNT_JSON,
          '-coordinator-host', COORDINATOR_HOST,
          'serve',
          '-streamserver-uri', streamserver_uri]
      # The child test process locates the butler via these variables.
      test_env.update({
          'LOGDOG_STREAM_PROJECT': PROJECT,
          'LOGDOG_STREAM_PREFIX': prefix,
          'LOGDOG_STREAM_SERVER_PATH': streamserver_uri,
          'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST,
      })

    logdog_proc = None
    if logdog_cmd:
      logdog_proc = subprocess.Popen(logdog_cmd)

    # NoLeakingProcesses guarantees neither child outlives this wrapper;
    # a SIGTERM sent to the wrapper is forwarded to the test process.
    with NoLeakingProcesses(logdog_proc):
      with NoLeakingProcesses(
          subprocess.Popen(test_cmd, env=test_env)) as test_proc:
        with signal_handler.SignalHandler(signal.SIGTERM,
                                          CreateStopTestsMethod(test_proc)):
          result = test_proc.wait()
          if logdog_proc:
            def logdog_stopped():
              return logdog_proc.poll() is not None

            # Ask the butler to flush and exit, then poll up to
            # LOGDOG_TERMINATION_TIMEOUT seconds for it to stop.
            logdog_proc.terminate()
            timeout_retry.WaitFor(logdog_stopped, wait_period=1,
                                  max_tries=LOGDOG_TERMINATION_TIMEOUT)

            # If logdog_proc hasn't finished by this point, allow
            # NoLeakingProcesses to kill it.

  return result
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps b/deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps
new file mode 100644
index 0000000000..bb696587e3
--- /dev/null
+++ b/deps/v8/build/android/test_wrapper/logdog_wrapper.pydeps
@@ -0,0 +1,12 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android --output build/android/test_wrapper/logdog_wrapper.pydeps build/android/test_wrapper/logdog_wrapper.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+test_wrapper/logdog_wrapper.py
diff --git a/deps/v8/build/android/tests/symbolize/Makefile b/deps/v8/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000000..4fc53dad56
--- /dev/null
+++ b/deps/v8/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
+lib%.so: %.cc
+ $(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
diff --git a/deps/v8/build/android/tests/symbolize/a.cc b/deps/v8/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000000..f0c7ca4c67
--- /dev/null
+++ b/deps/v8/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+ A();
+ void Foo(int i);
+ void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/deps/v8/build/android/tests/symbolize/b.cc b/deps/v8/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000000..db8752099a
--- /dev/null
+++ b/deps/v8/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+ B();
+ void Baz(float f);
+ void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/deps/v8/build/android/tombstones.py b/deps/v8/build/android/tombstones.py
new file mode 100755
index 0000000000..e1a2d76d11
--- /dev/null
+++ b/deps/v8/build/android/tombstones.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Find the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes tombstone file was created with current symbols.
+
+import argparse
+import datetime
+import logging
+import os
+import sys
+
+from multiprocessing.pool import ThreadPool
+
+import devil_chromium
+
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+from pylib import constants
+from pylib.symbols import stack_symbolizer
+
+
+_TZ_UTC = {'TZ': 'UTC'}
+
+
+def _ListTombstones(device):
+ """List the tombstone files on the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+
+ Yields:
+ Tuples of (tombstone filename, date time of file on device).
+ """
+ try:
+ if not device.PathExists('/data/tombstones', as_root=True):
+ return
+ entries = device.StatDirectory('/data/tombstones', as_root=True)
+ for entry in entries:
+ if 'tombstone' in entry['filename']:
+ yield (entry['filename'],
+ datetime.datetime.fromtimestamp(entry['st_mtime']))
+ except device_errors.CommandFailedError:
+ logging.exception('Could not retrieve tombstones.')
+ except device_errors.DeviceUnreachableError:
+ logging.exception('Device unreachable retrieving tombstones.')
+ except device_errors.CommandTimeoutError:
+ logging.exception('Timed out retrieving tombstones.')
+
+
def _GetDeviceDateTime(device):
  """Reads the device's current wall-clock time (forced to UTC via _TZ_UTC).

  Args:
    device: An instance of DeviceUtils.

  Returns:
    A datetime instance.
  """
  date_lines = device.RunShellCommand(
      ['date'], check_return=True, env=_TZ_UTC)
  return datetime.datetime.strptime(
      date_lines[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+ """Retrieve the tombstone data from the device
+
+ Args:
+ device: An instance of DeviceUtils.
+ tombstone_file: the tombstone to retrieve
+
+ Returns:
+ A list of lines
+ """
+ return device.ReadFile(
+ '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+ """Deletes a tombstone from the device.
+
+ Args:
+ device: An instance of DeviceUtils.
+ tombstone_file: the tombstone to delete.
+ """
+ return device.RunShellCommand(
+ ['rm', '/data/tombstones/' + tombstone_file],
+ as_root=True, check_return=True)
+
+
+def _ResolveTombstone(args):
+ tombstone = args[0]
+ tombstone_symbolizer = args[1]
+ lines = []
+ lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+ ', about this long ago: ' +
+ (str(tombstone['device_now'] - tombstone['time']) +
+ ' Device: ' + tombstone['serial'])]
+ logging.info('\n'.join(lines))
+ logging.info('Resolving...')
+ lines += tombstone_symbolizer.ExtractAndResolveNativeStackTraces(
+ tombstone['data'],
+ tombstone['device_abi'],
+ tombstone['stack'])
+ return lines
+
+
+def _ResolveTombstones(jobs, tombstones, tombstone_symbolizer):
+ """Resolve a list of tombstones.
+
+ Args:
+ jobs: the number of jobs to use with multithread.
+ tombstones: a list of tombstones.
+ """
+ if not tombstones:
+ logging.warning('No tombstones to resolve.')
+ return []
+ if len(tombstones) == 1:
+ data = [_ResolveTombstone([tombstones[0], tombstone_symbolizer])]
+ else:
+ pool = ThreadPool(jobs)
+ data = pool.map(
+ _ResolveTombstone,
+ [[tombstone, tombstone_symbolizer] for tombstone in tombstones])
+ pool.close()
+ pool.join()
+ resolved_tombstones = []
+ for tombstone in data:
+ resolved_tombstones.extend(tombstone)
+ return resolved_tombstones
+
+
def _GetTombstonesForDevice(device, resolve_all_tombstones,
                            include_stack_symbols,
                            wipe_tombstones):
  """Returns a list of tombstone dicts for a given device.

  Args:
    device: An instance of DeviceUtils.
    resolve_all_tombstones: Whether to resolve every tombstone.
    include_stack_symbols: Whether to include symbols for stack data.
    wipe_tombstones: Whether to wipe tombstones.

  Returns:
    A list of dicts with keys serial, device_abi, device_now, time, file,
    stack and data, one per tombstone selected for resolution.
  """
  ret = []
  all_tombstones = list(_ListTombstones(device))
  if not all_tombstones:
    logging.warning('No tombstones.')
    return ret

  # Sort the tombstones in date order, descending. Use key=/reverse= rather
  # than the Python 2-only cmp= parameter (the cmp builtin was removed in
  # Python 3); the resulting order is identical.
  all_tombstones.sort(key=lambda entry: entry[1], reverse=True)

  # Only resolve the most recent unless --all-tombstones given.
  tombstones = all_tombstones if resolve_all_tombstones else [all_tombstones[0]]

  device_now = _GetDeviceDateTime(device)
  try:
    for tombstone_file, tombstone_time in tombstones:
      ret += [{'serial': str(device),
               'device_abi': device.product_cpu_abi,
               'device_now': device_now,
               'time': tombstone_time,
               'file': tombstone_file,
               'stack': include_stack_symbols,
               'data': _GetTombstoneData(device, tombstone_file)}]
  except device_errors.CommandFailedError:
    # Dump the directory listing to help diagnose the failure, then re-raise.
    for entry in device.StatDirectory(
        '/data/tombstones', as_root=True, timeout=60):
      logging.info('%s: %s', str(device), entry)
    raise

  # Erase all the tombstones if desired.
  if wipe_tombstones:
    for tombstone_file, _ in all_tombstones:
      _EraseTombstone(device, tombstone_file)

  return ret
+
+
def ClearAllTombstones(device):
  """Deletes every tombstone found on the device.

  Args:
    device: An instance of DeviceUtils.
  """
  found = list(_ListTombstones(device))
  if not found:
    logging.warning('No tombstones to clear.')
  for tombstone_file, _ in found:
    _EraseTombstone(device, tombstone_file)
+
+
def ResolveTombstones(device, resolve_all_tombstones, include_stack_symbols,
                      wipe_tombstones, jobs=4, apk_under_test=None,
                      tombstone_symbolizer=None):
  """Resolve tombstones in the device.

  Args:
    device: An instance of DeviceUtils.
    resolve_all_tombstones: Whether to resolve every tombstone.
    include_stack_symbols: Whether to include symbols for stack data.
    wipe_tombstones: Whether to wipe tombstones.
    jobs: Number of jobs to use when processing multiple crash stacks.
    apk_under_test: Used to construct a Symbolizer when no
        tombstone_symbolizer is supplied.
    tombstone_symbolizer: Optional pre-built symbolizer to reuse instead of
        creating a new one.

  Returns:
    A list of resolved tombstones.
  """
  return _ResolveTombstones(jobs,
                            _GetTombstonesForDevice(device,
                                                    resolve_all_tombstones,
                                                    include_stack_symbols,
                                                    wipe_tombstones),
                            (tombstone_symbolizer
                             or stack_symbolizer.Symbolizer(apk_under_test)))
+
+
def main():
  """Finds, symbolizes and prints tombstones from connected devices."""
  # Resolved tombstone lines are emitted via logging.info, so route all log
  # output through a custom formatter on stdout.
  custom_handler = logging.StreamHandler(sys.stdout)
  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
  logging.getLogger().addHandler(custom_handler)
  logging.getLogger().setLevel(logging.INFO)

  parser = argparse.ArgumentParser()
  parser.add_argument('--device',
                      help='The serial number of the device. If not specified '
                           'will use all devices.')
  parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
  parser.add_argument('-a', '--all-tombstones', action='store_true',
                      help='Resolve symbols for all tombstones, rather than '
                           'just the most recent.')
  parser.add_argument('-s', '--stack', action='store_true',
                      help='Also include symbols for stack data')
  parser.add_argument('-w', '--wipe-tombstones', action='store_true',
                      help='Erase all tombstones from device after processing')
  parser.add_argument('-j', '--jobs', type=int,
                      default=4,
                      help='Number of jobs to use when processing multiple '
                           'crash stacks.')
  parser.add_argument('--output-directory',
                      help='Path to the root build directory.')
  parser.add_argument('--adb-path', type=os.path.abspath,
                      help='Path to the adb binary.')
  args = parser.parse_args()

  devil_chromium.Initialize(adb_path=args.adb_path)

  blacklist = (device_blacklist.Blacklist(args.blacklist_file)
               if args.blacklist_file
               else None)

  if args.output_directory:
    constants.SetOutputDirectory(args.output_directory)

  # Do an up-front test that the output directory is known.
  constants.CheckOutputDirectory()

  if args.device:
    devices = [device_utils.DeviceUtils(args.device)]
  else:
    devices = device_utils.DeviceUtils.HealthyDevices(blacklist)

  # This must be done serially because strptime can hit a race condition if
  # used for the first time in a multithreaded environment.
  # http://bugs.python.org/issue7980
  for device in devices:
    resolved_tombstones = ResolveTombstones(
        device, args.all_tombstones,
        args.stack, args.wipe_tombstones, args.jobs)
    for line in resolved_tombstones:
      logging.info(line)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/deps/v8/build/android/update_deps/update_third_party_deps.py b/deps/v8/build/android/update_deps/update_third_party_deps.py
new file mode 100755
index 0000000000..3a869c43ec
--- /dev/null
+++ b/deps/v8/build/android/update_deps/update_third_party_deps.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Uploads or downloads third party libraries to or from google cloud storage.
+
+This script will only work for Android checkouts.
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+
+sys.path.append(os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib import constants
+from pylib.constants import host_paths
+
+sys.path.append(
+ os.path.abspath(
+ os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools')))
+import download_from_google_storage
+import upload_to_google_storage
+
+
def _AddBasicArguments(parser):
  """Registers the options shared by the upload and download sub-commands.

  Args:
    parser: The argparse (sub-)parser to extend in place.
  """
  parser.add_argument(
      '--sdk-root', default=constants.ANDROID_SDK_ROOT,
      help='base path to the Android SDK root')
  parser.add_argument(
      '-v', '--verbose', action='store_true', help='print debug information')
  parser.add_argument(
      '-b', '--bucket-path', required=True,
      help='The path of the lib file in Google Cloud Storage.')
  parser.add_argument(
      '-l', '--local-path', required=True,
      help='The base path of the third_party directory')
+
+
def _CheckPaths(bucket_path, local_path):
  """Normalizes the bucket URL and validates the local directory.

  Args:
    bucket_path: Bucket path, with or without a leading 'gs://'.
    local_path: Directory path relative to the source root.

  Returns:
    A (bucket_url, absolute_local_path) tuple.

  Raises:
    IOError: if the local path is not an existing directory.
  """
  bucket_url = (bucket_path if bucket_path.startswith('gs://')
                else 'gs://%s' % bucket_path)
  local_path = os.path.join(host_paths.DIR_SOURCE_ROOT, local_path)
  if not os.path.isdir(local_path):
    raise IOError(
        'The library local path is not a valid directory: %s' % local_path)
  return bucket_url, local_path
+
+
+def _CheckFileList(local_path, file_list):
+ local_path = os.path.abspath(local_path)
+ abs_path_list = [os.path.abspath(f) for f in file_list]
+ for f in abs_path_list:
+ if os.path.commonprefix([f, local_path]) != local_path:
+ raise IOError(
+ '%s in the arguments is not descendant of the specified directory %s'
+ % (f, local_path))
+ return abs_path_list
+
+
+def _PurgeSymlinks(local_path):
+ for dirpath, _, filenames in os.walk(local_path):
+ for f in filenames:
+ path = os.path.join(dirpath, f)
+ if os.path.islink(path):
+ os.remove(path)
+
+
def Upload(arguments):
  """Upload files in a third_party directory to google storage"""
  bucket_url, local_path = _CheckPaths(arguments.bucket_path,
                                       arguments.local_path)
  file_list = _CheckFileList(local_path, arguments.file_list)
  upload_kwargs = {
      'input_filenames': file_list,
      'base_url': bucket_url,
      'gsutil': arguments.gsutil,
      'force': False,
      'use_md5': False,
      'num_threads': 1,
      'skip_hashing': False,
      'gzip': None,
  }
  return upload_to_google_storage.upload_to_google_storage(**upload_kwargs)
+
+
def Download(arguments):
  """Download files based on sha1 files in a third_party dir from gcs"""
  bucket_url, local_path = _CheckPaths(arguments.bucket_path,
                                       arguments.local_path)
  # Remove symlinks under the destination before downloading into it.
  _PurgeSymlinks(local_path)
  download_kwargs = {
      'gsutil': arguments.gsutil,
      'num_threads': 1,
      'directory': True,
      'recursive': True,
      'force': False,
      'output': None,
      'ignore_errors': False,
      'sha1_file': None,
      'verbose': arguments.verbose,
      'auto_platform': False,
      'extract': False,
  }
  return download_from_google_storage.download_from_google_storage(
      local_path, bucket_url, **download_kwargs)
+
+
def main(argv):
  """Dispatches to the 'download' or 'upload' sub-command.

  Args:
    argv: Command-line arguments, excluding the program name.

  Returns:
    The sub-command's return value, or 0 when skipped because this is not
    an Android checkout.
  """
  parser = argparse.ArgumentParser()
  subparsers = parser.add_subparsers(title='commands')
  download_parser = subparsers.add_parser(
      'download', help='download the library from the cloud storage')
  _AddBasicArguments(download_parser)
  download_parser.set_defaults(func=Download)

  upload_parser = subparsers.add_parser(
      'upload', help='find all jar files in a third_party directory and ' +
      'upload them to cloud storage')
  _AddBasicArguments(upload_parser)
  upload_parser.set_defaults(func=Upload)
  upload_parser.add_argument(
      '-f', '--file-list', nargs='+', required=True,
      help='A list of base paths for files in third_party to upload.')

  arguments = parser.parse_args(argv)
  # Quietly succeed on non-Android checkouts so callers can run this
  # unconditionally.
  if not os.path.isdir(arguments.sdk_root):
    logging.debug('Did not find the Android SDK root directory at "%s".',
                  arguments.sdk_root)
    logging.info('Skipping, not on an android checkout.')
    return 0

  # Both sub-commands rely on a gsutil wrapper from depot_tools.
  arguments.gsutil = download_from_google_storage.Gsutil(
      download_from_google_storage.GSUTIL_DEFAULT_PATH)
  return arguments.func(arguments)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/deps/v8/build/android/update_verification.py b/deps/v8/build/android/update_verification.py
new file mode 100755
index 0000000000..40cb64ac5d
--- /dev/null
+++ b/deps/v8/build/android/update_verification.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device.
+
+This script will help verify that app data is preserved during an update.
+To use this script first run it with the create_app_data option.
+
+./update_verification.py create_app_data --old-apk <path> --app-data <path>
+
+The script will then install the old apk, prompt you to create some app data
+(bookmarks, etc.), and then save the app data in the path you gave it.
+
+Next, once you have some app data saved, run this script with the test_update
+option.
+
+./update_verification.py test_update --old-apk <path> --new-apk <path>
+--app-data <path>
+
+This will install the old apk, load the saved app data, install the new apk,
+and ask the user to verify that all of the app data was preserved.
+"""
+
+import argparse
+import logging
+import sys
+
+import devil_chromium
+
+from devil.android import apk_helper
+from devil.android import device_blacklist
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.utils import run_tests_helper
+
+def CreateAppData(device, old_apk, app_data, package_name):
+  """Installs old_apk and saves user-created app data via adb backup.
+
+  Args:
+    device: DeviceUtils instance for the device under test.
+    old_apk: Path to the apk to install.
+    app_data: Path where the adb backup file will be written.
+    package_name: Name of the package whose data should be backed up.
+  """
+  device.Install(old_apk)
+  # raw_input: this script targets Python 2 (matches the plain `python`
+  # shebang at the top of the file).
+  raw_input('Set the application state. Once ready, press enter and '
+            'select "Backup my data" on the device.')
+  device.adb.Backup(app_data, packages=[package_name])
+  logging.critical('Application data saved to %s', app_data)
+
+def TestUpdate(device, old_apk, new_apk, app_data, package_name):
+  """Installs old_apk, restores saved app data, then over-installs new_apk.
+
+  Args:
+    device: DeviceUtils instance for the device under test.
+    old_apk: Path to the apk to update from.
+    new_apk: Path to the apk to update to.
+    app_data: Path to the adb backup produced by CreateAppData.
+    package_name: Package expected to be installed after the restore.
+
+  Raises:
+    Exception: If package_name is not installed after restoring app data.
+  """
+  device.Install(old_apk)
+  device.adb.Restore(app_data)
+  # Restore command is not synchronous — block on user confirmation before
+  # checking that the package actually landed on the device.
+  raw_input('Select "Restore my data" on the device. Then press enter to '
+            'continue.')
+  device_path = device.GetApplicationPaths(package_name)
+  if not device_path:
+    raise Exception('Expected package %s to already be installed. '
+                    'Package name might have changed!' % package_name)
+
+  logging.info('Verifying that %s can be overinstalled.', new_apk)
+  # reinstall=True keeps existing app data, which is the point of the test.
+  device.adb.Install(new_apk, reinstall=True)
+  logging.critical('Successfully updated to the new apk. Please verify that '
+                   'the application data is preserved.')
+
+def main():
+  """Parses arguments, picks a healthy device and runs the chosen command.
+
+  Supports two subcommands: create_app_data (save a backup of app state)
+  and test_update (restore that backup and over-install a newer apk).
+
+  Raises:
+    device_errors.NoDevicesError: If no healthy device is attached.
+    Exception: If an unknown subcommand slips past argparse.
+  """
+  parser = argparse.ArgumentParser(
+      description="Script to do semi-automated upgrade testing.")
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Print verbose log information.')
+  parser.add_argument('--blacklist-file', help='Device blacklist JSON file.')
+  command_parsers = parser.add_subparsers(dest='command')
+
+  subparser = command_parsers.add_parser('create_app_data')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup should be '
+                              'saved to.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  subparser = command_parsers.add_parser('test_update')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--new-apk', required=True,
+                         help='Path to apk to update to.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup is saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  args = parser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devil_chromium.Initialize()
+
+  blacklist = (device_blacklist.Blacklist(args.blacklist_file)
+               if args.blacklist_file
+               else None)
+
+  # Arbitrarily use the first healthy device; this is an interactive,
+  # single-device workflow.
+  devices = device_utils.DeviceUtils.HealthyDevices(blacklist)
+  if not devices:
+    raise device_errors.NoDevicesError()
+  device = devices[0]
+  logging.info('Using device %s for testing.', str(device))
+
+  # Fall back to reading the package name out of the old apk when the caller
+  # did not supply one explicitly.
+  package_name = (args.package_name if args.package_name
+                  else apk_helper.GetPackageName(args.old_apk))
+  if args.command == 'create_app_data':
+    CreateAppData(device, args.old_apk, args.app_data, package_name)
+  elif args.command == 'test_update':
+    TestUpdate(
+        device, args.old_apk, args.new_apk, args.app_data, package_name)
+  else:
+    raise Exception('Unknown test command: %s' % args.command)
+
+# Script entry point. main() returns None on success, so sys.exit exits 0.
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/deps/v8/build/android/video_recorder.py b/deps/v8/build/android/video_recorder.py
new file mode 100755
index 0000000000..b21759a35a
--- /dev/null
+++ b/deps/v8/build/android/video_recorder.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Thin wrapper around devil's video_recorder tool."""
+
+import sys
+
+import devil_chromium
+from devil.android.tools import video_recorder
+
+if __name__ == '__main__':
+  # Initialize devil for this checkout (presumably sets up its dependency
+  # paths — see devil_chromium), then delegate all argument parsing and
+  # recording to the devil tool.
+  devil_chromium.Initialize()
+  sys.exit(video_recorder.main())